Merge remote-tracking branch 'origin/development' into Results.read-.place

commit 65b03aeb2d

@@ -96,7 +96,7 @@ processing:
  stage: python
  script:
  - cd $DAMASKROOT/python
- - pytest --basetemp=${TESTROOT}/python -v --cov --cov-report=term
+ - COLUMNS=256 pytest --basetemp=${TESTROOT}/python -v --cov --cov-report=term
  - coverage report --fail-under=90
  except:
  - master

@@ -116,7 +116,7 @@ compile_grid_Intel:
  script:
  - module load $IntelCompiler $MPI_Intel $PETSc_Intel
  - cd pytest
- - pytest -k 'compile and grid' --basetemp=${TESTROOT}/compile_grid_Intel
+ - COLUMNS=256 pytest -k 'compile and grid' --basetemp=${TESTROOT}/compile_grid_Intel
  except:
  - master
  - release

@@ -126,7 +126,7 @@ compile_mesh_Intel:
  script:
  - module load $IntelCompiler $MPI_Intel $PETSc_Intel
  - cd pytest
- - pytest -k 'compile and mesh' --basetemp=${TESTROOT}/compile_mesh_Intel
+ - COLUMNS=256 pytest -k 'compile and mesh' --basetemp=${TESTROOT}/compile_mesh_Intel
  except:
  - master
  - release

@@ -136,7 +136,7 @@ compile_grid_GNU:
  script:
  - module load $GNUCompiler $MPI_GNU $PETSc_GNU
  - cd pytest
- - pytest -k 'compile and grid' --basetemp=${TESTROOT}/compile_grid_GNU
+ - COLUMNS=256 pytest -k 'compile and grid' --basetemp=${TESTROOT}/compile_grid_GNU
  except:
  - master
  - release

@@ -146,7 +146,7 @@ compile_mesh_GNU:
  script:
  - module load $GNUCompiler $MPI_GNU $PETSc_GNU
  - cd pytest
- - pytest -k 'compile and mesh' --basetemp=${TESTROOT}/compile_mesh_GNU
+ - COLUMNS=256 pytest -k 'compile and mesh' --basetemp=${TESTROOT}/compile_mesh_GNU
  except:
  - master
  - release

@@ -156,7 +156,7 @@ compile_Marc:
  script:
  - module load $IntelMarc $HDF5Marc $MSC
  - cd pytest
- - pytest -k 'compile and Marc' --basetemp=${TESTROOT}/compile_Marc
+ - COLUMNS=256 pytest -k 'compile and Marc' --basetemp=${TESTROOT}/compile_Marc
  except:
  - master
  - release

@@ -191,7 +191,7 @@ core:
  script:
  - module load $IntelCompiler $MPI_Intel $PETSc_Intel
  - cd pytest
- - pytest -k 'not compile' --basetemp=${TESTROOT}/fortran -v
+ - COLUMNS=256 pytest -k 'not compile' --basetemp=${TESTROOT}/fortran -v
  except:
  - master
  - release
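
The only change in the CI jobs above is the `COLUMNS=256` prefix on each `pytest` call. pytest sizes its report lines from the detected terminal width, and Python's `shutil.get_terminal_size` consults the `COLUMNS` environment variable before falling back to querying the (in CI usually absent) terminal. A minimal sketch of that lookup, independent of the CI configuration:

```python
import os
import shutil

# shutil.get_terminal_size reads the COLUMNS/LINES environment variables first
# and only falls back to the OS query or the given fallback when they are unset,
# which is the usual situation in a non-interactive CI job without a TTY.
os.environ['COLUMNS'] = '256'
size = shutil.get_terminal_size(fallback=(80, 24))
print(size.columns)   # 256, regardless of any attached terminal
```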

PRIVATE
@@ -1 +1 @@
- Subproject commit 4511a963da5094db309a6a68783f24a23c76da81
+ Subproject commit 4ce1f786dc2a613f29b2c8681fcf751d6803d38e

@@ -2,3 +2,4 @@
  dist
  damask.egg-info
  .coverage
+ .coverage.*

@@ -157,18 +157,18 @@ class ConfigMaterial(Config):
  f = h5py.File(fname,'r')

  if grain_data is None:
- phase = f['/'.join((b,c,phases))][()].flatten()
- O = Rotation.from_Euler_angles(f['/'.join((b,c,Euler_angles))]).as_quaternion().reshape(-1,4) # noqa
+ phase = f['/'.join([b,c,phases])][()].flatten()
+ O = Rotation.from_Euler_angles(f['/'.join([b,c,Euler_angles])]).as_quaternion().reshape(-1,4) # noqa
  _,idx = np.unique(np.hstack([O,phase.reshape(-1,1)]),return_index=True,axis=0)
  idx = np.sort(idx)
  else:
- phase = f['/'.join((b,grain_data,phases))][()]
- O = Rotation.from_Euler_angles(f['/'.join((b,grain_data,Euler_angles))]).as_quaternion() # noqa
+ phase = f['/'.join([b,grain_data,phases])][()]
+ O = Rotation.from_Euler_angles(f['/'.join([b,grain_data,Euler_angles])]).as_quaternion() # noqa
  idx = np.arange(phase.size)

  if cell_ensemble_data is not None and phase_names is not None:
  try:
- names = np.array([s.decode() for s in f['/'.join((b,cell_ensemble_data,phase_names))]])
+ names = np.array([s.decode() for s in f['/'.join([b,cell_ensemble_data,phase_names])]])
  phase = names[phase]
  except KeyError:
  pass

@@ -305,18 +305,18 @@ class Grid:
  c = util.DREAM3D_cell_data_group(fname) if cell_data is None else cell_data
  f = h5py.File(fname, 'r')

- cells = f['/'.join((b,'_SIMPL_GEOMETRY','DIMENSIONS'))][()]
- size = f['/'.join((b,'_SIMPL_GEOMETRY','SPACING'))] * cells
- origin = f['/'.join((b,'_SIMPL_GEOMETRY','ORIGIN'))][()]
+ cells = f['/'.join([b,'_SIMPL_GEOMETRY','DIMENSIONS'])][()]
+ size = f['/'.join([b,'_SIMPL_GEOMETRY','SPACING'])] * cells
+ origin = f['/'.join([b,'_SIMPL_GEOMETRY','ORIGIN'])][()]

  if feature_IDs is None:
- phase = f['/'.join((b,c,phases))][()].reshape(-1,1)
- O = Rotation.from_Euler_angles(f['/'.join((b,c,Euler_angles))]).as_quaternion().reshape(-1,4) # noqa
+ phase = f['/'.join([b,c,phases])][()].reshape(-1,1)
+ O = Rotation.from_Euler_angles(f['/'.join([b,c,Euler_angles])]).as_quaternion().reshape(-1,4) # noqa
  unique,unique_inverse = np.unique(np.hstack([O,phase]),return_inverse=True,axis=0)
  ma = np.arange(cells.prod()) if len(unique) == cells.prod() else \
  np.arange(unique.size)[np.argsort(pd.unique(unique_inverse))][unique_inverse]
  else:
- ma = f['/'.join((b,c,feature_IDs))][()].flatten()
+ ma = f['/'.join([b,c,feature_IDs])][()].flatten()

  return Grid(ma.reshape(cells,order='F'),size,origin,util.execution_stamp('Grid','load_DREAM3D'))
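
Both DREAM3D loaders above (and several Result hunks below) only swap the tuples passed to `'/'.join` for lists; `str.join` accepts any iterable of strings, so both spellings build identical HDF5 group paths and the change is purely stylistic. A quick sketch with hypothetical group names:

```python
# '/'.join assembles an HDF5-style group path from its components; tuple and
# list arguments are interchangeable because str.join takes any iterable.
b, c, phases = 'DataContainers/SyntheticVolume', 'CellData', 'Phases'   # hypothetical names

assert '/'.join((b, c, phases)) == '/'.join([b, c, phases]) \
       == 'DataContainers/SyntheticVolume/CellData/Phases'
```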

@@ -34,7 +34,7 @@ def _read(dataset):
  return np.array(dataset,dtype=dtype)

  def _match(requested,existing):
- """Find matches among two sets of labels"""
+ """Find matches among two sets of labels."""
  def flatten_list(list_of_lists):
  return [e for e_ in list_of_lists for e in e_]

@@ -159,7 +159,7 @@ class Result:
  Select from 'set', 'add', and 'del'.
  what : str
  Attribute to change (must be from self.visible).
- datasets : list of str or bool
+ datasets : str, int, list of str, list of int, or bool
  Name of datasets as list; supports ? and * wildcards.
  True is equivalent to [*], False is equivalent to [].

@@ -169,7 +169,7 @@ class Result:
  datasets = '*'
  elif datasets is False or datasets is None:
  datasets = []
- choice = datasets if hasattr(datasets,'__iter__') and not isinstance(datasets,str) else \
+ choice = list(datasets).copy() if hasattr(datasets,'__iter__') and not isinstance(datasets,str) else \
  [datasets]

  inc = 'inc' if self.version_minor < 12 else 'increment_' # compatibility hack
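
The view-management hunk above widens the accepted types for `datasets` (a single string or integer, a list of either, or a bool) and copies iterable input into a fresh list so the caller's object is never mutated. A standalone sketch of that normalization, with hypothetical values:

```python
def normalize(datasets):
    """Normalize user input into a list, mirroring the idea of the hunk above."""
    if datasets is True:
        datasets = '*'
    elif datasets is False or datasets is None:
        datasets = []
    # iterables (except plain strings) are copied; scalars are wrapped in a list
    return list(datasets).copy() if hasattr(datasets, '__iter__') and not isinstance(datasets, str) else \
           [datasets]

assert normalize(True)       == ['*']
assert normalize(False)      == []
assert normalize('sigma')    == ['sigma']
assert normalize(4)          == [4]
assert normalize(['F', 'P']) == ['F', 'P']
```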

@@ -1095,7 +1095,7 @@ class Result:
  for ty in ['phase','homogenization']:
  for label in self.visible[ty+'s']:
  for field in self.visible['fields']:
- for out in _match(output,f['/'.join((inc,ty,label,field))].keys()):
+ for out in _match(output,f['/'.join([inc,ty,label,field])].keys()):
  name = '/'.join([inc,ty,label,field,out])
  shape = f[name].shape[1:]
  dtype = f[name].dtype

@@ -1165,29 +1165,29 @@ class Result:
  at_cell_ph = []
  in_data_ph = []
  for c in range(self.N_constituents):
- at_cell_ph.append({label: np.where(f['/'.join((grp,'phase'))][:,c][name] == label.encode())[0] \
+ at_cell_ph.append({label: np.where(f['/'.join([grp,'phase'])][:,c][name] == label.encode())[0] \
  for label in self.visible['phases']})
- in_data_ph.append({label: f['/'.join((grp,'phase'))][member][at_cell_ph[c][label]][:,c] \
+ in_data_ph.append({label: f['/'.join([grp,'phase'])][member][at_cell_ph[c][label]][:,c] \
  for label in self.visible['phases']})

- at_cell_ho = {label: np.where(f['/'.join((grp,'homogenization'))][:][name] == label.encode())[0] \
+ at_cell_ho = {label: np.where(f['/'.join([grp,'homogenization'])][:][name] == label.encode())[0] \
  for label in self.visible['homogenizations']}
- in_data_ho = {label: f['/'.join((grp,'homogenization'))][member][at_cell_ho[label]] \
+ in_data_ho = {label: f['/'.join([grp,'homogenization'])][member][at_cell_ho[label]] \
  for label in self.visible['homogenizations']}

  for inc in util.show_progress(self.visible['increments']):

- u = _read(f['/'.join((inc,'geometry','u_n' if mode.lower() == 'cell' else 'u_p'))])
+ u = _read(f['/'.join([inc,'geometry','u_n' if mode.lower() == 'cell' else 'u_p'])])
  v.add(u,'u')

  for ty in ['phase','homogenization']:
  for field in self.visible['fields']:
  for label in self.visible[ty+'s']:
- if field not in f['/'.join((inc,ty,label))].keys(): continue
+ if field not in f['/'.join([inc,ty,label])].keys(): continue
  outs = {}

- for out in _match(output,f['/'.join((inc,ty,label,field))].keys()):
- data = ma.array(_read(f['/'.join((inc,ty,label,field,out))]))
+ for out in _match(output,f['/'.join([inc,ty,label,field])].keys()):
+ data = ma.array(_read(f['/'.join([inc,ty,label,field,out])]))

  if ty == 'phase':
  if out+suffixes[0] not in outs.keys():

@@ -1205,7 +1205,7 @@ class Result:
  outs[out][at_cell_ho[label]] = data[in_data_ho[label]]

  for label,dataset in outs.items():
- v.add(dataset,' / '.join(('/'.join((ty,field,label)),dataset.dtype.metadata['unit'])))
+ v.add(dataset,' / '.join(['/'.join([ty,field,label]),dataset.dtype.metadata['unit']]))

  v.save(f'{self.fname.stem}_inc{inc[ln:].zfill(N_digits)}')

@@ -1236,16 +1236,16 @@ class Result:
  for inc in util.show_progress(self.visible['increments']):
  r[inc] = {'phase':{},'homogenization':{},'geometry':{}}

- for out in _match(output,f['/'.join((inc,'geometry'))].keys()):
- r[inc]['geometry'][out] = _read(f['/'.join((inc,'geometry',out))])
+ for out in _match(output,f['/'.join([inc,'geometry'])].keys()):
+ r[inc]['geometry'][out] = _read(f['/'.join([inc,'geometry',out])])

  for ty in ['phase','homogenization']:
  for label in self.visible[ty+'s']:
  r[inc][ty][label] = {}
- for field in _match(self.visible['fields'],f['/'.join((inc,ty,label))].keys()):
+ for field in _match(self.visible['fields'],f['/'.join([inc,ty,label])].keys()):
  r[inc][ty][label][field] = {}
- for out in _match(output,f['/'.join((inc,ty,label,field))].keys()):
- r[inc][ty][label][field][out] = _read(f['/'.join((inc,ty,label,field,out))])
+ for out in _match(output,f['/'.join([inc,ty,label,field])].keys()):
+ r[inc][ty][label][field][out] = _read(f['/'.join([inc,ty,label,field,out])])

  if prune: r = util.dict_prune(r)
  if flatten: r = util.dict_flatten(r)

@@ -1303,30 +1303,30 @@ class Result:
  at_cell_ph = []
  in_data_ph = []
  for c in range(self.N_constituents):
- at_cell_ph.append({label: np.where(f['/'.join((grp,'phase'))][:,c][name] == label.encode())[0] \
+ at_cell_ph.append({label: np.where(f['/'.join([grp,'phase'])][:,c][name] == label.encode())[0] \
  for label in self.visible['phases']})
- in_data_ph.append({label: f['/'.join((grp,'phase'))][member][at_cell_ph[c][label]][:,c] \
+ in_data_ph.append({label: f['/'.join([grp,'phase'])][member][at_cell_ph[c][label]][:,c] \
  for label in self.visible['phases']})

- at_cell_ho = {label: np.where(f['/'.join((grp,'homogenization'))][:][name] == label.encode())[0] \
+ at_cell_ho = {label: np.where(f['/'.join([grp,'homogenization'])][:][name] == label.encode())[0] \
  for label in self.visible['homogenizations']}
- in_data_ho = {label: f['/'.join((grp,'homogenization'))][member][at_cell_ho[label]] \
+ in_data_ho = {label: f['/'.join([grp,'homogenization'])][member][at_cell_ho[label]] \
  for label in self.visible['homogenizations']}

  for inc in util.show_progress(self.visible['increments']):
  r[inc] = {'phase':{},'homogenization':{},'geometry':{}}

- for out in _match(output,f['/'.join((inc,'geometry'))].keys()):
- r[inc]['geometry'][out] = _read(f['/'.join((inc,'geometry',out))])
+ for out in _match(output,f['/'.join([inc,'geometry'])].keys()):
+ r[inc]['geometry'][out] = _read(f['/'.join([inc,'geometry',out])])

  for ty in ['phase','homogenization']:
  for label in self.visible[ty+'s']:
- for field in _match(self.visible['fields'],f['/'.join((inc,ty,label))].keys()):
+ for field in _match(self.visible['fields'],f['/'.join([inc,ty,label])].keys()):
  if field not in r[inc][ty].keys():
  r[inc][ty][field] = {}

- for out in _match(output,f['/'.join((inc,ty,label,field))].keys()):
- data = ma.array(_read(f['/'.join((inc,ty,label,field,out))]))
+ for out in _match(output,f['/'.join([inc,ty,label,field])].keys()):
+ data = ma.array(_read(f['/'.join([inc,ty,label,field,out])]))

  if ty == 'phase':
  if out+suffixes[0] not in r[inc][ty][field].keys():

@@ -27,20 +27,69 @@ class Table:
  self.comments = [] if comments_ is None else [c for c in comments_]
  self.data = pd.DataFrame(data=data)
  self.shapes = { k:(v,) if isinstance(v,(np.int64,np.int32,int)) else v for k,v in shapes.items() }
- self._label_uniform()
+ self._relabel('uniform')

  def __repr__(self):
  """Brief overview."""
- return '\n'.join(['# '+c for c in self.comments])+'\n'+self.data.__repr__()
+ self._relabel('shapes')
+ data_repr = self.data.__repr__()
+ self._relabel('uniform')
+ return '\n'.join(['# '+c for c in self.comments])+'\n'+data_repr

  def __getitem__(self,item):
- """Return slice according to item."""
- return self.__class__(data=self.data[item],shapes=self.shapes,comments=self.comments)
+ """
+ Slice the Table according to item.
+
+ Parameters
+ ----------
+ item : row and/or column indexer
+ Slice to select from Table.
+
+ Returns
+ -------
+ slice : Table
+ Sliced part of the Table.
+
+ Examples
+ --------
+ >>> import damask
+ >>> import numpy as np
+ >>> tbl = damask.Table(data=np.arange(12).reshape((4,3)),
+ ... shapes=dict(colA=(1,),colB=(1,),colC=(1,)))
+ >>> tbl['colA','colB']
+ colA colB
+ 0 0 1
+ 1 3 4
+ 2 6 7
+ 3 9 10
+ >>> tbl[::2,['colB','colA']]
+ colB colA
+ 0 1 0
+ 2 7 6
+ >>> tbl[1:2,'colB']
+ colB
+ 1 4
+ 2 7
+
+ """
+ item = (item,slice(None,None,None)) if isinstance(item,slice) else \
+ item if isinstance(item[0],slice) else \
+ (slice(None,None,None),item)
+ sliced = self.data.loc[item]
+ cols = np.array(sliced.columns if isinstance(sliced,pd.core.frame.DataFrame) else [item[1]])
+ _,idx = np.unique(cols,return_index=True)
+ return self.__class__(data=sliced,
+ shapes = {k:self.shapes[k] for k in cols[np.sort(idx)]},
+ comments=self.comments)

  def __len__(self):
  """Number of rows."""
  return len(self.data)

  def __copy__(self):
  """Create deep copy."""
  return copy.deepcopy(self)
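
The new `__getitem__` accepts a bare row slice, a `(rows, columns)` pair, or a bare column selection, and normalizes all three into a `(rows, columns)` tuple for `DataFrame.loc`. A reduced sketch of just that normalization step, outside the class and with hedged example inputs:

```python
def normalize_item(item):
    """Map the three supported call forms onto a (rows, columns) pair,
    following the same cascade as the __getitem__ added above."""
    return (item, slice(None)) if isinstance(item, slice) else \
           item                if isinstance(item[0], slice) else \
           (slice(None), item)

assert normalize_item(slice(0, 2))               == (slice(0, 2), slice(None))  # tbl[:2]
assert normalize_item((slice(0, 2), ['F', 's'])) == (slice(0, 2), ['F', 's'])   # tbl[:2, ['F','s']]
assert normalize_item(('F', 's'))                == (slice(None), ('F', 's'))   # tbl['F','s']
```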

@@ -48,21 +97,51 @@ class Table:
  copy = __copy__

- def _label_discrete(self):
- """Label data individually, e.g. v v v ==> 1_v 2_v 3_v."""
+ def _label(self,what,how):
+ """
+ Expand labels according to data shape.
+
+ Parameters
+ ----------
+ what : str or list
+ Labels to expand.
+ how : str
+ Mode of labeling.
+ 'uniform' ==> v v v
+ 'shapes' ==> 3:v v v
+ 'linear' ==> 1_v 2_v 3_v
+
+ """
+ what = [what] if isinstance(what,str) else what
  labels = []
- for label,shape in self.shapes.items():
- size = int(np.prod(shape))
+ for label in what:
+ shape = self.shapes[label]
+ size = np.prod(shape,dtype=int)
+ if how == 'uniform':
+ labels += [label] * size
+ elif how == 'shapes':
+ labels += [('' if size == 1 or i>0 else f'{util.srepr(shape,"x")}:')+label for i in range(size)]
+ elif how == 'linear':
  labels += [('' if size == 1 else f'{i+1}_')+label for i in range(size)]
- self.data.columns = labels
+ else:
+ raise KeyError
+ return labels

- def _label_uniform(self):
- """Label data uniformly, e.g. 1_v 2_v 3_v ==> v v v."""
- labels = []
- for label,shape in self.shapes.items():
- labels += [label] * int(np.prod(shape))
- self.data.columns = labels
+ def _relabel(self,how):
+ """
+ Modify labeling of data in-place.
+
+ Parameters
+ ----------
+ how : str
+ Mode of labeling.
+ 'uniform' ==> v v v
+ 'shapes' ==> 3:v v v
+ 'linear' ==> 1_v 2_v 3_v
+
+ """
+ self.data.columns = self._label(self.shapes,how)

  def _add_comment(self,label,shape,info):
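
The old `_label_uniform`/`_label_discrete` pair is folded into one `_label(what, how)` helper with three modes ('uniform', 'shapes', 'linear') plus a thin `_relabel` wrapper that rewrites the DataFrame columns. A self-contained sketch of the expansion rule for a single shaped column, assuming a hypothetical column 'F' of shape (3,3) and using `"x".join` where the hunk uses `util.srepr`:

```python
import numpy as np

def expand(label, shape, how):
    """Expand one column label according to its shape, mimicking the three
    labeling modes introduced above."""
    size = np.prod(shape, dtype=int)
    if how == 'uniform':   # F F F ... (label repeated per component)
        return [label]*size
    if how == 'shapes':    # 3x3:F F F ... (shape prefix only on the first component)
        return [('' if size == 1 or i > 0 else f'{"x".join(map(str,shape))}:')+label for i in range(size)]
    if how == 'linear':    # 1_F 2_F ... 9_F (one running index per component)
        return [('' if size == 1 else f'{i+1}_')+label for i in range(size)]
    raise KeyError(how)

print(expand('F', (3,3), 'shapes')[:2])   # ['3x3:F', 'F']
print(expand('F', (3,3), 'linear')[:2])   # ['1_F', '2_F']
```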

@@ -72,6 +151,62 @@ class Table:
  self.comments.append(f'{specific} / {general}')

+ def isclose(self,other,rtol=1e-5,atol=1e-8,equal_nan=True):
+ """
+ Report where values are approximately equal to corresponding ones of other Table.
+
+ Parameters
+ ----------
+ other : Table
+ Table to compare against.
+ rtol : float, optional
+ Relative tolerance of equality.
+ atol : float, optional
+ Absolute tolerance of equality.
+ equal_nan : bool, optional
+ Consider matching NaN values as equal. Defaults to True.
+
+ Returns
+ -------
+ mask : numpy.ndarray bool
+ Mask indicating where corresponding table values are close.
+
+ """
+ return np.isclose( self.data.to_numpy(),
+ other.data.to_numpy(),
+ rtol=rtol,
+ atol=atol,
+ equal_nan=equal_nan)
+
+ def allclose(self,other,rtol=1e-5,atol=1e-8,equal_nan=True):
+ """
+ Test whether all values are approximately equal to corresponding ones of other Table.
+
+ Parameters
+ ----------
+ other : Table
+ Table to compare against.
+ rtol : float, optional
+ Relative tolerance of equality.
+ atol : float, optional
+ Absolute tolerance of equality.
+ equal_nan : bool, optional
+ Consider matching NaN values as equal. Defaults to True.
+
+ Returns
+ -------
+ answer : bool
+ Whether corresponding values are close between both tables.
+
+ """
+ return np.allclose( self.data.to_numpy(),
+ other.data.to_numpy(),
+ rtol=rtol,
+ atol=atol,
+ equal_nan=equal_nan)
+
  @staticmethod
  def load(fname):
  """

@@ -120,12 +255,13 @@ class Table:

  return Table(data,shapes,comments)

  @staticmethod
  def load_ang(fname):
  """
  Load from ang file.

- A valid TSL ang file needs to contains the following columns:
+ A valid TSL ang file has to have the following columns:

  - Euler angles (Bunge notation) in radians, 3 floats, label 'eu'.
  - Spatial position in meters, 2 floats, label 'pos'.

@@ -186,10 +322,6 @@ class Table:
  Array of column data.

  """
- if re.match(r'[0-9]*?_',label):
- idx,key = label.split('_',1)
- data = self.data[key].to_numpy()[:,int(idx)-1].reshape(-1,1)
- else:
  data = self.data[label].to_numpy().reshape((-1,)+self.shapes[label])

  return data.astype(type(data.flatten()[0]))

@@ -216,10 +348,12 @@ class Table:
  """
  dup = self.copy()
  dup._add_comment(label,data.shape[1:],info)

- if re.match(r'[0-9]*?_',label):
- idx,key = label.split('_',1)
- iloc = dup.data.columns.get_loc(key).tolist().index(True) + int(idx) -1
+ m = re.match(r'(.*)\[((\d+,)*(\d+))\]',label)
+ if m:
+ key = m.group(1)
+ idx = np.ravel_multi_index(tuple(map(int,m.group(2).split(","))),
+ self.shapes[key])
+ iloc = dup.data.columns.get_loc(key).tolist().index(True) + idx
  dup.data.iloc[:,iloc] = data
  else:
  dup.data[label] = data.reshape(dup.data[label].shape)

@@ -322,10 +456,18 @@ class Table:
  Updated table.

  """
+ labels_ = [labels] if isinstance(labels,str) else labels.copy()
+ for i,l in enumerate(labels_):
+ m = re.match(r'(.*)\[((\d+,)*(\d+))\]',l)
+ if m:
+ idx = np.ravel_multi_index(tuple(map(int,m.group(2).split(','))),
+ self.shapes[m.group(1)])
+ labels_[i] = f'{1+idx}_{m.group(1)}'
+
  dup = self.copy()
- dup._label_discrete()
- dup.data.sort_values(labels,axis=0,inplace=True,ascending=ascending)
- dup._label_uniform()
+ dup._relabel('linear')
+ dup.data.sort_values(labels_,axis=0,inplace=True,ascending=ascending)
+ dup._relabel('uniform')
  dup.comments.append(f'sorted {"ascending" if ascending else "descending"} by {labels}')
  return dup
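
The `set` and `sort_by` hunks above replace the old positional component labels ('1_F', '4_F', ...) with bracketed multi-indices such as 'F[0,0]' or 'F[1,0]', converted to a flat column offset via `numpy.ravel_multi_index` against the column's shape. A small sketch of that conversion; the regex and the (3,3) shape are taken from the hunks, the example label is hypothetical:

```python
import re
import numpy as np

shapes = {'F': (3, 3)}            # a 3x3 tensor column, as in the tests
label  = 'F[1,0]'                 # hypothetical user input

m = re.match(r'(.*)\[((\d+,)*(\d+))\]', label)
key = m.group(1)                  # 'F'
idx = np.ravel_multi_index(tuple(map(int, m.group(2).split(','))), shapes[key])

print(key, idx)                   # F 3 -> fourth flat component, i.e. the old '4_F'
```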

@@ -399,7 +399,7 @@ def DREAM3D_cell_data_group(fname):
  """
  base_group = DREAM3D_base_group(fname)
  with h5py.File(fname,'r') as f:
- cells = tuple(f['/'.join((base_group,'_SIMPL_GEOMETRY','DIMENSIONS'))][()][::-1])
+ cells = tuple(f['/'.join([base_group,'_SIMPL_GEOMETRY','DIMENSIONS'])][()][::-1])
  cell_data_group = f[base_group].visititems(lambda path,obj: path.split('/')[0] \
  if isinstance(obj,h5py._hl.dataset.Dataset) and np.shape(obj)[:-1] == cells \
  else None)

@@ -2,6 +2,7 @@ import setuptools
  from pathlib import Path
  import re

+ # https://www.python.org/dev/peps/pep-0440
  with open(Path(__file__).parent/'damask/VERSION') as f:
  version = re.sub(r'(-([^-]*)).*$',r'.\2',re.sub(r'^v(\d+\.\d+(\.\d+)?)',r'\1',f.readline().strip()))
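
The version string used by `setup.py` is derived from the `damask/VERSION` file with the two nested `re.sub` calls shown above, which strip the leading 'v' and collapse a git-describe style suffix into a short dotted form (the comment in the hunk points to PEP 440). A worked example with a hypothetical VERSION content:

```python
import re

raw = 'v3.0.0-alpha2-33-g1234567'   # hypothetical content of damask/VERSION (git describe style)

# inner sub: drop the leading 'v' from the release part
step1 = re.sub(r'^v(\d+\.\d+(\.\d+)?)', r'\1', raw)   # '3.0.0-alpha2-33-g1234567'
# outer sub: keep only the first dash-separated tag, joined with a dot
version = re.sub(r'(-([^-]*)).*$', r'.\2', step1)     # '3.0.0.alpha2'

print(version)
```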

@@ -86,9 +86,12 @@ class TestConfigMaterial:

  def test_from_table(self):
  N = np.random.randint(3,10)
- a = np.vstack((np.hstack((np.arange(N),np.arange(N)[::-1])),np.ones(N*2),np.zeros(N*2),np.ones(N*2),np.ones(N*2))).T
- t = Table(a,{'varying':1,'constant':4})
- c = ConfigMaterial.from_table(t,**{'phase':'varying','O':'constant','homogenization':'4_constant'})
+ a = np.vstack((np.hstack((np.arange(N),np.arange(N)[::-1])),
+ np.ones(N*2),np.zeros(N*2),np.ones(N*2),np.ones(N*2),
+ np.ones(N*2),
+ )).T
+ t = Table(a,{'varying':1,'constant':4,'ones':1})
+ c = ConfigMaterial.from_table(t,**{'phase':'varying','O':'constant','homogenization':'ones'})
  assert len(c['material']) == N
  for i,m in enumerate(c['material']):
  assert m['homogenization'] == 1 and (m['constituents'][0]['O'] == [1,0,1,1]).all()

@@ -407,7 +407,8 @@ class TestGrid:
  z=np.ones(cells.prod())
  z[cells[:2].prod()*int(cells[2]/2):]=0
  t = Table(np.column_stack((coords,z)),{'coords':3,'z':1})
- g = Grid.from_table(t,'coords',['1_coords','z'])
+ t = t.add('indicator',t.get('coords')[:,0])
+ g = Grid.from_table(t,'coords',['indicator','z'])
  assert g.N_materials == g.cells[0]*2 and (g.material[:,:,-1]-g.material[:,:,0] == cells[0]).all()

@@ -269,9 +269,8 @@ class TestResult:
  last = default.view('times',default.times_in_range(0,np.inf)[-1])

  last.add_stress_Cauchy()
- with h5py.File(last.fname,'r') as f:
- created_first = default.place('sigma').dtype.metadata['created']
+ created_first = last.place('sigma').dtype.metadata['created']
  created_first = datetime.strptime(created_first,'%Y-%m-%d %H:%M:%S%z')

  if overwrite == 'on':

@@ -284,7 +283,7 @@ class TestResult:
  last.add_calculation('sigma','#sigma#*0.0+311.','not the Cauchy stress')
  except ValueError:
  pass
- with h5py.File(last.fname,'r') as f:

  created_second = last.place('sigma').dtype.metadata['created']
  created_second = datetime.strptime(created_second,'%Y-%m-%d %H:%M:%S%z')

@@ -36,13 +36,33 @@ class TestTable:
  d = default.get('F')
  assert np.allclose(d,1.0) and d.shape[1:] == (3,3)

- def test_get_component(self,default):
- d = default.get('5_F')
- assert np.allclose(d,1.0) and d.shape[1:] == (1,)
+ def test_set(self,default):
+ d = default.set('F',np.zeros((5,3,3)),'set to zero').get('F')
+ assert np.allclose(d,0.0) and d.shape[1:] == (3,3)

- @pytest.mark.parametrize('N',[10,40])
- def test_getitem(self,N):
- assert len(Table(np.random.rand(N,1),{'X':1})[:N//2]) == N//2
+ def test_set_component(self,default):
+ d = default.set('F[0,0]',np.zeros((5)),'set to zero').get('F')
+ assert np.allclose(d[...,0,0],0.0) and d.shape[1:] == (3,3)
+
+ def test_labels(self,default):
+ assert default.labels == ['F','v','s']
+
+ def test_add(self,default):
+ d = np.random.random((5,9))
+ assert np.allclose(d,default.add('nine',d,'random data').get('nine'))
+
+ def test_isclose(self,default):
+ assert default.isclose(default).all()
+
+ def test_allclose(self,default):
+ assert default.allclose(default)
+
+ @pytest.mark.parametrize('N',[1,3,4])
+ def test_slice(self,default,N):
+ assert len(default[:N]) == 1+N
+ assert len(default[:N,['F','s']]) == 1+N
+ assert default[N:].get('F').shape == (len(default)-N,3,3)
+ assert (default[:N,['v','s']].data == default['v','s'][:N].data).all().all()

  @pytest.mark.parametrize('mode',['str','path'])
  def test_write_read(self,default,tmp_path,mode):

@@ -84,21 +104,6 @@ class TestTable:
  with open(ref_path/fname) as f:
  Table.load(f)

- def test_set(self,default):
- d = default.set('F',np.zeros((5,3,3)),'set to zero').get('F')
- assert np.allclose(d,0.0) and d.shape[1:] == (3,3)
-
- def test_set_component(self,default):
- d = default.set('1_F',np.zeros((5)),'set to zero').get('F')
- assert np.allclose(d[...,0,0],0.0) and d.shape[1:] == (3,3)
-
- def test_labels(self,default):
- assert default.labels == ['F','v','s']
-
- def test_add(self,default):
- d = np.random.random((5,9))
- assert np.allclose(d,default.add('nine',d,'random data').get('nine'))

  def test_rename_equivalent(self):
  x = np.random.random((5,13))
  t = Table(x,{'F':(3,3),'v':(3,),'s':(1,)},['random test data'])

@@ -169,15 +174,15 @@ class TestTable:
  def test_sort_component(self):
  x = np.random.random((5,12))
  t = Table(x,{'F':(3,3),'v':(3,)},['random test data'])
- unsort = t.get('4_F')
- sort = t.sort_by('4_F').get('4_F')
+ unsort = t.get('F')[:,1,0]
+ sort = t.sort_by('F[1,0]').get('F')[:,1,0]
  assert np.all(np.sort(unsort,0)==sort)

  def test_sort_revert(self):
  x = np.random.random((5,12))
  t = Table(x,{'F':(3,3),'v':(3,)},['random test data'])
- sort = t.sort_by('4_F',ascending=False).get('4_F')
- assert np.all(np.sort(sort,0)==sort[::-1,:])
+ sort = t.sort_by('F[1,0]',ascending=False).get('F')[:,1,0]
+ assert np.all(np.sort(sort,0)==sort[::-1])

  def test_sort(self):
  t = Table(np.array([[0,1,],[2,1,]]),

@@ -185,4 +190,4 @@ class TestTable:
  ['test data'])\
  .add('s',np.array(['b','a']))\
  .sort_by('s')
- assert np.all(t.get('1_v') == np.array([2,0]).reshape(2,1))
+ assert np.all(t.get('v')[:,0] == np.array([2,0]))

@@ -20,16 +20,10 @@ module YAML_types
  procedure(asFormattedString), deferred :: asFormattedString
  procedure :: &
  asScalar => tNode_asScalar
- procedure :: &
- isScalar => tNode_isScalar
  procedure :: &
  asList => tNode_asList
- procedure :: &
- isList => tNode_isList
  procedure :: &
  asDict => tNode_asDict
- procedure :: &
- isDict => tNode_isDict
  procedure :: &
  tNode_get_byIndex => tNode_get_byIndex
  procedure :: &

@@ -67,9 +61,9 @@ module YAML_types
  procedure :: &
  tNode_get_byKey_as1dString => tNode_get_byKey_as1dString
  procedure :: &
- getIndex => tNode_get_byKey_asIndex
+ getKey => tNode_get_byIndex_asKey
  procedure :: &
- getKey => tNode_getKey_byIndex
+ getIndex => tNode_get_byKey_asIndex
  procedure :: &
  contains => tNode_contains
  procedure :: &

@@ -341,8 +335,6 @@ function tNode_asScalar(self) result(scalar)
  select type(self)
  class is(tScalar)
  scalar => self
- class default
- call IO_error(706,ext_msg='Expected "scalar"')
  end select

  end function tNode_asScalar

@@ -359,8 +351,6 @@ function tNode_asList(self) result(list)
  select type(self)
  class is(tList)
  list => self
- class default
- call IO_error(706,ext_msg='Expected "list"')
  end select

  end function tNode_asList

@@ -377,64 +367,11 @@ function tNode_asDict(self) result(dict)
  select type(self)
  class is(tDict)
  dict => self
- class default
- call IO_error(706,ext_msg='Expected "dict"')
  end select

  end function tNode_asDict

- !--------------------------------------------------------------------------------------------------
- !> @brief Checks if node is a scalar
- !--------------------------------------------------------------------------------------------------
- function tNode_isScalar(self) result(scalar)
-
- class(tNode), intent(in), target :: self
- logical :: scalar
-
- scalar = .false.
- select type(self)
- class is(tScalar)
- scalar = .true.
- end select
-
- end function tNode_isScalar
-
-
- !--------------------------------------------------------------------------------------------------
- !> @brief Checks if node is a list
- !--------------------------------------------------------------------------------------------------
- function tNode_isList(self) result(list)
-
- class(tNode), intent(in), target :: self
- logical :: list
-
- list = .false.
- select type(self)
- class is(tList)
- list = .true.
- end select
-
- end function tNode_isList
-
-
- !--------------------------------------------------------------------------------------------------
- !> @brief Checks if node is a dict
- !--------------------------------------------------------------------------------------------------
- function tNode_isDict(self) result(dict)
-
- class(tNode), intent(in), target :: self
- logical :: dict
-
- dict = .false.
- select type(self)
- class is(tDict)
- dict = .true.
- end select
-
- end function tNode_isDict

  !--------------------------------------------------------------------------------------------------
  !> @brief Access by index
  !--------------------------------------------------------------------------------------------------

@@ -448,14 +385,19 @@ function tNode_get_byIndex(self,i) result(node)
  class(tItem), pointer :: item
  integer :: j

+ select type(self)
+ class is(tList)
  self_ => self%asList()
+ class default
+ call IO_error(706,ext_msg='Expected list')
+ endselect
- item => self_%first

  if (i < 1 .or. i > self_%length) call IO_error(150,ext_msg='tNode_get_byIndex')

- j = 1
+ item => self_%first
- do while(j<i)
+ do j = 2,i
  item => item%next
- j = j + 1
  enddo
  node => item%node

@@ -475,8 +417,13 @@ function tNode_get_byIndex_asFloat(self,i) result(nodeAsFloat)
  type(tScalar), pointer :: scalar

  node => self%get(i)
+ select type(node)
+ class is(tScalar)
  scalar => node%asScalar()
  nodeAsFloat = scalar%asFloat()
+ class default
+ call IO_error(706,ext_msg='Expected scalar float')
+ end select

  end function tNode_get_byIndex_asFloat

@@ -494,8 +441,13 @@ function tNode_get_byIndex_asInt(self,i) result(nodeAsInt)
  type(tScalar), pointer :: scalar

  node => self%get(i)
+ select type(node)
+ class is(tScalar)
  scalar => node%asScalar()
  nodeAsInt = scalar%asInt()
+ class default
+ call IO_error(706,ext_msg='Expected scalar integer')
+ end select

  end function tNode_get_byIndex_asInt

@@ -513,8 +465,13 @@ function tNode_get_byIndex_asBool(self,i) result(nodeAsBool)
  type(tScalar), pointer :: scalar

  node => self%get(i)
+ select type(node)
+ class is(tScalar)
  scalar => node%asScalar()
  nodeAsBool = scalar%asBool()
+ class default
+ call IO_error(706,ext_msg='Expected scalar Boolean')
+ endselect

  end function tNode_get_byIndex_asBool

@@ -532,8 +489,13 @@ function tNode_get_byIndex_asString(self,i) result(nodeAsString)
  type(tScalar), pointer :: scalar

  node => self%get(i)
+ select type(node)
+ class is(tScalar)
  scalar => node%asScalar()
  nodeAsString = scalar%asString()
+ class default
+ call IO_error(706,ext_msg='Expected scalar string')
+ endselect

  end function tNode_get_byIndex_asString

@@ -551,8 +513,13 @@ function tNode_get_byIndex_as1dFloat(self,i) result(nodeAs1dFloat)
  class(tList), pointer :: list

  node => self%get(i)
+ select type(node)
+ class is(tList)
  list => node%asList()
  nodeAs1dFloat = list%as1dFloat()
+ class default
+ call IO_error(706,ext_msg='Expected list of floats')
+ endselect

  end function tNode_get_byIndex_as1dFloat

@@ -570,8 +537,13 @@ function tNode_get_byIndex_as1dInt(self,i) result(nodeAs1dInt)
  class(tList), pointer :: list

  node => self%get(i)
+ select type(node)
+ class is(tList)
  list => node%asList()
  nodeAs1dInt = list%as1dInt()
+ class default
+ call IO_error(706,ext_msg='Expected list of integers')
+ endselect

  end function tNode_get_byIndex_as1dInt

@@ -589,8 +561,13 @@ function tNode_get_byIndex_as1dBool(self,i) result(nodeAs1dBool)
  class(tList), pointer :: list

  node => self%get(i)
+ select type(node)
+ class is(tList)
  list => node%asList()
  nodeAs1dBool = list%as1dBool()
+ class default
+ call IO_error(706,ext_msg='Expected list of Booleans')
+ endselect

  end function tNode_get_byIndex_as1dBool

@@ -608,8 +585,13 @@ function tNode_get_byIndex_as1dString(self,i) result(nodeAs1dString)
  type(tList), pointer :: list

  node => self%get(i)
+ select type(node)
+ class is(tList)
  list => node%asList()
  nodeAs1dString = list%as1dString()
+ class default
+ call IO_error(706,ext_msg='Expected list of strings')
+ endselect

  end function tNode_get_byIndex_as1dString

@@ -617,7 +599,7 @@ end function tNode_get_byIndex_as1dString
  !--------------------------------------------------------------------------------------------------
  !> @brief Returns the key in a dictionary as a string
  !--------------------------------------------------------------------------------------------------
- function tNode_getKey_byIndex(self,i) result(key)
+ function tNode_get_byIndex_asKey(self,i) result(key)

  class(tNode), intent(in), target :: self
  integer, intent(in) :: i

@@ -627,18 +609,20 @@ function tNode_getKey_byIndex(self,i) result(key)
  type(tDict), pointer :: dict
  type(tItem), pointer :: item

+ select type(self)
+ class is(tDict)
  dict => self%asDict()
  item => dict%first
- do j = 1, dict%length
- if (j == i) then
- key = item%key
- exit
- else
+ do j = 1, min(i,dict%length)-1
  item => item%next
- endif
  enddo
+ class default
+ call IO_error(706,ext_msg='Expected dict')
+ endselect

- end function tNode_getKey_byIndex
+ key = item%key

+ end function tNode_get_byIndex_asKey

  !-------------------------------------------------------------------------------------------------

@@ -655,7 +639,8 @@ function tNode_contains(self,k) result(exists)
  type(tDict), pointer :: dict

  exists = .false.
- if (self%isDict()) then
+ select type(self)
+ class is(tDict)
  dict => self%asDict()
  do j=1, dict%length
  if (dict%getKey(j) == k) then

@@ -663,7 +648,7 @@ function tNode_contains(self,k) result(exists)
  return
  endif
  enddo
- elseif (self%isList()) then
+ class is(tList)
  list => self%asList()
  do j=1, list%length
  if (list%get_asString(j) == k) then

@@ -671,9 +656,9 @@ function tNode_contains(self,k) result(exists)
  return
  endif
  enddo
- else
- call IO_error(706,ext_msg='Expected "list" or "dict"')
- endif
+ class default
+ call IO_error(706,ext_msg='Expected list or dict')
+ endselect

  end function tNode_contains

@@ -696,7 +681,12 @@ function tNode_get_byKey(self,k,defaultVal) result(node)
  found = present(defaultVal)
  if (found) node => defaultVal

+ select type(self)
+ class is(tDict)
  self_ => self%asDict()
+ class default
+ call IO_error(706,ext_msg='Expected dict for key '//k)
+ endselect

  j = 1
  item => self_%first

@@ -733,8 +723,13 @@ function tNode_get_byKey_asFloat(self,k,defaultVal) result(nodeAsFloat)

  if (self%contains(k)) then
  node => self%get(k)
+ select type(node)
+ class is(tScalar)
  scalar => node%asScalar()
  nodeAsFloat = scalar%asFloat()
+ class default
+ call IO_error(706,ext_msg='Expected scalar float for key '//k)
+ endselect
  elseif (present(defaultVal)) then
  nodeAsFloat = defaultVal
  else

@@ -759,8 +754,13 @@ function tNode_get_byKey_asInt(self,k,defaultVal) result(nodeAsInt)

  if (self%contains(k)) then
  node => self%get(k)
+ select type(node)
+ class is(tScalar)
  scalar => node%asScalar()
  nodeAsInt = scalar%asInt()
+ class default
+ call IO_error(706,ext_msg='Expected scalar integer for key '//k)
+ endselect
  elseif (present(defaultVal)) then
  nodeAsInt = defaultVal
  else

@@ -785,8 +785,13 @@ function tNode_get_byKey_asBool(self,k,defaultVal) result(nodeAsBool)

  if (self%contains(k)) then
  node => self%get(k)
+ select type(node)
+ class is(tScalar)
  scalar => node%asScalar()
  nodeAsBool = scalar%asBool()
+ class default
+ call IO_error(706,ext_msg='Expected scalar Boolean for key '//k)
+ endselect
  elseif (present(defaultVal)) then
  nodeAsBool = defaultVal
  else

@@ -811,8 +816,13 @@ function tNode_get_byKey_asString(self,k,defaultVal) result(nodeAsString)

  if (self%contains(k)) then
  node => self%get(k)
+ select type(node)
+ class is(tScalar)
  scalar => node%asScalar()
  nodeAsString = scalar%asString()
+ class default
+ call IO_error(706,ext_msg='Expected scalar string for key '//k)
+ endselect
  elseif (present(defaultVal)) then
  nodeAsString = defaultVal
  else

@@ -839,8 +849,13 @@ function tNode_get_byKey_as1dFloat(self,k,defaultVal,requiredSize) result(nodeAs

  if (self%contains(k)) then
  node => self%get(k)
+ select type(self)
+ class is(tList)
  list => node%asList()
  nodeAs1dFloat = list%as1dFloat()
+ class default
+ call IO_error(706,ext_msg='Expected 1D float array for key '//k)
+ endselect
  elseif (present(defaultVal)) then
  nodeAs1dFloat = defaultVal
  else

@@ -870,8 +885,13 @@ function tNode_get_byKey_as2dFloat(self,k,defaultVal) result(nodeAs2dFloat)

  if(self%contains(k)) then
  node => self%get(k)
+ select type(node)
+ class is(tList)
  rows => node%asList()
  nodeAs2dFloat = rows%as2dFloat()
+ class default
+ call IO_error(706,ext_msg='Expected 2D float array for key '//k)
+ endselect
  elseif(present(defaultVal)) then
  nodeAs2dFloat = defaultVal
  else

@@ -897,8 +917,13 @@ function tNode_get_byKey_as1dInt(self,k,defaultVal,requiredSize) result(nodeAs1d

  if (self%contains(k)) then
  node => self%get(k)
+ select type(node)
+ class is(tList)
  list => node%asList()
  nodeAs1dInt = list%as1dInt()
+ class default
+ call IO_error(706,ext_msg='Expected 1D integer array for key '//k)
+ endselect
  elseif (present(defaultVal)) then
  nodeAs1dInt = defaultVal
  else

@@ -927,8 +952,13 @@ function tNode_get_byKey_as1dBool(self,k,defaultVal) result(nodeAs1dBool)

  if (self%contains(k)) then
  node => self%get(k)
+ select type(node)
+ class is(tList)
  list => node%asList()
  nodeAs1dBool = list%as1dBool()
+ class default
+ call IO_error(706,ext_msg='Expected 1D Boolean array for key '//k)
+ endselect
  elseif (present(defaultVal)) then
  nodeAs1dBool = defaultVal
  else

@@ -953,8 +983,13 @@ function tNode_get_byKey_as1dString(self,k,defaultVal) result(nodeAs1dString)

  if (self%contains(k)) then
  node => self%get(k)
+ select type(node)
+ class is(tList)
  list => node%asList()
  nodeAs1dString = list%as1dString()
+ class default
+ call IO_error(706,ext_msg='Expected 1D string array for key '//k)
+ endselect
  elseif (present(defaultVal)) then
  nodeAs1dString = defaultVal
  else

@@ -981,7 +1016,6 @@ function output_as1dString(self) result(output) !ToDo: SR: Re
  output(o) = output_list%get_asString(o)
  enddo
-

  end function output_as1dString

@@ -994,24 +1028,18 @@ function tNode_get_byKey_asIndex(self,key) result(keyIndex)
  character(len=*), intent(in) :: key

  integer :: keyIndex
  integer :: i
  type(tDict), pointer :: dict
  type(tItem), pointer :: item

  dict => self%asDict()
  item => dict%first
- keyIndex = -1
- do i = 1, dict%length
- if (key == item%key) then
- keyIndex = i
- exit
- else
+ keyIndex = 1
+ do while (associated(item%next) .and. item%key /= key)
  item => item%next
- endif
+ keyIndex = keyIndex+1
  enddo

- if (keyIndex == -1) call IO_error(140,ext_msg=key)
+ if (item%key /= key) call IO_error(140,ext_msg=key)

  end function tNode_get_byKey_asIndex

@@ -1320,7 +1348,7 @@ subroutine tDict_set(self,key,node)
  if (item%key == key) exit
  item => item%next
  end do searchExisting
- if (.not. item%key == key) then
+ if (item%key /= key) then
  allocate(item%next)
  item => item%next
  self%length = self%length + 1