Merge remote-tracking branch 'origin/development' into Results.read-.place

commit 65b03aeb2d
Author: Martin Diehl
Date:   2021-04-05 10:13:08 +02:00
15 changed files with 443 additions and 263 deletions


@@ -96,7 +96,7 @@ processing:
   stage: python
   script:
     - cd $DAMASKROOT/python
-    - pytest --basetemp=${TESTROOT}/python -v --cov --cov-report=term
+    - COLUMNS=256 pytest --basetemp=${TESTROOT}/python -v --cov --cov-report=term
     - coverage report --fail-under=90
   except:
     - master

@@ -116,7 +116,7 @@ compile_grid_Intel:
   script:
     - module load $IntelCompiler $MPI_Intel $PETSc_Intel
    - cd pytest
-    - pytest -k 'compile and grid' --basetemp=${TESTROOT}/compile_grid_Intel
+    - COLUMNS=256 pytest -k 'compile and grid' --basetemp=${TESTROOT}/compile_grid_Intel
   except:
     - master
     - release

@@ -126,7 +126,7 @@ compile_mesh_Intel:
   script:
     - module load $IntelCompiler $MPI_Intel $PETSc_Intel
    - cd pytest
-    - pytest -k 'compile and mesh' --basetemp=${TESTROOT}/compile_mesh_Intel
+    - COLUMNS=256 pytest -k 'compile and mesh' --basetemp=${TESTROOT}/compile_mesh_Intel
   except:
     - master
     - release

@@ -136,7 +136,7 @@ compile_grid_GNU:
   script:
     - module load $GNUCompiler $MPI_GNU $PETSc_GNU
    - cd pytest
-    - pytest -k 'compile and grid' --basetemp=${TESTROOT}/compile_grid_GNU
+    - COLUMNS=256 pytest -k 'compile and grid' --basetemp=${TESTROOT}/compile_grid_GNU
   except:
     - master
     - release

@@ -146,7 +146,7 @@ compile_mesh_GNU:
   script:
     - module load $GNUCompiler $MPI_GNU $PETSc_GNU
    - cd pytest
-    - pytest -k 'compile and mesh' --basetemp=${TESTROOT}/compile_mesh_GNU
+    - COLUMNS=256 pytest -k 'compile and mesh' --basetemp=${TESTROOT}/compile_mesh_GNU
   except:
     - master
     - release

@@ -156,7 +156,7 @@ compile_Marc:
   script:
     - module load $IntelMarc $HDF5Marc $MSC
    - cd pytest
-    - pytest -k 'compile and Marc' --basetemp=${TESTROOT}/compile_Marc
+    - COLUMNS=256 pytest -k 'compile and Marc' --basetemp=${TESTROOT}/compile_Marc
   except:
     - master
     - release

@@ -191,7 +191,7 @@ core:
   script:
     - module load $IntelCompiler $MPI_Intel $PETSc_Intel
    - cd pytest
-    - pytest -k 'not compile' --basetemp=${TESTROOT}/fortran -v
+    - COLUMNS=256 pytest -k 'not compile' --basetemp=${TESTROOT}/fortran -v
   except:
     - master
     - release
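
The only change in each of these jobs is the COLUMNS=256 prefix. A plausible reading (not stated in the commit message) is that pytest sizes its output to the detected terminal width; a CI job has no tty, so width detection falls back to 80 columns and long parametrized test IDs get truncated. Python's width detection consults the COLUMNS environment variable first, as this small standalone sketch shows:

import os
import shutil

# shutil.get_terminal_size() checks the COLUMNS environment variable before
# querying the (absent, in CI) controlling terminal, else falls back to 80x24.
os.environ['COLUMNS'] = '256'
print(shutil.get_terminal_size())   # e.g. os.terminal_size(columns=256, lines=24)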

@@ -1 +1 @@
-Subproject commit 4511a963da5094db309a6a68783f24a23c76da81
+Subproject commit 4ce1f786dc2a613f29b2c8681fcf751d6803d38e


@@ -1 +1 @@
-v3.0.0-alpha2-662-gb36ff26cb
+v3.0.0-alpha2-753-g565dab120

python/.gitignore

@@ -2,3 +2,4 @@
 dist
 damask.egg-info
 .coverage
+.coverage.*


@@ -157,18 +157,18 @@ class ConfigMaterial(Config):
         f = h5py.File(fname,'r')

         if grain_data is None:
-            phase = f['/'.join((b,c,phases))][()].flatten()
-            O = Rotation.from_Euler_angles(f['/'.join((b,c,Euler_angles))]).as_quaternion().reshape(-1,4)       # noqa
+            phase = f['/'.join([b,c,phases])][()].flatten()
+            O = Rotation.from_Euler_angles(f['/'.join([b,c,Euler_angles])]).as_quaternion().reshape(-1,4)       # noqa
             _,idx = np.unique(np.hstack([O,phase.reshape(-1,1)]),return_index=True,axis=0)
             idx = np.sort(idx)
         else:
-            phase = f['/'.join((b,grain_data,phases))][()]
-            O = Rotation.from_Euler_angles(f['/'.join((b,grain_data,Euler_angles))]).as_quaternion()            # noqa
+            phase = f['/'.join([b,grain_data,phases])][()]
+            O = Rotation.from_Euler_angles(f['/'.join([b,grain_data,Euler_angles])]).as_quaternion()            # noqa
             idx = np.arange(phase.size)

         if cell_ensemble_data is not None and phase_names is not None:
             try:
-                names = np.array([s.decode() for s in f['/'.join((b,cell_ensemble_data,phase_names))]])
+                names = np.array([s.decode() for s in f['/'.join([b,cell_ensemble_data,phase_names])]])
                 phase = names[phase]
             except KeyError:
                 pass


@@ -305,18 +305,18 @@ class Grid:
         c = util.DREAM3D_cell_data_group(fname) if cell_data is None else cell_data
         f = h5py.File(fname, 'r')

-        cells  = f['/'.join((b,'_SIMPL_GEOMETRY','DIMENSIONS'))][()]
-        size   = f['/'.join((b,'_SIMPL_GEOMETRY','SPACING'))] * cells
-        origin = f['/'.join((b,'_SIMPL_GEOMETRY','ORIGIN'))][()]
+        cells  = f['/'.join([b,'_SIMPL_GEOMETRY','DIMENSIONS'])][()]
+        size   = f['/'.join([b,'_SIMPL_GEOMETRY','SPACING'])] * cells
+        origin = f['/'.join([b,'_SIMPL_GEOMETRY','ORIGIN'])][()]

         if feature_IDs is None:
-            phase = f['/'.join((b,c,phases))][()].reshape(-1,1)
-            O = Rotation.from_Euler_angles(f['/'.join((b,c,Euler_angles))]).as_quaternion().reshape(-1,4)       # noqa
+            phase = f['/'.join([b,c,phases])][()].reshape(-1,1)
+            O = Rotation.from_Euler_angles(f['/'.join([b,c,Euler_angles])]).as_quaternion().reshape(-1,4)       # noqa
             unique,unique_inverse = np.unique(np.hstack([O,phase]),return_inverse=True,axis=0)
             ma = np.arange(cells.prod()) if len(unique) == cells.prod() else \
                  np.arange(unique.size)[np.argsort(pd.unique(unique_inverse))][unique_inverse]
         else:
-            ma = f['/'.join((b,c,feature_IDs))][()].flatten()
+            ma = f['/'.join([b,c,feature_IDs])][()].flatten()

         return Grid(ma.reshape(cells,order='F'),size,origin,util.execution_stamp('Grid','load_DREAM3D'))


@@ -34,7 +34,7 @@ def _read(dataset):
     return np.array(dataset,dtype=dtype)

 def _match(requested,existing):
-    """Find matches among two sets of labels"""
+    """Find matches among two sets of labels."""
     def flatten_list(list_of_lists):
         return [e for e_ in list_of_lists for e in e_]
@@ -159,7 +159,7 @@ class Result:
             Select from 'set', 'add', and 'del'.
         what : str
             Attribute to change (must be from self.visible).
-        datasets : list of str or bool
+        datasets : str, int, list of str, list of int, or bool
             Name of datasets as list; supports ? and * wildcards.
             True is equivalent to [*], False is equivalent to [].
@@ -169,7 +169,7 @@ class Result:
             datasets = '*'
         elif datasets is False or datasets is None:
             datasets = []
-        choice = datasets if hasattr(datasets,'__iter__') and not isinstance(datasets,str) else \
+        choice = list(datasets).copy() if hasattr(datasets,'__iter__') and not isinstance(datasets,str) else \
                  [datasets]

         inc = 'inc' if self.version_minor < 12 else 'increment_' # compatibility hack
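
Together with the widened docstring above, copying any non-string iterable into a list means selections no longer need to be plain lists of strings. A hedged sketch of what a call site can then look like (the file name and phase label below are made up for illustration):

import damask

r = damask.Result('my_job.hdf5')            # hypothetical result file

r_first  = r.view('increments',0)            # a single int is wrapped into a one-element list
r_some   = r.view('increments',range(3))     # any non-str iterable is copied into a list
r_phases = r.view('phases',['Aluminum'])     # lists of str keep working as before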
@@ -1095,7 +1095,7 @@ class Result:
             for ty in ['phase','homogenization']:
                 for label in self.visible[ty+'s']:
                     for field in self.visible['fields']:
-                        for out in _match(output,f['/'.join((inc,ty,label,field))].keys()):
+                        for out in _match(output,f['/'.join([inc,ty,label,field])].keys()):
                             name = '/'.join([inc,ty,label,field,out])
                             shape = f[name].shape[1:]
                             dtype = f[name].dtype
@@ -1165,29 +1165,29 @@ class Result:
             at_cell_ph = []
             in_data_ph = []
             for c in range(self.N_constituents):
-                at_cell_ph.append({label: np.where(f['/'.join((grp,'phase'))][:,c][name] == label.encode())[0] \
+                at_cell_ph.append({label: np.where(f['/'.join([grp,'phase'])][:,c][name] == label.encode())[0] \
                                    for label in self.visible['phases']})
-                in_data_ph.append({label: f['/'.join((grp,'phase'))][member][at_cell_ph[c][label]][:,c] \
+                in_data_ph.append({label: f['/'.join([grp,'phase'])][member][at_cell_ph[c][label]][:,c] \
                                    for label in self.visible['phases']})

-            at_cell_ho = {label: np.where(f['/'.join((grp,'homogenization'))][:][name] == label.encode())[0] \
+            at_cell_ho = {label: np.where(f['/'.join([grp,'homogenization'])][:][name] == label.encode())[0] \
                           for label in self.visible['homogenizations']}
-            in_data_ho = {label: f['/'.join((grp,'homogenization'))][member][at_cell_ho[label]] \
+            in_data_ho = {label: f['/'.join([grp,'homogenization'])][member][at_cell_ho[label]] \
                           for label in self.visible['homogenizations']}

             for inc in util.show_progress(self.visible['increments']):
-                u = _read(f['/'.join((inc,'geometry','u_n' if mode.lower() == 'cell' else 'u_p'))])
+                u = _read(f['/'.join([inc,'geometry','u_n' if mode.lower() == 'cell' else 'u_p'])])
                 v.add(u,'u')

                 for ty in ['phase','homogenization']:
                     for field in self.visible['fields']:
                         for label in self.visible[ty+'s']:
-                            if field not in f['/'.join((inc,ty,label))].keys(): continue
+                            if field not in f['/'.join([inc,ty,label])].keys(): continue

                             outs = {}
-                            for out in _match(output,f['/'.join((inc,ty,label,field))].keys()):
-                                data = ma.array(_read(f['/'.join((inc,ty,label,field,out))]))
+                            for out in _match(output,f['/'.join([inc,ty,label,field])].keys()):
+                                data = ma.array(_read(f['/'.join([inc,ty,label,field,out])]))

                                 if ty == 'phase':
                                     if out+suffixes[0] not in outs.keys():
@@ -1205,7 +1205,7 @@ class Result:
                                 outs[out][at_cell_ho[label]] = data[in_data_ho[label]]

                             for label,dataset in outs.items():
-                                v.add(dataset,' / '.join(('/'.join((ty,field,label)),dataset.dtype.metadata['unit'])))
+                                v.add(dataset,' / '.join(['/'.join([ty,field,label]),dataset.dtype.metadata['unit']]))

                 v.save(f'{self.fname.stem}_inc{inc[ln:].zfill(N_digits)}')
@@ -1236,16 +1236,16 @@ class Result:
             for inc in util.show_progress(self.visible['increments']):
                 r[inc] = {'phase':{},'homogenization':{},'geometry':{}}

-                for out in _match(output,f['/'.join((inc,'geometry'))].keys()):
-                    r[inc]['geometry'][out] = _read(f['/'.join((inc,'geometry',out))])
+                for out in _match(output,f['/'.join([inc,'geometry'])].keys()):
+                    r[inc]['geometry'][out] = _read(f['/'.join([inc,'geometry',out])])

                 for ty in ['phase','homogenization']:
                     for label in self.visible[ty+'s']:
                         r[inc][ty][label] = {}
-                        for field in _match(self.visible['fields'],f['/'.join((inc,ty,label))].keys()):
+                        for field in _match(self.visible['fields'],f['/'.join([inc,ty,label])].keys()):
                             r[inc][ty][label][field] = {}
-                            for out in _match(output,f['/'.join((inc,ty,label,field))].keys()):
-                                r[inc][ty][label][field][out] = _read(f['/'.join((inc,ty,label,field,out))])
+                            for out in _match(output,f['/'.join([inc,ty,label,field])].keys()):
+                                r[inc][ty][label][field][out] = _read(f['/'.join([inc,ty,label,field,out])])

         if prune:   r = util.dict_prune(r)
         if flatten: r = util.dict_flatten(r)
@@ -1303,30 +1303,30 @@ class Result:
             at_cell_ph = []
             in_data_ph = []
             for c in range(self.N_constituents):
-                at_cell_ph.append({label: np.where(f['/'.join((grp,'phase'))][:,c][name] == label.encode())[0] \
+                at_cell_ph.append({label: np.where(f['/'.join([grp,'phase'])][:,c][name] == label.encode())[0] \
                                    for label in self.visible['phases']})
-                in_data_ph.append({label: f['/'.join((grp,'phase'))][member][at_cell_ph[c][label]][:,c] \
+                in_data_ph.append({label: f['/'.join([grp,'phase'])][member][at_cell_ph[c][label]][:,c] \
                                    for label in self.visible['phases']})

-            at_cell_ho = {label: np.where(f['/'.join((grp,'homogenization'))][:][name] == label.encode())[0] \
+            at_cell_ho = {label: np.where(f['/'.join([grp,'homogenization'])][:][name] == label.encode())[0] \
                           for label in self.visible['homogenizations']}
-            in_data_ho = {label: f['/'.join((grp,'homogenization'))][member][at_cell_ho[label]] \
+            in_data_ho = {label: f['/'.join([grp,'homogenization'])][member][at_cell_ho[label]] \
                           for label in self.visible['homogenizations']}

             for inc in util.show_progress(self.visible['increments']):
                 r[inc] = {'phase':{},'homogenization':{},'geometry':{}}

-                for out in _match(output,f['/'.join((inc,'geometry'))].keys()):
-                    r[inc]['geometry'][out] = _read(f['/'.join((inc,'geometry',out))])
+                for out in _match(output,f['/'.join([inc,'geometry'])].keys()):
+                    r[inc]['geometry'][out] = _read(f['/'.join([inc,'geometry',out])])

                 for ty in ['phase','homogenization']:
                     for label in self.visible[ty+'s']:
-                        for field in _match(self.visible['fields'],f['/'.join((inc,ty,label))].keys()):
+                        for field in _match(self.visible['fields'],f['/'.join([inc,ty,label])].keys()):
                             if field not in r[inc][ty].keys():
                                 r[inc][ty][field] = {}

-                            for out in _match(output,f['/'.join((inc,ty,label,field))].keys()):
-                                data = ma.array(_read(f['/'.join((inc,ty,label,field,out))]))
+                            for out in _match(output,f['/'.join([inc,ty,label,field])].keys()):
+                                data = ma.array(_read(f['/'.join([inc,ty,label,field,out])]))

                                 if ty == 'phase':
                                     if out+suffixes[0] not in r[inc][ty][field].keys():
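
The recurring '/'.join((...)) to '/'.join([...]) edits in Result, ConfigMaterial, Grid, and util change no behavior, since str.join accepts any iterable of strings; they only make the HDF5 path construction consistent across the code base. A one-line check (the phase label is illustrative):

# both spellings build the same dataset path inside the HDF5 file
assert '/'.join(('increment_10','phase','Aluminum','mechanical')) == \
       '/'.join(['increment_10','phase','Aluminum','mechanical'])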


@@ -27,20 +27,69 @@ class Table:
         self.comments = [] if comments_ is None else [c for c in comments_]
         self.data = pd.DataFrame(data=data)
         self.shapes = { k:(v,) if isinstance(v,(np.int64,np.int32,int)) else v for k,v in shapes.items() }
-        self._label_uniform()
+        self._relabel('uniform')

     def __repr__(self):
         """Brief overview."""
-        return '\n'.join(['# '+c for c in self.comments])+'\n'+self.data.__repr__()
+        self._relabel('shapes')
+        data_repr = self.data.__repr__()
+        self._relabel('uniform')
+        return '\n'.join(['# '+c for c in self.comments])+'\n'+data_repr

     def __getitem__(self,item):
-        """Return slice according to item."""
-        return self.__class__(data=self.data[item],shapes=self.shapes,comments=self.comments)
+        """
+        Slice the Table according to item.
+
+        Parameters
+        ----------
+        item : row and/or column indexer
+            Slice to select from Table.
+
+        Returns
+        -------
+        slice : Table
+            Sliced part of the Table.
+
+        Examples
+        --------
+        >>> import damask
+        >>> import numpy as np
+        >>> tbl = damask.Table(data=np.arange(12).reshape((4,3)),
+        ...                    shapes=dict(colA=(1,),colB=(1,),colC=(1,)))
+        >>> tbl['colA','colB']
+           colA  colB
+        0     0     1
+        1     3     4
+        2     6     7
+        3     9    10
+        >>> tbl[::2,['colB','colA']]
+           colB  colA
+        0     1     0
+        2     7     6
+        >>> tbl[1:2,'colB']
+           colB
+        1     4
+        2     7
+
+        """
+        item = (item,slice(None,None,None)) if isinstance(item,slice) else \
+               item if isinstance(item[0],slice) else \
+               (slice(None,None,None),item)
+        sliced = self.data.loc[item]
+        cols = np.array(sliced.columns if isinstance(sliced,pd.core.frame.DataFrame) else [item[1]])
+        _,idx = np.unique(cols,return_index=True)
+        return self.__class__(data=sliced,
+                              shapes = {k:self.shapes[k] for k in cols[np.sort(idx)]},
+                              comments=self.comments)

     def __len__(self):
         """Number of rows."""
         return len(self.data)

     def __copy__(self):
         """Create deep copy."""
         return copy.deepcopy(self)
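
One behavioral detail worth noting: the new implementation slices through pandas.DataFrame.loc, which is label-based and therefore end-inclusive on row slices; that is also why the doctest above returns rows 1 and 2 for tbl[1:2,'colB']. A quick sketch, assuming the damask package from this commit:

import numpy as np
import damask

tbl = damask.Table(np.arange(12).reshape((4,3)),
                   dict(colA=(1,),colB=(1,),colC=(1,)))

print(len(tbl[:2]))            # 3 -- rows 0, 1, and 2 (end-inclusive label slicing)
print(tbl[:2,'colB'].shapes)   # {'colB': (1,)} -- the shapes dict follows the selection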
@@ -48,21 +97,51 @@ class Table:
     copy = __copy__

-    def _label_discrete(self):
-        """Label data individually, e.g. v v v ==> 1_v 2_v 3_v."""
+    def _label(self,what,how):
+        """
+        Expand labels according to data shape.
+
+        Parameters
+        ----------
+        what : str or list
+            Labels to expand.
+        how : str
+            Mode of labeling.
+            'uniform' ==> v v v
+            'shapes' ==> 3:v v v
+            'linear' ==> 1_v 2_v 3_v
+
+        """
+        what = [what] if isinstance(what,str) else what
         labels = []
-        for label,shape in self.shapes.items():
-            size = int(np.prod(shape))
-            labels += [('' if size == 1 else f'{i+1}_')+label for i in range(size)]
-        self.data.columns = labels
+        for label in what:
+            shape = self.shapes[label]
+            size = np.prod(shape,dtype=int)
+            if how == 'uniform':
+                labels += [label] * size
+            elif how == 'shapes':
+                labels += [('' if size == 1 or i>0 else f'{util.srepr(shape,"x")}:')+label for i in range(size)]
+            elif how == 'linear':
+                labels += [('' if size == 1 else f'{i+1}_')+label for i in range(size)]
+            else:
+                raise KeyError
+        return labels

-    def _label_uniform(self):
-        """Label data uniformly, e.g. 1_v 2_v 3_v ==> v v v."""
-        labels = []
-        for label,shape in self.shapes.items():
-            labels += [label] * int(np.prod(shape))
-        self.data.columns = labels
+    def _relabel(self,how):
+        """
+        Modify labeling of data in-place.
+
+        Parameters
+        ----------
+        how : str
+            Mode of labeling.
+            'uniform' ==> v v v
+            'shapes' ==> 3:v v v
+            'linear' ==> 1_v 2_v 3_v
+
+        """
+        self.data.columns = self._label(self.shapes,how)

     def _add_comment(self,label,shape,info):
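
For orientation, a rough illustration of the three labeling modes distinguished above (_label is private, so this is only to show the naming scheme; it assumes util.srepr joins the shape entries with the given separator):

import numpy as np
import damask

t = damask.Table(np.zeros((2,10)),{'F':(3,3),'v':(1,)})

print(t._label(['F','v'],'uniform'))   # ['F', 'F', ..., 'F', 'v']        -- column names as stored
print(t._label(['F','v'],'shapes'))    # ['3x3:F', 'F', ..., 'F', 'v']    -- what the new __repr__ shows
print(t._label(['F','v'],'linear'))    # ['1_F', '2_F', ..., '9_F', 'v']  -- the old discrete labels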
@@ -72,6 +151,62 @@ class Table:
         self.comments.append(f'{specific} / {general}')

+    def isclose(self,other,rtol=1e-5,atol=1e-8,equal_nan=True):
+        """
+        Report where values are approximately equal to corresponding ones of other Table.
+
+        Parameters
+        ----------
+        other : Table
+            Table to compare against.
+        rtol : float, optional
+            Relative tolerance of equality.
+        atol : float, optional
+            Absolute tolerance of equality.
+        equal_nan : bool, optional
+            Consider matching NaN values as equal. Defaults to True.
+
+        Returns
+        -------
+        mask : numpy.ndarray bool
+            Mask indicating where corresponding table values are close.
+
+        """
+        return np.isclose( self.data.to_numpy(),
+                          other.data.to_numpy(),
+                          rtol=rtol,
+                          atol=atol,
+                          equal_nan=equal_nan)
+
+    def allclose(self,other,rtol=1e-5,atol=1e-8,equal_nan=True):
+        """
+        Test whether all values are approximately equal to corresponding ones of other Table.
+
+        Parameters
+        ----------
+        other : Table
+            Table to compare against.
+        rtol : float, optional
+            Relative tolerance of equality.
+        atol : float, optional
+            Absolute tolerance of equality.
+        equal_nan : bool, optional
+            Consider matching NaN values as equal. Defaults to True.
+
+        Returns
+        -------
+        answer : bool
+            Whether corresponding values are close between both tables.
+
+        """
+        return np.allclose( self.data.to_numpy(),
+                           other.data.to_numpy(),
+                           rtol=rtol,
+                           atol=atol,
+                           equal_nan=equal_nan)
+
     @staticmethod
     def load(fname):
         """
@@ -120,12 +255,13 @@ class Table:
         return Table(data,shapes,comments)

     @staticmethod
     def load_ang(fname):
         """
         Load from ang file.

-        A valid TSL ang file needs to contains the following columns:
+        A valid TSL ang file has to have the following columns:

         - Euler angles (Bunge notation) in radians, 3 floats, label 'eu'.
         - Spatial position in meters, 2 floats, label 'pos'.
@@ -186,11 +322,7 @@ class Table:
             Array of column data.

         """
-        if re.match(r'[0-9]*?_',label):
-            idx,key = label.split('_',1)
-            data = self.data[key].to_numpy()[:,int(idx)-1].reshape(-1,1)
-        else:
-            data = self.data[label].to_numpy().reshape((-1,)+self.shapes[label])
+        data = self.data[label].to_numpy().reshape((-1,)+self.shapes[label])

         return data.astype(type(data.flatten()[0]))
@@ -216,10 +348,12 @@ class Table:
         """
         dup = self.copy()
         dup._add_comment(label,data.shape[1:],info)
-        if re.match(r'[0-9]*?_',label):
-            idx,key = label.split('_',1)
-            iloc = dup.data.columns.get_loc(key).tolist().index(True) + int(idx) -1
+        m = re.match(r'(.*)\[((\d+,)*(\d+))\]',label)
+        if m:
+            key = m.group(1)
+            idx = np.ravel_multi_index(tuple(map(int,m.group(2).split(","))),
+                                       self.shapes[key])
+            iloc = dup.data.columns.get_loc(key).tolist().index(True) + idx
             dup.data.iloc[:,iloc] = data
         else:
             dup.data[label] = data.reshape(dup.data[label].shape)
@@ -322,10 +456,18 @@ class Table:
             Updated table.

         """
+        labels_ = [labels] if isinstance(labels,str) else labels.copy()
+        for i,l in enumerate(labels_):
+            m = re.match(r'(.*)\[((\d+,)*(\d+))\]',l)
+            if m:
+                idx = np.ravel_multi_index(tuple(map(int,m.group(2).split(','))),
+                                           self.shapes[m.group(1)])
+                labels_[i] = f'{1+idx}_{m.group(1)}'
+
         dup = self.copy()
-        dup._label_discrete()
-        dup.data.sort_values(labels,axis=0,inplace=True,ascending=ascending)
-        dup._label_uniform()
+        dup._relabel('linear')
+        dup.data.sort_values(labels_,axis=0,inplace=True,ascending=ascending)
+        dup._relabel('uniform')
         dup.comments.append(f'sorted {"ascending" if ascending else "descending"} by {labels}')
         return dup
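
The label[i,j] syntax accepted by set() and sort_by() above replaces the old 1-based '{n}_label' component names; the conversion is plain row-major raveling of the component index. A standalone sketch (not DAMASK code) of that mapping:

import re
import numpy as np

def component_to_offset(label,shapes):
    # translate e.g. 'F[1,0]' into ('F', flat 0-based column offset), as set()/sort_by() do
    m = re.match(r'(.*)\[((\d+,)*(\d+))\]',label)
    key = m.group(1)
    idx = np.ravel_multi_index(tuple(map(int,m.group(2).split(','))),shapes[key])
    return key,idx

print(component_to_offset('F[1,0]',{'F':(3,3)}))   # ('F', 3) -- the column the old label '4_F' addressed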


@@ -399,7 +399,7 @@ def DREAM3D_cell_data_group(fname):
     """
     base_group = DREAM3D_base_group(fname)
     with h5py.File(fname,'r') as f:
-        cells = tuple(f['/'.join((base_group,'_SIMPL_GEOMETRY','DIMENSIONS'))][()][::-1])
+        cells = tuple(f['/'.join([base_group,'_SIMPL_GEOMETRY','DIMENSIONS'])][()][::-1])
         cell_data_group = f[base_group].visititems(lambda path,obj: path.split('/')[0] \
                                                    if isinstance(obj,h5py._hl.dataset.Dataset) and np.shape(obj)[:-1] == cells \
                                                    else None)


@@ -2,6 +2,7 @@ import setuptools
 from pathlib import Path
 import re

+# https://www.python.org/dev/peps/pep-0440
 with open(Path(__file__).parent/'damask/VERSION') as f:
     version = re.sub(r'(-([^-]*)).*$',r'.\2',re.sub(r'^v(\d+\.\d+(\.\d+)?)',r'\1',f.readline().strip()))
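
For reference, what that double substitution produces for the VERSION string updated in this same merge (see the VERSION diff above); the added comment links PEP 440, which governs such version identifiers:

import re

raw = 'v3.0.0-alpha2-753-g565dab120'   # VERSION content after this merge
version = re.sub(r'(-([^-]*)).*$',r'.\2',
                 re.sub(r'^v(\d+\.\d+(\.\d+)?)',r'\1',raw))
print(version)   # 3.0.0.alpha2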


@@ -86,9 +86,12 @@ class TestConfigMaterial:
     def test_from_table(self):
         N = np.random.randint(3,10)
-        a = np.vstack((np.hstack((np.arange(N),np.arange(N)[::-1])),np.ones(N*2),np.zeros(N*2),np.ones(N*2),np.ones(N*2))).T
-        t = Table(a,{'varying':1,'constant':4})
-        c = ConfigMaterial.from_table(t,**{'phase':'varying','O':'constant','homogenization':'4_constant'})
+        a = np.vstack((np.hstack((np.arange(N),np.arange(N)[::-1])),
+                       np.ones(N*2),np.zeros(N*2),np.ones(N*2),np.ones(N*2),
+                       np.ones(N*2),
+                      )).T
+        t = Table(a,{'varying':1,'constant':4,'ones':1})
+        c = ConfigMaterial.from_table(t,**{'phase':'varying','O':'constant','homogenization':'ones'})
         assert len(c['material']) == N
         for i,m in enumerate(c['material']):
             assert m['homogenization'] == 1 and (m['constituents'][0]['O'] == [1,0,1,1]).all()


@@ -407,7 +407,8 @@ class TestGrid:
         z=np.ones(cells.prod())
         z[cells[:2].prod()*int(cells[2]/2):]=0
         t = Table(np.column_stack((coords,z)),{'coords':3,'z':1})
-        g = Grid.from_table(t,'coords',['1_coords','z'])
+        t = t.add('indicator',t.get('coords')[:,0])
+        g = Grid.from_table(t,'coords',['indicator','z'])
         assert g.N_materials == g.cells[0]*2 and (g.material[:,:,-1]-g.material[:,:,0] == cells[0]).all()


@@ -269,9 +269,8 @@ class TestResult:
         last = default.view('times',default.times_in_range(0,np.inf)[-1])

         last.add_stress_Cauchy()
-        with h5py.File(last.fname,'r') as f:
-            created_first = default.place('sigma').dtype.metadata['created']
+        created_first = last.place('sigma').dtype.metadata['created']
         created_first = datetime.strptime(created_first,'%Y-%m-%d %H:%M:%S%z')

         if overwrite == 'on':
@@ -284,8 +283,8 @@ class TestResult:
             last.add_calculation('sigma','#sigma#*0.0+311.','not the Cauchy stress')
         except ValueError:
             pass
-        with h5py.File(last.fname,'r') as f:
-            created_second = last.place('sigma').dtype.metadata['created']
-            created_second = datetime.strptime(created_second,'%Y-%m-%d %H:%M:%S%z')
+        created_second = last.place('sigma').dtype.metadata['created']
+        created_second = datetime.strptime(created_second,'%Y-%m-%d %H:%M:%S%z')

         if overwrite == 'on':


@@ -36,13 +36,33 @@ class TestTable:
         d = default.get('F')
         assert np.allclose(d,1.0) and d.shape[1:] == (3,3)

-    def test_get_component(self,default):
-        d = default.get('5_F')
-        assert np.allclose(d,1.0) and d.shape[1:] == (1,)
+    def test_set(self,default):
+        d = default.set('F',np.zeros((5,3,3)),'set to zero').get('F')
+        assert np.allclose(d,0.0) and d.shape[1:] == (3,3)

-    @pytest.mark.parametrize('N',[10,40])
-    def test_getitem(self,N):
-        assert len(Table(np.random.rand(N,1),{'X':1})[:N//2]) == N//2
+    def test_set_component(self,default):
+        d = default.set('F[0,0]',np.zeros((5)),'set to zero').get('F')
+        assert np.allclose(d[...,0,0],0.0) and d.shape[1:] == (3,3)
+
+    def test_labels(self,default):
+        assert default.labels == ['F','v','s']
+
+    def test_add(self,default):
+        d = np.random.random((5,9))
+        assert np.allclose(d,default.add('nine',d,'random data').get('nine'))
+
+    def test_isclose(self,default):
+        assert default.isclose(default).all()
+
+    def test_allclose(self,default):
+        assert default.allclose(default)
+
+    @pytest.mark.parametrize('N',[1,3,4])
+    def test_slice(self,default,N):
+        assert len(default[:N]) == 1+N
+        assert len(default[:N,['F','s']]) == 1+N
+        assert default[N:].get('F').shape == (len(default)-N,3,3)
+        assert (default[:N,['v','s']].data == default['v','s'][:N].data).all().all()

     @pytest.mark.parametrize('mode',['str','path'])
     def test_write_read(self,default,tmp_path,mode):
@@ -84,21 +104,6 @@ class TestTable:
             with open(ref_path/fname) as f:
                 Table.load(f)

-    def test_set(self,default):
-        d = default.set('F',np.zeros((5,3,3)),'set to zero').get('F')
-        assert np.allclose(d,0.0) and d.shape[1:] == (3,3)
-
-    def test_set_component(self,default):
-        d = default.set('1_F',np.zeros((5)),'set to zero').get('F')
-        assert np.allclose(d[...,0,0],0.0) and d.shape[1:] == (3,3)
-
-    def test_labels(self,default):
-        assert default.labels == ['F','v','s']
-
-    def test_add(self,default):
-        d = np.random.random((5,9))
-        assert np.allclose(d,default.add('nine',d,'random data').get('nine'))
-
     def test_rename_equivalent(self):
         x = np.random.random((5,13))
         t = Table(x,{'F':(3,3),'v':(3,),'s':(1,)},['random test data'])
@@ -169,15 +174,15 @@ class TestTable:
     def test_sort_component(self):
         x = np.random.random((5,12))
         t = Table(x,{'F':(3,3),'v':(3,)},['random test data'])
-        unsort = t.get('4_F')
-        sort = t.sort_by('4_F').get('4_F')
+        unsort = t.get('F')[:,1,0]
+        sort = t.sort_by('F[1,0]').get('F')[:,1,0]
         assert np.all(np.sort(unsort,0)==sort)

     def test_sort_revert(self):
         x = np.random.random((5,12))
         t = Table(x,{'F':(3,3),'v':(3,)},['random test data'])
-        sort = t.sort_by('4_F',ascending=False).get('4_F')
-        assert np.all(np.sort(sort,0)==sort[::-1,:])
+        sort = t.sort_by('F[1,0]',ascending=False).get('F')[:,1,0]
+        assert np.all(np.sort(sort,0)==sort[::-1])

     def test_sort(self):
         t = Table(np.array([[0,1,],[2,1,]]),
@@ -185,4 +190,4 @@ class TestTable:
                   ['test data'])\
                   .add('s',np.array(['b','a']))\
                   .sort_by('s')
-        assert np.all(t.get('1_v') == np.array([2,0]).reshape(2,1))
+        assert np.all(t.get('v')[:,0] == np.array([2,0]))


@@ -20,16 +20,10 @@ module YAML_types
     procedure(asFormattedString), deferred :: asFormattedString
     procedure :: &
       asScalar => tNode_asScalar
-    procedure :: &
-      isScalar => tNode_isScalar
     procedure :: &
       asList => tNode_asList
-    procedure :: &
-      isList => tNode_isList
     procedure :: &
       asDict => tNode_asDict
-    procedure :: &
-      isDict => tNode_isDict
     procedure :: &
       tNode_get_byIndex => tNode_get_byIndex
     procedure :: &
@@ -67,9 +61,9 @@ module YAML_types
     procedure :: &
       tNode_get_byKey_as1dString => tNode_get_byKey_as1dString
     procedure :: &
-      getIndex => tNode_get_byKey_asIndex
+      getKey => tNode_get_byIndex_asKey
     procedure :: &
-      getKey => tNode_getKey_byIndex
+      getIndex => tNode_get_byKey_asIndex
     procedure :: &
       contains => tNode_contains
     procedure :: &
@@ -129,7 +123,7 @@ module YAML_types
     procedure :: asFormattedString => tList_asFormattedString
     procedure :: append => tList_append
     procedure :: &
       as1dFloat => tList_as1dFloat
     procedure :: &
       as2dFloat => tList_as2dFloat
     procedure :: &
@@ -341,8 +335,6 @@ function tNode_asScalar(self) result(scalar)
   select type(self)
     class is(tScalar)
       scalar => self
-    class default
-      call IO_error(706,ext_msg='Expected "scalar"')
   end select

 end function tNode_asScalar
@@ -359,8 +351,6 @@ function tNode_asList(self) result(list)
   select type(self)
     class is(tList)
       list => self
-    class default
-      call IO_error(706,ext_msg='Expected "list"')
   end select

 end function tNode_asList
@@ -377,64 +367,11 @@ function tNode_asDict(self) result(dict)
   select type(self)
     class is(tDict)
       dict => self
-    class default
-      call IO_error(706,ext_msg='Expected "dict"')
   end select

 end function tNode_asDict


-!--------------------------------------------------------------------------------------------------
-!> @brief Checks if node is a scalar
-!--------------------------------------------------------------------------------------------------
-function tNode_isScalar(self) result(scalar)
-
-  class(tNode), intent(in), target :: self
-  logical :: scalar
-
-  scalar = .false.
-  select type(self)
-    class is(tScalar)
-      scalar = .true.
-  end select
-
-end function tNode_isScalar
-
-
-!--------------------------------------------------------------------------------------------------
-!> @brief Checks if node is a list
-!--------------------------------------------------------------------------------------------------
-function tNode_isList(self) result(list)
-
-  class(tNode), intent(in), target :: self
-  logical :: list
-
-  list = .false.
-  select type(self)
-    class is(tList)
-      list = .true.
-  end select
-
-end function tNode_isList
-
-
-!--------------------------------------------------------------------------------------------------
-!> @brief Checks if node is a dict
-!--------------------------------------------------------------------------------------------------
-function tNode_isDict(self) result(dict)
-
-  class(tNode), intent(in), target :: self
-  logical :: dict
-
-  dict = .false.
-  select type(self)
-    class is(tDict)
-      dict = .true.
-  end select
-
-end function tNode_isDict
-
-
 !--------------------------------------------------------------------------------------------------
 !> @brief Access by index
 !--------------------------------------------------------------------------------------------------
@@ -448,14 +385,19 @@ function tNode_get_byIndex(self,i) result(node)
   class(tItem), pointer :: item
   integer :: j

-  self_ => self%asList()
+  select type(self)
+    class is(tList)
+      self_ => self%asList()
+    class default
+      call IO_error(706,ext_msg='Expected list')
+  endselect

+  item => self_%first
   if (i < 1 .or. i > self_%length) call IO_error(150,ext_msg='tNode_get_byIndex')
-  j = 1
-  item => self_%first
-  do while(j<i)
+  do j = 2,i
     item => item%next
-    j = j + 1
   enddo

   node => item%node
@@ -475,8 +417,13 @@ function tNode_get_byIndex_asFloat(self,i) result(nodeAsFloat)
   type(tScalar), pointer :: scalar

   node => self%get(i)
-  scalar => node%asScalar()
-  nodeAsFloat = scalar%asFloat()
+  select type(node)
+    class is(tScalar)
+      scalar => node%asScalar()
+      nodeAsFloat = scalar%asFloat()
+    class default
+      call IO_error(706,ext_msg='Expected scalar float')
+  end select

 end function tNode_get_byIndex_asFloat
@@ -494,8 +441,13 @@ function tNode_get_byIndex_asInt(self,i) result(nodeAsInt)
   type(tScalar), pointer :: scalar

   node => self%get(i)
-  scalar => node%asScalar()
-  nodeAsInt = scalar%asInt()
+  select type(node)
+    class is(tScalar)
+      scalar => node%asScalar()
+      nodeAsInt = scalar%asInt()
+    class default
+      call IO_error(706,ext_msg='Expected scalar integer')
+  end select

 end function tNode_get_byIndex_asInt
@@ -513,8 +465,13 @@ function tNode_get_byIndex_asBool(self,i) result(nodeAsBool)
   type(tScalar), pointer :: scalar

   node => self%get(i)
-  scalar => node%asScalar()
-  nodeAsBool = scalar%asBool()
+  select type(node)
+    class is(tScalar)
+      scalar => node%asScalar()
+      nodeAsBool = scalar%asBool()
+    class default
+      call IO_error(706,ext_msg='Expected scalar Boolean')
+  endselect

 end function tNode_get_byIndex_asBool
@@ -532,8 +489,13 @@ function tNode_get_byIndex_asString(self,i) result(nodeAsString)
   type(tScalar), pointer :: scalar

   node => self%get(i)
-  scalar => node%asScalar()
-  nodeAsString = scalar%asString()
+  select type(node)
+    class is(tScalar)
+      scalar => node%asScalar()
+      nodeAsString = scalar%asString()
+    class default
+      call IO_error(706,ext_msg='Expected scalar string')
+  endselect

 end function tNode_get_byIndex_asString
@@ -547,12 +509,17 @@ function tNode_get_byIndex_as1dFloat(self,i) result(nodeAs1dFloat)
   integer, intent(in) :: i
   real(pReal), dimension(:), allocatable :: nodeAs1dFloat

   class(tNode), pointer :: node
   class(tList), pointer :: list

   node => self%get(i)
-  list => node%asList()
-  nodeAs1dFloat = list%as1dFloat()
+  select type(node)
+    class is(tList)
+      list => node%asList()
+      nodeAs1dFloat = list%as1dFloat()
+    class default
+      call IO_error(706,ext_msg='Expected list of floats')
+  endselect

 end function tNode_get_byIndex_as1dFloat
@@ -570,8 +537,13 @@ function tNode_get_byIndex_as1dInt(self,i) result(nodeAs1dInt)
   class(tList), pointer :: list

   node => self%get(i)
-  list => node%asList()
-  nodeAs1dInt = list%as1dInt()
+  select type(node)
+    class is(tList)
+      list => node%asList()
+      nodeAs1dInt = list%as1dInt()
+    class default
+      call IO_error(706,ext_msg='Expected list of integers')
+  endselect

 end function tNode_get_byIndex_as1dInt
@@ -589,8 +561,13 @@ function tNode_get_byIndex_as1dBool(self,i) result(nodeAs1dBool)
   class(tList), pointer :: list

   node => self%get(i)
-  list => node%asList()
-  nodeAs1dBool = list%as1dBool()
+  select type(node)
+    class is(tList)
+      list => node%asList()
+      nodeAs1dBool = list%as1dBool()
+    class default
+      call IO_error(706,ext_msg='Expected list of Booleans')
+  endselect

 end function tNode_get_byIndex_as1dBool
@@ -608,8 +585,13 @@ function tNode_get_byIndex_as1dString(self,i) result(nodeAs1dString)
   type(tList), pointer :: list

   node => self%get(i)
-  list => node%asList()
-  nodeAs1dString = list%as1dString()
+  select type(node)
+    class is(tList)
+      list => node%asList()
+      nodeAs1dString = list%as1dString()
+    class default
+      call IO_error(706,ext_msg='Expected list of strings')
+  endselect

 end function tNode_get_byIndex_as1dString
@@ -617,7 +599,7 @@ end function tNode_get_byIndex_as1dString
 !--------------------------------------------------------------------------------------------------
 !> @brief Returns the key in a dictionary as a string
 !--------------------------------------------------------------------------------------------------
-function tNode_getKey_byIndex(self,i) result(key)
+function tNode_get_byIndex_asKey(self,i) result(key)

   class(tNode), intent(in), target :: self
   integer, intent(in) :: i
@@ -627,18 +609,20 @@ function tNode_getKey_byIndex(self,i) result(key)
   type(tDict), pointer :: dict
   type(tItem), pointer :: item

-  dict => self%asDict()
-  item => dict%first
-  do j = 1, dict%length
-    if (j == i) then
-      key = item%key
-      exit
-    else
-      item => item%next
-    endif
-  enddo
+  select type(self)
+    class is(tDict)
+      dict => self%asDict()
+      item => dict%first
+      do j = 1, min(i,dict%length)-1
+        item => item%next
+      enddo
+    class default
+      call IO_error(706,ext_msg='Expected dict')
+  endselect

-end function tNode_getKey_byIndex
+  key = item%key

+end function tNode_get_byIndex_asKey

 !-------------------------------------------------------------------------------------------------
@@ -655,25 +639,26 @@ function tNode_contains(self,k) result(exists)
   type(tDict), pointer :: dict

   exists = .false.
-  if (self%isDict()) then
-    dict => self%asDict()
-    do j=1, dict%length
-      if (dict%getKey(j) == k) then
-        exists = .true.
-        return
-      endif
-    enddo
-  elseif (self%isList()) then
-    list => self%asList()
-    do j=1, list%length
-      if (list%get_asString(j) == k) then
-        exists = .true.
-        return
-      endif
-    enddo
-  else
-    call IO_error(706,ext_msg='Expected "list" or "dict"')
-  endif
+  select type(self)
+    class is(tDict)
+      dict => self%asDict()
+      do j=1, dict%length
+        if (dict%getKey(j) == k) then
+          exists = .true.
+          return
+        endif
+      enddo
+    class is(tList)
+      list => self%asList()
+      do j=1, list%length
+        if (list%get_asString(j) == k) then
+          exists = .true.
+          return
+        endif
+      enddo
+    class default
+      call IO_error(706,ext_msg='Expected list or dict')
+  endselect

 end function tNode_contains
@@ -696,7 +681,12 @@ function tNode_get_byKey(self,k,defaultVal) result(node)
   found = present(defaultVal)
   if (found) node => defaultVal

-  self_ => self%asDict()
+  select type(self)
+    class is(tDict)
+      self_ => self%asDict()
+    class default
+      call IO_error(706,ext_msg='Expected dict for key '//k)
+  endselect

   j = 1
   item => self_%first
@@ -733,8 +723,13 @@ function tNode_get_byKey_asFloat(self,k,defaultVal) result(nodeAsFloat)
   if (self%contains(k)) then
     node => self%get(k)
-    scalar => node%asScalar()
-    nodeAsFloat = scalar%asFloat()
+    select type(node)
+      class is(tScalar)
+        scalar => node%asScalar()
+        nodeAsFloat = scalar%asFloat()
+      class default
+        call IO_error(706,ext_msg='Expected scalar float for key '//k)
+    endselect
   elseif (present(defaultVal)) then
     nodeAsFloat = defaultVal
   else
@@ -759,8 +754,13 @@ function tNode_get_byKey_asInt(self,k,defaultVal) result(nodeAsInt)
   if (self%contains(k)) then
     node => self%get(k)
-    scalar => node%asScalar()
-    nodeAsInt = scalar%asInt()
+    select type(node)
+      class is(tScalar)
+        scalar => node%asScalar()
+        nodeAsInt = scalar%asInt()
+      class default
+        call IO_error(706,ext_msg='Expected scalar integer for key '//k)
+    endselect
   elseif (present(defaultVal)) then
     nodeAsInt = defaultVal
   else
@@ -785,8 +785,13 @@ function tNode_get_byKey_asBool(self,k,defaultVal) result(nodeAsBool)
   if (self%contains(k)) then
     node => self%get(k)
-    scalar => node%asScalar()
-    nodeAsBool = scalar%asBool()
+    select type(node)
+      class is(tScalar)
+        scalar => node%asScalar()
+        nodeAsBool = scalar%asBool()
+      class default
+        call IO_error(706,ext_msg='Expected scalar Boolean for key '//k)
+    endselect
   elseif (present(defaultVal)) then
     nodeAsBool = defaultVal
   else
@@ -811,8 +816,13 @@ function tNode_get_byKey_asString(self,k,defaultVal) result(nodeAsString)
   if (self%contains(k)) then
     node => self%get(k)
-    scalar => node%asScalar()
-    nodeAsString = scalar%asString()
+    select type(node)
+      class is(tScalar)
+        scalar => node%asScalar()
+        nodeAsString = scalar%asString()
+      class default
+        call IO_error(706,ext_msg='Expected scalar string for key '//k)
+    endselect
   elseif (present(defaultVal)) then
     nodeAsString = defaultVal
   else
@@ -839,8 +849,13 @@ function tNode_get_byKey_as1dFloat(self,k,defaultVal,requiredSize) result(nodeAs
   if (self%contains(k)) then
     node => self%get(k)
-    list => node%asList()
-    nodeAs1dFloat = list%as1dFloat()
+    select type(self)
+      class is(tList)
+        list => node%asList()
+        nodeAs1dFloat = list%as1dFloat()
+      class default
+        call IO_error(706,ext_msg='Expected 1D float array for key '//k)
+    endselect
   elseif (present(defaultVal)) then
     nodeAs1dFloat = defaultVal
   else
@@ -870,8 +885,13 @@ function tNode_get_byKey_as2dFloat(self,k,defaultVal) result(nodeAs2dFloat)
   if(self%contains(k)) then
     node => self%get(k)
-    rows => node%asList()
-    nodeAs2dFloat = rows%as2dFloat()
+    select type(node)
+      class is(tList)
+        rows => node%asList()
+        nodeAs2dFloat = rows%as2dFloat()
+      class default
+        call IO_error(706,ext_msg='Expected 2D float array for key '//k)
+    endselect
   elseif(present(defaultVal)) then
     nodeAs2dFloat = defaultVal
   else
@@ -897,8 +917,13 @@ function tNode_get_byKey_as1dInt(self,k,defaultVal,requiredSize) result(nodeAs1d
   if (self%contains(k)) then
     node => self%get(k)
-    list => node%asList()
-    nodeAs1dInt = list%as1dInt()
+    select type(node)
+      class is(tList)
+        list => node%asList()
+        nodeAs1dInt = list%as1dInt()
+      class default
+        call IO_error(706,ext_msg='Expected 1D integer array for key '//k)
+    endselect
   elseif (present(defaultVal)) then
     nodeAs1dInt = defaultVal
   else
@@ -927,8 +952,13 @@ function tNode_get_byKey_as1dBool(self,k,defaultVal) result(nodeAs1dBool)
   if (self%contains(k)) then
     node => self%get(k)
-    list => node%asList()
-    nodeAs1dBool = list%as1dBool()
+    select type(node)
+      class is(tList)
+        list => node%asList()
+        nodeAs1dBool = list%as1dBool()
+      class default
+        call IO_error(706,ext_msg='Expected 1D Boolean array for key '//k)
+    endselect
   elseif (present(defaultVal)) then
     nodeAs1dBool = defaultVal
   else
@@ -953,8 +983,13 @@ function tNode_get_byKey_as1dString(self,k,defaultVal) result(nodeAs1dString)
   if (self%contains(k)) then
     node => self%get(k)
-    list => node%asList()
-    nodeAs1dString = list%as1dString()
+    select type(node)
+      class is(tList)
+        list => node%asList()
+        nodeAs1dString = list%as1dString()
+      class default
+        call IO_error(706,ext_msg='Expected 1D string array for key '//k)
+    endselect
   elseif (present(defaultVal)) then
     nodeAs1dString = defaultVal
   else
@@ -981,7 +1016,6 @@ function output_as1dString(self) result(output)                   !ToDo: SR: Re
     output(o) = output_list%get_asString(o)
   enddo

 end function output_as1dString
@@ -994,24 +1028,18 @@ function tNode_get_byKey_asIndex(self,key) result(keyIndex)
   character(len=*), intent(in) :: key

   integer :: keyIndex
-  integer :: i
   type(tDict), pointer :: dict
   type(tItem), pointer :: item

   dict => self%asDict()
   item => dict%first
-  keyIndex = -1
-  do i = 1, dict%length
-    if (key == item%key) then
-      keyIndex = i
-      exit
-    else
-      item => item%next
-    endif
-  enddo
+  keyIndex = 1
+  do while (associated(item%next) .and. item%key /= key)
+    item => item%next
+    keyIndex = keyIndex+1
+  enddo

-  if (keyIndex == -1) call IO_error(140,ext_msg=key)
+  if (item%key /= key) call IO_error(140,ext_msg=key)

 end function tNode_get_byKey_asIndex
@@ -1320,7 +1348,7 @@ subroutine tDict_set(self,key,node)
     if (item%key == key) exit
     item => item%next
   end do searchExisting
-  if (.not. item%key == key) then
+  if (item%key /= key) then
     allocate(item%next)
     item => item%next
     self%length = self%length + 1