Merge branch 'Results.add_grid_filters' into 'development'

Result.add_xxx for functions on regular grids

See merge request damask/DAMASK!394

Commit 0ef5825d35
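The three deleted standalone scripts below (curl, divergence, gradient on ASCII tables) are superseded by methods on `damask.Result` that operate directly on the HDF5 result file. A minimal sketch of the new calls (the file name and dataset labels are assumptions, they depend on the simulation output):

    import damask

    r = damask.Result('simulation.hdf5')   # hypothetical result file
    r.add_curl('F')                        # curl of a vector or tensor field
    r.add_divergence('P')                  # divergence of a vector or tensor field
    r.add_gradient('phi')                  # gradient of a scalar or vector field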
@@ -1,58 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-from io import StringIO
-from optparse import OptionParser
-
-import numpy as np
-
-import damask
-
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
-Add column(s) containing curl of requested column(s).
-Operates on periodic ordered three-dimensional data sets of vector and tensor fields.
-""", version = scriptID)
-
-parser.add_option('-p','--pos','--periodiccellcenter',
-                  dest = 'pos',
-                  type = 'string', metavar = 'string',
-                  help = 'label of coordinates [%default]')
-parser.add_option('-l','--label',
-                  dest = 'labels',
-                  action = 'extend', metavar = '<string LIST>',
-                  help = 'label(s) of field values')
-
-parser.set_defaults(pos = 'pos',
-                   )
-
-(options,filenames) = parser.parse_args()
-if filenames == []: filenames = [None]
-
-if options.labels is None: parser.error('no data column specified.')
-
-for name in filenames:
-    damask.util.report(scriptName,name)
-
-    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
-    grid,size,origin = damask.grid_filters.cellsSizeOrigin_coordinates0_point(table.get(options.pos))
-
-    for label in options.labels:
-        field = table.get(label)
-        shape = (3,) if np.prod(field.shape)//np.prod(grid) == 3 else (3,3)    # vector or tensor
-        field = field.reshape(tuple(grid)+(-1,),order='F').reshape(tuple(grid)+shape)
-        curl  = damask.grid_filters.curl(size,field)
-        table = table.add('curlFFT({})'.format(label),
-                          curl.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape),order='F'),
-                          scriptID+' '+' '.join(sys.argv[1:]))
-
-    table.save((sys.stdout if name is None else name))
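The two-step reshape in the deleted script converts the table's flat point list (x coordinate varying fastest, i.e. Fortran point order) into the (nx,ny,nz,...) array that `grid_filters` expects. A minimal numpy sketch with made-up cell counts:

    import numpy as np

    cells = (2,3,4)                                     # nx,ny,nz; x varies fastest in the table
    flat  = np.arange(np.prod(cells)*9).reshape(-1,9)   # tensor field, one row per grid point

    f = flat.reshape(cells+(-1,),order='F')             # undo Fortran point order -> (2,3,4,9)
    f = f.reshape(cells+(3,3))                          # split components (C order) -> (2,3,4,3,3)

    assert f[1,2,0,0,1] == flat[1+2*2,1]                # point (x=1,y=2,z=0) sits in table row x+nx*y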
@@ -1,58 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-from io import StringIO
-from optparse import OptionParser
-
-import numpy as np
-
-import damask
-
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
-Add column(s) containing divergence of requested column(s).
-Operates on periodic ordered three-dimensional data sets of vector and tensor fields.
-""", version = scriptID)
-
-parser.add_option('-p','--pos','--periodiccellcenter',
-                  dest = 'pos',
-                  type = 'string', metavar = 'string',
-                  help = 'label of coordinates [%default]')
-parser.add_option('-l','--label',
-                  dest = 'labels',
-                  action = 'extend', metavar = '<string LIST>',
-                  help = 'label(s) of field values')
-
-parser.set_defaults(pos = 'pos',
-                   )
-
-(options,filenames) = parser.parse_args()
-if filenames == []: filenames = [None]
-
-if options.labels is None: parser.error('no data column specified.')
-
-for name in filenames:
-    damask.util.report(scriptName,name)
-
-    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
-    grid,size,origin = damask.grid_filters.cellsSizeOrigin_coordinates0_point(table.get(options.pos))
-
-    for label in options.labels:
-        field = table.get(label)
-        shape = (3,) if np.prod(field.shape)//np.prod(grid) == 3 else (3,3)    # vector or tensor
-        field = field.reshape(tuple(grid)+(-1,),order='F').reshape(tuple(grid)+shape)
-        div   = damask.grid_filters.divergence(size,field)
-        table = table.add('divFFT({})'.format(label),
-                          div.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)//3,order='F'),
-                          scriptID+' '+' '.join(sys.argv[1:]))
-
-    table.save((sys.stdout if name is None else name))
@@ -1,58 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-from io import StringIO
-from optparse import OptionParser
-
-import numpy as np
-
-import damask
-
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
-Add column(s) containing gradient of requested column(s).
-Operates on periodic ordered three-dimensional data sets of scalar and vector fields.
-""", version = scriptID)
-
-parser.add_option('-p','--pos','--periodiccellcenter',
-                  dest = 'pos',
-                  type = 'string', metavar = 'string',
-                  help = 'label of coordinates [%default]')
-parser.add_option('-l','--label',
-                  dest = 'labels',
-                  action = 'extend', metavar = '<string LIST>',
-                  help = 'label(s) of field values')
-
-parser.set_defaults(pos = 'pos',
-                   )
-
-(options,filenames) = parser.parse_args()
-if filenames == []: filenames = [None]
-
-if options.labels is None: parser.error('no data column specified.')
-
-for name in filenames:
-    damask.util.report(scriptName,name)
-
-    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
-    grid,size,origin = damask.grid_filters.cellsSizeOrigin_coordinates0_point(table.get(options.pos))
-
-    for label in options.labels:
-        field = table.get(label)
-        shape = (1,) if np.prod(field.shape)//np.prod(grid) == 1 else (3,)    # scalar or vector
-        field = field.reshape(tuple(grid)+(-1,),order='F')
-        grad  = damask.grid_filters.gradient(size,field)
-        table = table.add('gradFFT({})'.format(label),
-                          grad.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)*3,order='F'),
-                          scriptID+' '+' '.join(sys.argv[1:]))
-
-    table.save((sys.stdout if name is None else name))
@@ -163,11 +163,11 @@ class Config(dict):
     @abc.abstractmethod
     def is_complete(self):
         """Check for completeness."""
-        pass
+        raise NotImplementedError


     @property
     @abc.abstractmethod
     def is_valid(self):
         """Check for valid file layout."""
-        pass
+        raise NotImplementedError
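Because `Config` subclasses `dict` rather than an `abc.ABC`, `@abc.abstractmethod` does not block instantiation, so the old `pass` body made the abstract properties silently return `None`. Raising makes misuse loud. A minimal sketch of the behavioral difference:

    import abc

    class Config(dict):
        @property
        @abc.abstractmethod
        def is_valid(self):
            raise NotImplementedError    # was `pass`, i.e. an implicit `return None`

    Config().is_valid                    # now raises NotImplementedError instead of yielding None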
@@ -26,6 +26,8 @@ from . import util

 h5py3 = h5py.__version__[0] == '3'

+chunk_size = 1024**2//8    # for compression in HDF5
+

 def _read(dataset):
     """Read a dataset and its metadata into a numpy.ndarray."""
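The hoisted constant sizes one compression chunk at 1 MiB of float64 data:

    chunk_size = 1024**2//8       # 131072 elements
    chunk_size * 8 == 1024**2     # True: 131072 elements x 8 bytes/float64 = 1 MiB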
@@ -107,6 +109,8 @@ class Result:
                 self.cells  = f['geometry'].attrs['cells']
                 self.size   = f['geometry'].attrs['size']
                 self.origin = f['geometry'].attrs['origin']
+            else:
+                self.add_curl = self.add_divergence = self.add_gradient = None

             r = re.compile('increment_[0-9]+')
             self.increments = sorted([i for i in f.keys() if r.match(i)],key=util.natural_sort)
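The new `else` branch guards against unstructured results: per the docstrings added further down, curl, divergence, and gradient are only defined on structured grids (grid-solver results), so on other results the methods are replaced by `None` and any call fails immediately. A sketch (file name hypothetical):

    r = damask.Result('mesh_simulation.hdf5')   # not a grid-solver result
    r.add_curl                                  # None
    r.add_curl('F')                             # TypeError: 'NoneType' object is not callable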
@@ -187,12 +191,14 @@ class Result:
         choice = list(datasets).copy() if hasattr(datasets,'__iter__') and not isinstance(datasets,str) else \
                  [datasets]

-        if what == 'increments':
+        what_ = what if what.endswith('s') else what+'s'
+
+        if what_ == 'increments':
             choice = [c if isinstance(c,str) and c.startswith('increment_') else
                       self.increments[c] if isinstance(c,int) and c<0 else
                       f'increment_{c}' for c in choice]
-        elif what == 'times':
-            what = 'increments'
+        elif what_ == 'times':
+            what_ = 'increments'
             if choice == ['*']:
                 choice = self.increments
             else:
@@ -206,18 +212,18 @@ class Result:
                 elif np.isclose(c,self.times[idx+1]):
                     choice.append(self.increments[idx+1])

-        valid = _match(choice,getattr(self,what))
-        existing = set(self.visible[what])
+        valid = _match(choice,getattr(self,what_))
+        existing = set(self.visible[what_])

         dup = self.copy()
         if action == 'set':
-            dup.visible[what] = sorted(set(valid), key=util.natural_sort)
+            dup.visible[what_] = sorted(set(valid), key=util.natural_sort)
         elif action == 'add':
             add = existing.union(valid)
-            dup.visible[what] = sorted(add, key=util.natural_sort)
+            dup.visible[what_] = sorted(add, key=util.natural_sort)
         elif action == 'del':
             diff = existing.difference(valid)
-            dup.visible[what] = sorted(diff, key=util.natural_sort)
+            dup.visible[what_] = sorted(diff, key=util.natural_sort)

         return dup

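With the `what_` normalization, singular and plural selectors are interchangeable; a sketch (a `Result` instance `r` is assumed):

    r.view('increments', -1)    # as before
    r.view('increment', -1)     # now equivalent: 'increment' is normalized to 'increments'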
@@ -1122,8 +1128,8 @@ class Result:
                 'label': f"{t}({F['label']})",
                 'meta':  {
                          'unit':        F['meta']['unit'],
-                         'description': '{} stretch tensor of {} ({})'.format('left' if t.upper() == 'V' else 'right',
-                                                                              F['label'],F['meta']['description']),
+                         'description': f"{'left' if t.upper() == 'V' else 'right'} stretch tensor "\
+                                        +f"of {F['label']} ({F['meta']['description']})",    # noqa
                          'creator':     'add_stretch_tensor'
                          }
                }
@@ -1143,7 +1149,148 @@ class Result:
         self._add_generic_pointwise(self._add_stretch_tensor,{'F':F},{'t':t})


-    def _job(self,group,func,datasets,args,lock):
+    @staticmethod
+    def _add_curl(f,size):
+        return {
+                'data':  grid_filters.curl(size,f['data']),
+                'label': f"curl({f['label']})",
+                'meta':  {
+                          'unit':        f['meta']['unit']+'/m',
+                          'description': f"curl of {f['label']} ({f['meta']['description']})",
+                          'creator':     'add_curl'
+                          }
+                 }
+    def add_curl(self,f):
+        """
+        Add curl of a field.
+
+        Parameters
+        ----------
+        f : str
+            Name of vector or tensor field dataset.
+
+        Notes
+        -----
+        This function is only available for structured grids,
+        i.e. results from the grid solver.
+
+        """
+        self._add_generic_grid(self._add_curl,{'f':f},{'size':self.size})
+
+
+    @staticmethod
+    def _add_divergence(f,size):
+        return {
+                'data':  grid_filters.divergence(size,f['data']),
+                'label': f"divergence({f['label']})",
+                'meta':  {
+                          'unit':        f['meta']['unit']+'/m',
+                          'description': f"divergence of {f['label']} ({f['meta']['description']})",
+                          'creator':     'add_divergence'
+                          }
+                 }
+    def add_divergence(self,f):
+        """
+        Add divergence of a field.
+
+        Parameters
+        ----------
+        f : str
+            Name of vector or tensor field dataset.
+
+        Notes
+        -----
+        This function is only available for structured grids,
+        i.e. results from the grid solver.
+
+        """
+        self._add_generic_grid(self._add_divergence,{'f':f},{'size':self.size})
+
+
+    @staticmethod
+    def _add_gradient(f,size):
+        return {
+                'data':  grid_filters.gradient(size,f['data'] if len(f['data'].shape) == 4 else \
+                                                    f['data'].reshape(f['data'].shape+(1,))),
+                'label': f"gradient({f['label']})",
+                'meta':  {
+                          'unit':        f['meta']['unit']+'/m',
+                          'description': f"gradient of {f['label']} ({f['meta']['description']})",
+                          'creator':     'add_gradient'
+                          }
+                 }
+    def add_gradient(self,f):
+        """
+        Add gradient of a field.
+
+        Parameters
+        ----------
+        f : str
+            Name of scalar or vector field dataset.
+
+        Notes
+        -----
+        This function is only available for structured grids,
+        i.e. results from the grid solver.
+
+        """
+        self._add_generic_grid(self._add_gradient,{'f':f},{'size':self.size})
+
+
+    def _add_generic_grid(self,func,datasets,args={},constituents=None):
+        """
+        General function to add data on a regular grid.
+
+        Parameters
+        ----------
+        func : function
+            Callback function that calculates a new dataset from one or
+            more datasets per HDF5 group.
+        datasets : dictionary
+            Details of the datasets to be used:
+            {arg (name to which the data is passed in func): label (in HDF5 file)}.
+        args : dictionary, optional
+            Arguments parsed to func.
+
+        """
+        if len(datasets) != 1 or self.N_constituents != 1:
+            raise NotImplementedError
+
+        at_cell_ph,in_data_ph,at_cell_ho,in_data_ho = self._mappings()
+
+        with h5py.File(self.fname, 'a') as f:
+            for increment in self.place(datasets.values(),False).items():
+                for ty in increment[1].items():
+                    for field in ty[1].items():
+                        d = list(field[1].values())[0]
+                        if np.any(d.mask): continue
+                        dataset = {'f':{'data':np.reshape(d.data,tuple(self.cells)+d.data.shape[1:]),
+                                        'label':list(datasets.values())[0],
+                                        'meta':d.data.dtype.metadata}}
+                        r = func(**dataset,**args)
+                        result = r['data'].reshape((-1,)+r['data'].shape[3:])
+                        for x in self.visible[ty[0]+'s']:
+                            if ty[0] == 'phase':
+                                result1 = result[at_cell_ph[0][x]]
+                            if ty[0] == 'homogenization':
+                                result1 = result[at_cell_ho[x]]
+
+                            path = '/'.join(['/',increment[0],ty[0],x,field[0]])
+                            dataset = f[path].create_dataset(r['label'],data=result1)
+
+                            now = datetime.datetime.now().astimezone()
+                            dataset.attrs['created'] = now.strftime('%Y-%m-%d %H:%M:%S%z') if h5py3 else \
+                                                       now.strftime('%Y-%m-%d %H:%M:%S%z').encode()
+
+                            for l,v in r['meta'].items():
+                                dataset.attrs[l.lower()]=v if h5py3 else v.encode()
+                            creator = dataset.attrs['creator'] if h5py3 else \
+                                      dataset.attrs['creator'].decode()
+                            dataset.attrs['creator'] = f'damask.Result.{creator} v{damask.version}' if h5py3 else \
+                                                       f'damask.Result.{creator} v{damask.version}'.encode()
+
+
+    def _job_pointwise(self,group,func,datasets,args,lock):
         """Execute job for _add_generic_pointwise."""
         try:
             datasets_in = {}
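`_add_generic_grid` hands each callback the field reshaped to `tuple(self.cells)+shape`, and `grid_filters` maps the component shapes accordingly. A sketch of the shape contract with made-up cells and size:

    import numpy as np
    from damask import grid_filters

    cells, size = (4,5,6), np.ones(3)                                # assumed grid
    curl = grid_filters.curl(size,np.random.rand(*cells,3))          # (4,5,6,3): vector -> vector
    div  = grid_filters.divergence(size,np.random.rand(*cells,3,3))  # (4,5,6,3): tensor -> vector
    grad = grid_filters.gradient(size,np.random.rand(*cells,1))      # (4,5,6,3): scalar -> vector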
@@ -1159,8 +1306,7 @@ class Result:
             return [group,r]
         except Exception as err:
             print(f'Error during calculation: {err}.')
-            return None
-
+            return [None,None]

     def _add_generic_pointwise(self,func,datasets,args={}):
         """
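Returning a two-element list on failure keeps the unpacking loop in `_add_generic_pointwise` (below) total: with the old `return None`, `for group,result in ...` would raise `TypeError: cannot unpack non-iterable NoneType object` on the first failed job. A minimal sketch of the contract:

    def job(g):
        return [None,None]                        # was: return None

    for group,result in map(job,['phase/A/F']):   # hypothetical group name; unpacking succeeds
        if not result: continue                   # failed jobs are skipped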
@@ -1178,7 +1324,6 @@ class Result:
            Arguments parsed to func.

        """
-        chunk_size = 1024**2//8
        pool = mp.Pool(int(os.environ.get('OMP_NUM_THREADS',4)))
        lock = mp.Manager().Lock()

@@ -1195,34 +1340,34 @@ class Result:
             print('No matching dataset found, no data was added.')
             return

-        default_arg = partial(self._job,func=func,datasets=datasets,args=args,lock=lock)
+        default_arg = partial(self._job_pointwise,func=func,datasets=datasets,args=args,lock=lock)

-        for result in util.show_progress(pool.imap_unordered(default_arg,groups),len(groups)):
+        for group,result in util.show_progress(pool.imap_unordered(default_arg,groups),len(groups)):
             if not result:
                 continue
             lock.acquire()
             with h5py.File(self.fname, 'a') as f:
                 try:
-                    if self._allow_modification and result[0]+'/'+result[1]['label'] in f:
-                        dataset = f[result[0]+'/'+result[1]['label']]
-                        dataset[...] = result[1]['data']
+                    if self._allow_modification and '/'.join([group,result['label']]) in f:
+                        dataset = f['/'.join([group,result['label']])]
+                        dataset[...] = result['data']
                         dataset.attrs['overwritten'] = True
                     else:
-                        if result[1]['data'].size >= chunk_size*2:
-                            shape  = result[1]['data'].shape
+                        if result['data'].size >= chunk_size*2:
+                            shape  = result['data'].shape
                             chunks = (chunk_size//np.prod(shape[1:]),)+shape[1:]
-                            dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'],
+                            dataset = f[group].create_dataset(result['label'],data=result['data'],
                                                                   maxshape=shape, chunks=chunks,
                                                                   compression='gzip', compression_opts=6,
                                                                   shuffle=True,fletcher32=True)
                         else:
-                            dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'])
+                            dataset = f[group].create_dataset(result['label'],data=result['data'])

                     now = datetime.datetime.now().astimezone()
                     dataset.attrs['created'] = now.strftime('%Y-%m-%d %H:%M:%S%z') if h5py3 else \
                                                now.strftime('%Y-%m-%d %H:%M:%S%z').encode()

-                    for l,v in result[1]['meta'].items():
+                    for l,v in result['meta'].items():
                         dataset.attrs[l.lower()]=v if h5py3 else v.encode()
                     creator = dataset.attrs['creator'] if h5py3 else \
                               dataset.attrs['creator'].decode()
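The chunk geometry keeps the per-point trailing shape intact and sizes the leading axis so each chunk holds roughly `chunk_size` elements (1 MiB of float64). A sketch with a hypothetical dataset shape:

    import numpy as np

    chunk_size = 1024**2//8                                  # now module-level, see earlier hunk
    shape  = (100000,3,3)                                    # assumed: one (3,3) tensor per point
    chunks = (chunk_size//np.prod(shape[1:]),)+shape[1:]     # (14563, 3, 3), ~1 MiB per chunk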
@@ -49,10 +49,12 @@ class TestConfig:
         assert Config({'A':np.ones(3,'i')}).__repr__() == Config({'A':[1,1,1]}).__repr__()

     def test_abstract_is_valid(self):
-        assert Config().is_valid is None
+        with pytest.raises(NotImplementedError):
+            Config().is_valid

     def test_abstract_is_complete(self):
-        assert Config().is_complete is None
+        with pytest.raises(NotImplementedError):
+            Config().is_complete

     @pytest.mark.parametrize('data',[Rotation.from_random(),Orientation.from_random(lattice='cI')])
     def test_rotation_orientation(self,data):
@@ -269,6 +269,38 @@ class TestResult:
         with pytest.raises(TypeError):
             default.add_calculation('#invalid#*2')

+    @pytest.mark.parametrize('shape',['vector','tensor'])
+    def test_add_curl(self,default,shape):
+        if shape == 'vector': default.add_calculation('#F#[:,:,0]','x','1','just a vector')
+        if shape == 'tensor': default.add_calculation('#F#[:,:,:]','x','1','just a tensor')
+        x = default.place('x')
+        default.add_curl('x')
+        in_file   = default.place('curl(x)')
+        in_memory = grid_filters.curl(default.size,x.reshape(tuple(default.cells)+x.shape[1:])).reshape(in_file.shape)
+        assert (in_file==in_memory).all()
+
+    @pytest.mark.parametrize('shape',['vector','tensor'])
+    def test_add_divergence(self,default,shape):
+        if shape == 'vector': default.add_calculation('#F#[:,:,0]','x','1','just a vector')
+        if shape == 'tensor': default.add_calculation('#F#[:,:,:]','x','1','just a tensor')
+        x = default.place('x')
+        default.add_divergence('x')
+        in_file   = default.place('divergence(x)')
+        in_memory = grid_filters.divergence(default.size,x.reshape(tuple(default.cells)+x.shape[1:])).reshape(in_file.shape)
+        assert (in_file==in_memory).all()
+
+    @pytest.mark.parametrize('shape',['scalar','pseudo_scalar','vector'])
+    def test_add_gradient(self,default,shape):
+        if shape == 'pseudo_scalar': default.add_calculation('#F#[:,0,0:1]','x','1','a pseudo scalar')
+        if shape == 'scalar': default.add_calculation('#F#[:,0,0]','x','1','just a scalar')
+        if shape == 'vector': default.add_calculation('#F#[:,:,1]','x','1','just a vector')
+        x = default.place('x').reshape((np.product(default.cells),-1))
+        default.add_gradient('x')
+        in_file   = default.place('gradient(x)')
+        in_memory = grid_filters.gradient(default.size,x.reshape(tuple(default.cells)+x.shape[1:])).reshape(in_file.shape)
+        assert (in_file==in_memory).all()
+
     @pytest.mark.parametrize('overwrite',['off','on'])
     def test_add_overwrite(self,default,overwrite):
         last = default.view('increments',-1)