regrid/map when exporting DADF5
commit d2e85ef15b (parent 69e4d17be2)
@@ -1044,7 +1044,7 @@ class Grid:
         Parameters
         ----------
         idx : numpy.ndarray of int, shape (:,:,:) or (:,:,:,3)
             Grid of flat indices or coordinate indices.

         Returns
         -------
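For context, the two index forms accepted above relate as in the following sketch. The grid dimensions are hypothetical, and the x-fastest (Fortran-order) numbering is an assumption consistent with the `flatten(order='F')` call used further down in this commit:

    import numpy as np

    cells = (2,4,3)                                     # example grid dimensions (hypothetical)
    # coordinate indices, shape (:,:,:,3): one (i,j,k) triple per cell
    coord = np.stack(np.meshgrid(*map(np.arange,cells),indexing='ij'),axis=-1)
    # equivalent flat indices, shape (:,:,:), assuming x-fastest (Fortran-order) numbering
    flat  = np.ravel_multi_index((coord[...,0],coord[...,1],coord[...,2]),cells,order='F')
    # i.e. flat == coord[...,0] + cells[0]*(coord[...,1] + cells[1]*coord[...,2])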
@@ -1927,7 +1927,8 @@ class Result:

     def export_DADF5(self,
                      fname,
-                     output: Union[str, List[str]] = '*'):
+                     output: Union[str, List[str]] = '*',
+                     mapping = None):
         """
         Export visible components into a new DADF5 file.

@@ -1941,20 +1942,61 @@ class Result:
         output : (list of) str, optional
             Names of the datasets to export.
             Defaults to '*', in which case all visible datasets are exported.
+        mapping : numpy.ndarray of int, shape (:,:,:), optional
+            Indices for regridding.

         """
         if Path(fname).expanduser().absolute() == self.fname:
             raise PermissionError(f'cannot overwrite {self.fname}')

+        def cp(path_in,path_out,label,mapping):
+            if mapping is None:
+                path_in.copy(label,path_out)
+            else:
+                path_out.create_dataset(label,data=path_in[label][()][mapping])
+                path_out[label].attrs.update(path_in[label].attrs)
+
         with h5py.File(self.fname,'r') as f_in, h5py.File(fname,'w') as f_out:
-            for k,v in f_in.attrs.items():
-                f_out.attrs.create(k,v)
-            for g in ['setup','geometry','cell_to']:
+            f_out.attrs.update(f_in.attrs)
+            for g in ['setup','geometry'] + (['cell_to'] if mapping is None else []):
                 f_in.copy(g,f_out)

+            if mapping is not None:
+                cells = mapping.shape
+                mapping_flat = mapping.flatten(order='F')
+                f_out['geometry'].attrs['cells'] = cells
+                f_out.create_group('cell_to')                               # ToDo: attribute missing
+                mappings = {'phase':{},'homogenization':{}}                 # type: ignore
+
+                mapping_phase = f_in['cell_to']['phase'][()][mapping_flat]
+                for p in np.unique(mapping_phase['label']):
+                    m = mapping_phase['label'] == p
+                    mappings['phase'][p] = mapping_phase[m]['entry']
+                    c = np.count_nonzero(m)
+                    mapping_phase[m] = list(zip((p,)*c,tuple(np.arange(c))))
+                f_out['cell_to'].create_dataset('phase',data=mapping_phase.reshape(np.prod(mapping_flat.shape),-1))
+
+                mapping_homog = f_in['cell_to']['homogenization'][()][mapping]
+                for h in np.unique(mapping_homog['label']):
+                    m = mapping_homog['label'] == h
+                    mappings['homogenization'][h] = mapping_homog[m]['entry']
+                    c = np.count_nonzero(m)
+                    mapping_homog[mapping_homog['label'] == h] = list(zip((h,)*c,tuple(np.arange(c))))
+                f_out['cell_to'].create_dataset('homogenization',data=mapping_homog.flatten())
+
             for inc in util.show_progress(self.visible['increments']):
                 f_in.copy(inc,f_out,shallow=True)
-                for out in _match(output,f_in['/'.join([inc,'geometry'])].keys()):
-                    f_in[inc]['geometry'].copy(out,f_out[inc]['geometry'])
+                if mapping is None:
+                    for label in ['u_p','u_n']:
+                        f_in[inc]['geometry'].copy(label,f_out[inc]['geometry'])
+                else:
+                    u_p = f_in[inc]['geometry']['u_p'][()][mapping_flat]
+                    f_out[inc]['geometry'].create_dataset('u_p',data=u_p)
+                    u_n = np.zeros((len(mapping_flat),3))                   # ToDo: needs implementation
+                    f_out[inc]['geometry'].create_dataset('u_n',data=u_n)

                 for label in self.homogenizations:
                     f_in[inc]['homogenization'].copy(label,f_out[inc]['homogenization'],shallow=True)
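The per-label re-enumeration of the `cell_to` tables above can be hard to parse. The following is an illustrative sketch of the same idea on a plain structured array; the labels and entry values are made up, only the renumbering pattern mirrors the code in this hunk:

    import numpy as np

    # stand-in for one 'cell_to' table after applying the regridding indices:
    # each row is (label, entry into that label's per-constituent datasets)
    cell_to = np.array([(b'A',0),(b'B',5),(b'A',0),(b'A',2)],
                       dtype=[('label','S8'),('entry',np.int64)])

    mappings = {}
    for p in np.unique(cell_to['label']):
        m = cell_to['label'] == p
        mappings[p] = cell_to[m]['entry']               # old entries, used later to gather the datasets
        c = np.count_nonzero(m)
        cell_to[m] = list(zip((p,)*c,np.arange(c)))     # renumber entries consecutively per label

    # mappings[b'A'] -> array([0, 0, 2]), mappings[b'B'] -> array([5]);
    # cell_to now reads [(b'A',0),(b'B',0),(b'A',1),(b'A',2)]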
@@ -1966,7 +2008,7 @@ class Result:
                     for field in _match(self.visible['fields'],f_in['/'.join([inc,ty,label])].keys()):
                         p = '/'.join([inc,ty,label,field])
                         for out in _match(output,f_in[p].keys()):
-                            f_in[p].copy(out,f_out[p])
+                            cp(f_in[p],f_out[p],out,None if mapping is None else mappings[ty][label.encode()])


     def export_simulation_setup(self,
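Taken together, the new keyword is meant to be fed a mapping produced by `grid_filters.regrid`, e.g. to resample the results onto a grid with twice the cells per direction. A minimal usage sketch; the file names are hypothetical, and the call pattern simply mirrors the new test added below:

    import numpy as np
    from damask import Result, grid_filters

    r = Result('my_job.hdf5')                                   # hypothetical existing DADF5 file
    F = np.broadcast_to(np.eye(3),tuple(r.cells)+(3,3))         # identity deformation gradient per cell
    m = grid_filters.regrid(r.size,F,r.cells*2)                 # indices onto a twice-as-fine grid
    r.export_DADF5('my_job_regridded.hdf5',mapping=m)           # regrid/map while exporting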
@@ -603,11 +603,13 @@ class TestResult:
         assert 'material.yaml' in os.listdir(absdir); (absdir/'material.yaml').unlink()

     @pytest.mark.parametrize('fname',['4grains2x4x3_compressionY.hdf5',
-                                      '6grains6x7x8_single_phase_tensionY.hdf5'])
+                                      '6grains6x7x8_single_phase_tensionY.hdf5',
+                                      '12grains6x7x8_tensionY.hdf5',
+                                      'check_compile_job1.hdf5',])
     def test_export_DADF5(self,ref_path,tmp_path,fname):
         r = Result(ref_path/fname)
         r = r.view(phases = random.sample(r.phases,1))
-        r = r.view(increments = random.sample(r.increments,np.random.randint(2,len(r.increments))))
+        r = r.view(increments = random.sample(r.increments,np.random.randint(1,len(r.increments))))
         r.export_DADF5(tmp_path/fname)
         r_exp = Result(tmp_path/fname)
         assert str(r.get()) == str(r_exp.get())
@@ -619,3 +621,12 @@ class TestResult:
         r = Result(ref_path/fname)
         with pytest.raises(PermissionError):
             r.export_DADF5(r.fname)
+
+    @pytest.mark.parametrize('fname',['4grains2x4x3_compressionY.hdf5',
+                                      '6grains6x7x8_single_phase_tensionY.hdf5',
+                                      '12grains6x7x8_tensionY.hdf5'])
+    def test_export_DADF5_regrid(self,ref_path,tmp_path,fname):
+        r = Result(ref_path/fname)
+        m = grid_filters.regrid(r.size,np.broadcast_to(np.eye(3),tuple(r.cells)+(3,3)),r.cells*2)
+        r.export_DADF5(tmp_path/'regridded.hdf5',mapping=m)
+        assert np.all(Result(tmp_path/'regridded.hdf5').cells == r.cells*2)
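The new test regrids with an identity deformation gradient, i.e. a pure nearest-neighbour resampling onto a grid with doubled cells, and only asserts that the exported file reports the doubled cell count. For intuition about what such a mapping contains, here is a hypothetical stand-in; `nearest_mapping` is not a DAMASK function, and it assumes the mapping holds flat indices with x fastest into the per-cell datasets, consistent with how `export_DADF5` applies it:

    import numpy as np

    def nearest_mapping(cells,cells_new):
        """Hypothetical nearest-neighbour index map from an undeformed grid onto a finer grid."""
        nearest = [np.minimum(((np.arange(n)+0.5)*c/n).astype(int),c-1)
                   for c,n in zip(cells,cells_new)]
        i,j,k = np.meshgrid(*nearest,indexing='ij')
        return np.ravel_multi_index((i,j,k),cells,order='F')    # shape cells_new, flat indices into old data

    m = nearest_mapping((2,4,3),(4,8,6))                        # doubling each direction, as in the test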