diff --git a/python/damask/_result.py b/python/damask/_result.py index fb24f8f56..95dbd9fd3 100644 --- a/python/damask/_result.py +++ b/python/damask/_result.py @@ -109,7 +109,7 @@ class Result: if self.version_major != 0 or not 12 <= self.version_minor <= 14: raise TypeError(f'unsupported DADF5 version "{self.version_major}.{self.version_minor}"') if self.version_major == 0 and self.version_minor < 14: - self.export_setup = None # type: ignore + self.export_simulation_setup = None # type: ignore self.structured = 'cells' in f['geometry'].attrs.keys() @@ -561,6 +561,14 @@ class Result: print(f'Function {func.__name__} enabled in add_calculation.') + @property + def simulation_setup_files(self): + """Simulation setup files used to generate the Result object.""" + files = [] + with h5py.File(self.fname,'r') as f_in: + f_in['setup'].visititems(lambda name,obj: files.append(name) if isinstance(obj,h5py.Dataset) else None) + return files + @property def incs(self): return [int(i.split(prefix_inc)[-1]) for i in self.increments] @@ -1515,6 +1523,166 @@ class Result: pool.join() + def _mappings(self): + """Mappings to place data spatially.""" + with h5py.File(self.fname,'r') as f: + + at_cell_ph = [] + in_data_ph = [] + for c in range(self.N_constituents): + at_cell_ph.append({label: np.where(self.phase[:,c] == label)[0] \ + for label in self.visible['phases']}) + in_data_ph.append({label: f['/'.join(['cell_to','phase'])]['entry'][at_cell_ph[c][label]][:,c] \ + for label in self.visible['phases']}) + + at_cell_ho = {label: np.where(self.homogenization[:] == label)[0] \ + for label in self.visible['homogenizations']} + in_data_ho = {label: f['/'.join(['cell_to','homogenization'])]['entry'][at_cell_ho[label]] \ + for label in self.visible['homogenizations']} + + return at_cell_ph,in_data_ph,at_cell_ho,in_data_ho + + + def get(self, + output: Union[str, List[str]] = '*', + flatten: bool = True, + prune: bool = True): + """ + Collect data per phase/homogenization reflecting the group/folder structure in the DADF5 file. + + Parameters + ---------- + output : (list of) str, optional + Names of the datasets to read. + Defaults to '*', in which case all datasets are read. + flatten : bool, optional + Remove singular levels of the folder hierarchy. + This might be beneficial in case of single increment, + phase/homogenization, or field. Defaults to True. + prune : bool, optional + Remove branches with no data. Defaults to True. + + Returns + ------- + data : dict of numpy.ndarray + Datasets structured by phase/homogenization and according to selected view. 
+ + """ + r = {} # type: ignore + + with h5py.File(self.fname,'r') as f: + for inc in util.show_progress(self.visible['increments']): + r[inc] = {'phase':{},'homogenization':{},'geometry':{}} + + for out in _match(output,f['/'.join([inc,'geometry'])].keys()): + r[inc]['geometry'][out] = _read(f['/'.join([inc,'geometry',out])]) + + for ty in ['phase','homogenization']: + for label in self.visible[ty+'s']: + r[inc][ty][label] = {} + for field in _match(self.visible['fields'],f['/'.join([inc,ty,label])].keys()): + r[inc][ty][label][field] = {} + for out in _match(output,f['/'.join([inc,ty,label,field])].keys()): + r[inc][ty][label][field][out] = _read(f['/'.join([inc,ty,label,field,out])]) + + if prune: r = util.dict_prune(r) + if flatten: r = util.dict_flatten(r) + + return None if (type(r) == dict and r == {}) else r + + + def place(self, + output: Union[str, List[str]] = '*', + flatten: bool = True, + prune: bool = True, + constituents: IntSequence = None, + fill_float: float = np.nan, + fill_int: int = 0): + """ + Merge data into spatial order that is compatible with the damask.VTK geometry representation. + + The returned data structure reflects the group/folder structure in the DADF5 file. + + Multi-phase data is fused into a single output. + `place` is equivalent to `get` if only one phase/homogenization + and one constituent is present. + + Parameters + ---------- + output : (list of) str, optional + Names of the datasets to read. + Defaults to '*', in which case all visible datasets are placed. + flatten : bool, optional + Remove singular levels of the folder hierarchy. + This might be beneficial in case of single increment or field. + Defaults to True. + prune : bool, optional + Remove branches with no data. Defaults to True. + constituents : (list of) int, optional + Constituents to consider. + Defaults to None, in which case all constituents are considered. + fill_float : float, optional + Fill value for non-existent entries of floating point type. + Defaults to NaN. + fill_int : int, optional + Fill value for non-existent entries of integer type. + Defaults to 0. + + Returns + ------- + data : dict of numpy.ma.MaskedArray + Datasets structured by spatial position and according to selected view. 
+ + """ + r = {} # type: ignore + + constituents_ = list(map(int,constituents)) if isinstance(constituents,Iterable) else \ + (range(self.N_constituents) if constituents is None else [constituents]) # type: ignore + + suffixes = [''] if self.N_constituents == 1 or isinstance(constituents,int) else \ + [f'#{c}' for c in constituents_] + + at_cell_ph,in_data_ph,at_cell_ho,in_data_ho = self._mappings() + + with h5py.File(self.fname,'r') as f: + + for inc in util.show_progress(self.visible['increments']): + r[inc] = {'phase':{},'homogenization':{},'geometry':{}} + + for out in _match(output,f['/'.join([inc,'geometry'])].keys()): + r[inc]['geometry'][out] = ma.array(_read(f['/'.join([inc,'geometry',out])]),fill_value = fill_float) + + for ty in ['phase','homogenization']: + for label in self.visible[ty+'s']: + for field in _match(self.visible['fields'],f['/'.join([inc,ty,label])].keys()): + if field not in r[inc][ty].keys(): + r[inc][ty][field] = {} + + for out in _match(output,f['/'.join([inc,ty,label,field])].keys()): + data = ma.array(_read(f['/'.join([inc,ty,label,field,out])])) + + if ty == 'phase': + if out+suffixes[0] not in r[inc][ty][field].keys(): + for c,suffix in zip(constituents_,suffixes): + r[inc][ty][field][out+suffix] = \ + _empty_like(data,self.N_materialpoints,fill_float,fill_int) + + for c,suffix in zip(constituents_,suffixes): + r[inc][ty][field][out+suffix][at_cell_ph[c][label]] = data[in_data_ph[c][label]] + + if ty == 'homogenization': + if out not in r[inc][ty][field].keys(): + r[inc][ty][field][out] = \ + _empty_like(data,self.N_materialpoints,fill_float,fill_int) + + r[inc][ty][field][out][at_cell_ho[label]] = data[in_data_ho[label]] + + if prune: r = util.dict_prune(r) + if flatten: r = util.dict_flatten(r) + + return None if (type(r) == dict and r == {}) else r + + def export_XDMF(self, output: Union[str, List[str]] = '*', target_dir: Union[str, Path] = None, @@ -1642,26 +1810,6 @@ class Result: f.write(xml.dom.minidom.parseString(ET.tostring(xdmf).decode()).toprettyxml()) - def _mappings(self): - """Mappings to place data spatially.""" - with h5py.File(self.fname,'r') as f: - - at_cell_ph = [] - in_data_ph = [] - for c in range(self.N_constituents): - at_cell_ph.append({label: np.where(self.phase[:,c] == label)[0] \ - for label in self.visible['phases']}) - in_data_ph.append({label: f['/'.join(['cell_to','phase'])]['entry'][at_cell_ph[c][label]][:,c] \ - for label in self.visible['phases']}) - - at_cell_ho = {label: np.where(self.homogenization[:] == label)[0] \ - for label in self.visible['homogenizations']} - in_data_ho = {label: f['/'.join(['cell_to','homogenization'])]['entry'][at_cell_ho[label]] \ - for label in self.visible['homogenizations']} - - return at_cell_ph,in_data_ph,at_cell_ho,in_data_ho - - def export_VTK(self, output: Union[str,List[str]] = '*', mode: str = 'cell', @@ -1682,7 +1830,7 @@ class Result: ---------- output : (list of) str, optional Names of the datasets to export to the VTK file. - Defaults to '*', in which case all datasets are exported. + Defaults to '*', in which case all visible datasets are exported. mode : {'cell', 'point'}, optional Export in cell format or point format. Defaults to 'cell'. @@ -1766,54 +1914,6 @@ class Result: v.save(vtk_dir/f'{self.fname.stem}_inc{inc.split(prefix_inc)[-1].zfill(N_digits)}', parallel=parallel) - def get(self, - output: Union[str, List[str]] = '*', - flatten: bool = True, - prune: bool = True): - """ - Collect data per phase/homogenization reflecting the group/folder structure in the DADF5 file. 
- - Parameters - ---------- - output : (list of) str, optional - Names of the datasets to read. - Defaults to '*', in which case all datasets are read. - flatten : bool, optional - Remove singular levels of the folder hierarchy. - This might be beneficial in case of single increment, - phase/homogenization, or field. Defaults to True. - prune : bool, optional - Remove branches with no data. Defaults to True. - - Returns - ------- - data : dict of numpy.ndarray - Datasets structured by phase/homogenization and according to selected view. - - """ - r = {} # type: ignore - - with h5py.File(self.fname,'r') as f: - for inc in util.show_progress(self.visible['increments']): - r[inc] = {'phase':{},'homogenization':{},'geometry':{}} - - for out in _match(output,f['/'.join([inc,'geometry'])].keys()): - r[inc]['geometry'][out] = _read(f['/'.join([inc,'geometry',out])]) - - for ty in ['phase','homogenization']: - for label in self.visible[ty+'s']: - r[inc][ty][label] = {} - for field in _match(self.visible['fields'],f['/'.join([inc,ty,label])].keys()): - r[inc][ty][label][field] = {} - for out in _match(output,f['/'.join([inc,ty,label,field])].keys()): - r[inc][ty][label][field][out] = _read(f['/'.join([inc,ty,label,field,out])]) - - if prune: r = util.dict_prune(r) - if flatten: r = util.dict_flatten(r) - - return None if (type(r) == dict and r == {}) else r - - def export_DADF5(self, fname, output: Union[str, List[str]] = '*'): @@ -1858,116 +1958,23 @@ class Result: f_in[p].copy(out,f_out[p]) - def place(self, - output: Union[str, List[str]] = '*', - flatten: bool = True, - prune: bool = True, - constituents: IntSequence = None, - fill_float: float = np.nan, - fill_int: int = 0): - """ - Merge data into spatial order that is compatible with the damask.VTK geometry representation. - - The returned data structure reflects the group/folder structure - in the DADF5 file. - - Multi-phase data is fused into a single output. - `place` is equivalent to `get` if only one phase/homogenization - and one constituent is present. - - Parameters - ---------- - output : (list of) str, optional - Names of the datasets to read. - Defaults to '*', in which case all datasets are placed. - flatten : bool, optional - Remove singular levels of the folder hierarchy. - This might be beneficial in case of single increment or field. - Defaults to True. - prune : bool, optional - Remove branches with no data. Defaults to True. - constituents : (list of) int, optional - Constituents to consider. - Defaults to None, in which case all constituents are considered. - fill_float : float, optional - Fill value for non-existent entries of floating point type. - Defaults to NaN. - fill_int : int, optional - Fill value for non-existent entries of integer type. - Defaults to 0. - - Returns - ------- - data : dict of numpy.ma.MaskedArray - Datasets structured by spatial position and according to selected view. 
- - """ - r = {} # type: ignore - - constituents_ = list(map(int,constituents)) if isinstance(constituents,Iterable) else \ - (range(self.N_constituents) if constituents is None else [constituents]) # type: ignore - - suffixes = [''] if self.N_constituents == 1 or isinstance(constituents,int) else \ - [f'#{c}' for c in constituents_] - - at_cell_ph,in_data_ph,at_cell_ho,in_data_ho = self._mappings() - - with h5py.File(self.fname,'r') as f: - - for inc in util.show_progress(self.visible['increments']): - r[inc] = {'phase':{},'homogenization':{},'geometry':{}} - - for out in _match(output,f['/'.join([inc,'geometry'])].keys()): - r[inc]['geometry'][out] = ma.array(_read(f['/'.join([inc,'geometry',out])]),fill_value = fill_float) - - for ty in ['phase','homogenization']: - for label in self.visible[ty+'s']: - for field in _match(self.visible['fields'],f['/'.join([inc,ty,label])].keys()): - if field not in r[inc][ty].keys(): - r[inc][ty][field] = {} - - for out in _match(output,f['/'.join([inc,ty,label,field])].keys()): - data = ma.array(_read(f['/'.join([inc,ty,label,field,out])])) - - if ty == 'phase': - if out+suffixes[0] not in r[inc][ty][field].keys(): - for c,suffix in zip(constituents_,suffixes): - r[inc][ty][field][out+suffix] = \ - _empty_like(data,self.N_materialpoints,fill_float,fill_int) - - for c,suffix in zip(constituents_,suffixes): - r[inc][ty][field][out+suffix][at_cell_ph[c][label]] = data[in_data_ph[c][label]] - - if ty == 'homogenization': - if out not in r[inc][ty][field].keys(): - r[inc][ty][field][out] = \ - _empty_like(data,self.N_materialpoints,fill_float,fill_int) - - r[inc][ty][field][out][at_cell_ho[label]] = data[in_data_ho[label]] - - if prune: r = util.dict_prune(r) - if flatten: r = util.dict_flatten(r) - - return None if (type(r) == dict and r == {}) else r - - - def export_setup(self, + def export_simulation_setup(self, output: Union[str, List[str]] = '*', target_dir: Union[str, Path] = None, overwrite: bool = False, ): """ - Export configuration files. + Export original simulation setup of the Result object. Parameters ---------- output : (list of) str, optional Names of the datasets to export to the file. - Defaults to '*', in which case all datasets are exported. + Defaults to '*', in which case all setup files are exported. target_dir : str or pathlib.Path, optional - Directory to save configuration files. Will be created if non-existent. + Directory to save setup files. Will be created if non-existent. overwrite : bool, optional - Overwrite existing configuration files. + Overwrite any existing setup files. Defaults to False. 
""" @@ -1980,17 +1987,13 @@ class Result: cfg = cfg_dir/name if type(obj) == h5py.Dataset and _match(output,[name]): - d = obj.attrs['description'] if h5py3 else obj.attrs['description'].decode() - if overwrite or not cfg.exists(): - with util.open_text(cfg,'w') as f_out: f_out.write(obj[0].decode()) - print(f'Exported {d} to "{cfg}".') + if cfg.exists() and not overwrite: + raise PermissionError(f'"{cfg}" exists') else: - print(f'"{cfg}" exists, {d} not exported.') - elif type(obj) == h5py.Group: - cfg.mkdir(parents=True,exist_ok=True) + cfg.parent.mkdir(parents=True,exist_ok=True) + with util.open_text(cfg,'w') as f_out: f_out.write(obj[0].decode()) cfg_dir = (Path.cwd() if target_dir is None else Path(target_dir)) - cfg_dir.mkdir(parents=True,exist_ok=True) with h5py.File(self.fname,'r') as f_in: f_in['setup'].visititems(partial(export, output=output, diff --git a/python/tests/reference/Result/12grains6x7x8_tensionY.hdf5 b/python/tests/reference/Result/12grains6x7x8_tensionY.hdf5 index 6cfc9b4b4..de5082a0d 100644 Binary files a/python/tests/reference/Result/12grains6x7x8_tensionY.hdf5 and b/python/tests/reference/Result/12grains6x7x8_tensionY.hdf5 differ diff --git a/python/tests/reference/Result/12grains6x7x8_tensionY_restart.hdf5 b/python/tests/reference/Result/12grains6x7x8_tensionY_restart.hdf5 new file mode 100644 index 000000000..d3c5a8004 Binary files /dev/null and b/python/tests/reference/Result/12grains6x7x8_tensionY_restart.hdf5 differ diff --git a/python/tests/reference/Result/12grains6x7x8_tensionY_restart_restart.hdf5 b/python/tests/reference/Result/12grains6x7x8_tensionY_restart_restart.hdf5 new file mode 100644 index 000000000..64430ba9a Binary files /dev/null and b/python/tests/reference/Result/12grains6x7x8_tensionY_restart_restart.hdf5 differ diff --git a/python/tests/reference/Result/4grains2x4x3.material.yaml b/python/tests/reference/Result/4grains2x4x3.material.yaml index aebdc0338..96be5f483 100644 --- a/python/tests/reference/Result/4grains2x4x3.material.yaml +++ b/python/tests/reference/Result/4grains2x4x3.material.yaml @@ -648,7 +648,7 @@ phase: atol_xi: 1.0 dot_gamma_0_sl: 0.001 h_0_sl-sl: 75e6 - h_sl-sl: [1, 1, 1.4, 1.4, 1.4, 1.4] + h_sl-sl: [1, 1, 1.4, 1.4, 1.4, 1.4, 1.4] n_sl: 20 output: [xi_sl] type: phenopowerlaw diff --git a/python/tests/reference/Result/4grains2x4x3_compressionY.hdf5 b/python/tests/reference/Result/4grains2x4x3_compressionY.hdf5 index 369cab8fc..2d1d0ee33 100644 Binary files a/python/tests/reference/Result/4grains2x4x3_compressionY.hdf5 and b/python/tests/reference/Result/4grains2x4x3_compressionY.hdf5 differ diff --git a/python/tests/reference/Result/6grains6x7x8_single_phase_tensionY.hdf5 b/python/tests/reference/Result/6grains6x7x8_single_phase_tensionY.hdf5 index bab0b0ead..cb7838332 100644 Binary files a/python/tests/reference/Result/6grains6x7x8_single_phase_tensionY.hdf5 and b/python/tests/reference/Result/6grains6x7x8_single_phase_tensionY.hdf5 differ diff --git a/python/tests/reference/Result/export_VTK/test_export_vtk[0-0-0].md5 b/python/tests/reference/Result/export_VTK/test_export_vtk[0-0-0].md5 new file mode 100644 index 000000000..80b468fbb --- /dev/null +++ b/python/tests/reference/Result/export_VTK/test_export_vtk[0-0-0].md5 @@ -0,0 +1 @@ +85ce65348539cc823f103a29e3e237f0 diff --git a/python/tests/reference/Result/export_VTK/test_export_vtk[0-0-1].md5 b/python/tests/reference/Result/export_VTK/test_export_vtk[0-0-1].md5 new file mode 100644 index 000000000..20beaee70 --- /dev/null +++ 
b/python/tests/reference/Result/export_VTK/test_export_vtk[0-0-1].md5 @@ -0,0 +1 @@ +27972d6a0955e4e6e27a6ac5762abda8 diff --git a/python/tests/reference/Result/export_VTK/test_export_vtk[0-0-2].md5 b/python/tests/reference/Result/export_VTK/test_export_vtk[0-0-2].md5 new file mode 100644 index 000000000..baedd0b60 --- /dev/null +++ b/python/tests/reference/Result/export_VTK/test_export_vtk[0-0-2].md5 @@ -0,0 +1 @@ +3971cf9ac0a9598d3171c049ebb213f3 diff --git a/python/tests/reference/Result/export_VTK/test_export_vtk[0-0-3].md5 b/python/tests/reference/Result/export_VTK/test_export_vtk[0-0-3].md5 new file mode 100644 index 000000000..65ee728f7 --- /dev/null +++ b/python/tests/reference/Result/export_VTK/test_export_vtk[0-0-3].md5 @@ -0,0 +1 @@ +e7d52ecf304e56e6ef81702b5a5845f3 diff --git a/python/tests/reference/Result/export_VTK/test_vtk[1-0-0].md5 b/python/tests/reference/Result/export_VTK/test_export_vtk[1-0-0].md5 similarity index 100% rename from python/tests/reference/Result/export_VTK/test_vtk[1-0-0].md5 rename to python/tests/reference/Result/export_VTK/test_export_vtk[1-0-0].md5 diff --git a/python/tests/reference/Result/export_VTK/test_export_vtk[1-0-1].md5 b/python/tests/reference/Result/export_VTK/test_export_vtk[1-0-1].md5 new file mode 100644 index 000000000..9f551820c --- /dev/null +++ b/python/tests/reference/Result/export_VTK/test_export_vtk[1-0-1].md5 @@ -0,0 +1 @@ +dd71d25ccb52c3fdfd2ab727fc852a98 diff --git a/python/tests/reference/Result/export_VTK/test_export_vtk[1-0-2].md5 b/python/tests/reference/Result/export_VTK/test_export_vtk[1-0-2].md5 new file mode 100644 index 000000000..4d7ea4021 --- /dev/null +++ b/python/tests/reference/Result/export_VTK/test_export_vtk[1-0-2].md5 @@ -0,0 +1 @@ +17dfe701be0c3a5e1a29d7a2b49c3afe diff --git a/python/tests/reference/Result/export_VTK/test_export_vtk[1-0-3].md5 b/python/tests/reference/Result/export_VTK/test_export_vtk[1-0-3].md5 new file mode 100644 index 000000000..73ca96212 --- /dev/null +++ b/python/tests/reference/Result/export_VTK/test_export_vtk[1-0-3].md5 @@ -0,0 +1 @@ +ff8883bb1d5c3de706e7c69bc67a366d diff --git a/python/tests/reference/Result/export_VTK/test_vtk[0-0-0].md5 b/python/tests/reference/Result/export_VTK/test_vtk[0-0-0].md5 deleted file mode 100644 index dc8144b94..000000000 --- a/python/tests/reference/Result/export_VTK/test_vtk[0-0-0].md5 +++ /dev/null @@ -1 +0,0 @@ -a40baead936c79dd4f86f84ad858b9fa diff --git a/python/tests/reference/Result/export_VTK/test_vtk[0-0-1].md5 b/python/tests/reference/Result/export_VTK/test_vtk[0-0-1].md5 deleted file mode 100644 index b5782429c..000000000 --- a/python/tests/reference/Result/export_VTK/test_vtk[0-0-1].md5 +++ /dev/null @@ -1 +0,0 @@ -6fb37bd65934de859dd6b6e0191e7d64 diff --git a/python/tests/reference/Result/export_VTK/test_vtk[0-0-2].md5 b/python/tests/reference/Result/export_VTK/test_vtk[0-0-2].md5 deleted file mode 100644 index 92d3956b6..000000000 --- a/python/tests/reference/Result/export_VTK/test_vtk[0-0-2].md5 +++ /dev/null @@ -1 +0,0 @@ -61953c35f61f3234b98d78a912e7dc83 diff --git a/python/tests/reference/Result/export_VTK/test_vtk[0-0-3].md5 b/python/tests/reference/Result/export_VTK/test_vtk[0-0-3].md5 deleted file mode 100644 index 42ccd0e25..000000000 --- a/python/tests/reference/Result/export_VTK/test_vtk[0-0-3].md5 +++ /dev/null @@ -1 +0,0 @@ -bb783bb80ff04dd435e814f4b82a3234 diff --git a/python/tests/reference/Result/export_VTK/test_vtk[1-0-1].md5 b/python/tests/reference/Result/export_VTK/test_vtk[1-0-1].md5 deleted file mode 100644 index 
c53be5801..000000000 --- a/python/tests/reference/Result/export_VTK/test_vtk[1-0-1].md5 +++ /dev/null @@ -1 +0,0 @@ -e1ca5306082fc3ab411f5ddab1a2e370 diff --git a/python/tests/reference/Result/export_VTK/test_vtk[1-0-2].md5 b/python/tests/reference/Result/export_VTK/test_vtk[1-0-2].md5 deleted file mode 100644 index 90f0ebc9c..000000000 --- a/python/tests/reference/Result/export_VTK/test_vtk[1-0-2].md5 +++ /dev/null @@ -1 +0,0 @@ -1641c3b3641e942ffc325d471bdfaf00 diff --git a/python/tests/reference/Result/export_VTK/test_vtk[1-0-3].md5 b/python/tests/reference/Result/export_VTK/test_vtk[1-0-3].md5 deleted file mode 100644 index cfe43f9cb..000000000 --- a/python/tests/reference/Result/export_VTK/test_vtk[1-0-3].md5 +++ /dev/null @@ -1 +0,0 @@ -ba97286c5d95bf817143f7bb9cf58421 diff --git a/python/tests/reference/Result/get/test_get[0].pbz2 b/python/tests/reference/Result/get/test_get[0].pbz2 index e4ef0bde9..9a598fd44 100644 Binary files a/python/tests/reference/Result/get/test_get[0].pbz2 and b/python/tests/reference/Result/get/test_get[0].pbz2 differ diff --git a/python/tests/reference/Result/get/test_get[2].pbz2 b/python/tests/reference/Result/get/test_get[2].pbz2 index c8398ab2f..e85ab0a06 100644 Binary files a/python/tests/reference/Result/get/test_get[2].pbz2 and b/python/tests/reference/Result/get/test_get[2].pbz2 differ diff --git a/python/tests/reference/Result/get/test_get[3].pbz2 b/python/tests/reference/Result/get/test_get[3].pbz2 index d2eb08545..b28255817 100644 Binary files a/python/tests/reference/Result/get/test_get[3].pbz2 and b/python/tests/reference/Result/get/test_get[3].pbz2 differ diff --git a/python/tests/reference/Result/get/test_get[4].pbz2 b/python/tests/reference/Result/get/test_get[4].pbz2 index c10fa916b..d293c5c52 100644 Binary files a/python/tests/reference/Result/get/test_get[4].pbz2 and b/python/tests/reference/Result/get/test_get[4].pbz2 differ diff --git a/python/tests/reference/Result/get/test_get[6].pbz2 b/python/tests/reference/Result/get/test_get[6].pbz2 index 4459d2e59..9f1706371 100644 Binary files a/python/tests/reference/Result/get/test_get[6].pbz2 and b/python/tests/reference/Result/get/test_get[6].pbz2 differ diff --git a/python/tests/reference/Result/get/test_get[7].pbz2 b/python/tests/reference/Result/get/test_get[7].pbz2 index c1e4c31e9..67fb5ff13 100644 Binary files a/python/tests/reference/Result/get/test_get[7].pbz2 and b/python/tests/reference/Result/get/test_get[7].pbz2 differ diff --git a/python/tests/reference/Result/material.yaml b/python/tests/reference/Result/material.yaml new file mode 120000 index 000000000..6bcb65e61 --- /dev/null +++ b/python/tests/reference/Result/material.yaml @@ -0,0 +1 @@ +12grains6x7x8.material.yaml \ No newline at end of file diff --git a/python/tests/reference/Result/place/test_place[0].pbz2 b/python/tests/reference/Result/place/test_place[0].pbz2 index 2d1d432b9..fdfef5961 100644 Binary files a/python/tests/reference/Result/place/test_place[0].pbz2 and b/python/tests/reference/Result/place/test_place[0].pbz2 differ diff --git a/python/tests/reference/Result/place/test_place[2].pbz2 b/python/tests/reference/Result/place/test_place[2].pbz2 index cfc94f1ee..19caac392 100644 Binary files a/python/tests/reference/Result/place/test_place[2].pbz2 and b/python/tests/reference/Result/place/test_place[2].pbz2 differ diff --git a/python/tests/reference/Result/place/test_place[3].pbz2 b/python/tests/reference/Result/place/test_place[3].pbz2 index 96f782ae2..1ef2bfec3 100644 Binary files 
a/python/tests/reference/Result/place/test_place[3].pbz2 and b/python/tests/reference/Result/place/test_place[3].pbz2 differ diff --git a/python/tests/reference/Result/place/test_place[4].pbz2 b/python/tests/reference/Result/place/test_place[4].pbz2 index f2f744121..beae7e608 100644 Binary files a/python/tests/reference/Result/place/test_place[4].pbz2 and b/python/tests/reference/Result/place/test_place[4].pbz2 differ diff --git a/python/tests/reference/Result/place/test_place[6].pbz2 b/python/tests/reference/Result/place/test_place[6].pbz2 index 2844addcb..0246231f4 100644 Binary files a/python/tests/reference/Result/place/test_place[6].pbz2 and b/python/tests/reference/Result/place/test_place[6].pbz2 differ diff --git a/python/tests/reference/Result/place/test_place[7].pbz2 b/python/tests/reference/Result/place/test_place[7].pbz2 index cd4f098a1..808f4c514 100644 Binary files a/python/tests/reference/Result/place/test_place[7].pbz2 and b/python/tests/reference/Result/place/test_place[7].pbz2 differ diff --git a/python/tests/reference/Result/tensionY_restart.yaml b/python/tests/reference/Result/tensionY_restart.yaml new file mode 100644 index 000000000..89cfcc325 --- /dev/null +++ b/python/tests/reference/Result/tensionY_restart.yaml @@ -0,0 +1,18 @@ +--- +solver: + mechanical: spectral_basic + +loadstep: + - boundary_conditions: + mechanical: + dot_F: [[x, 0, 0], + [0, 1.0e-3, 0], + [0, 0, x]] + P: [[0, x, x], + [x, x, x], + [x, x, 0]] + discretization: + t: 10 + N: 20 + f_out: 4 + f_restart: 20 diff --git a/python/tests/test_Result.py b/python/tests/test_Result.py index 63141488f..e4a514f1e 100644 --- a/python/tests/test_Result.py +++ b/python/tests/test_Result.py @@ -294,7 +294,7 @@ class TestResult: default.add_curl('x') in_file = default.place('curl(x)') in_memory = grid_filters.curl(default.size,x.reshape(tuple(default.cells)+x.shape[1:])).reshape(in_file.shape) - assert (in_file==in_memory).all() + assert (in_file == in_memory).all() @pytest.mark.parametrize('shape',['vector','tensor']) def test_add_divergence(self,default,shape): @@ -304,7 +304,7 @@ class TestResult: default.add_divergence('x') in_file = default.place('divergence(x)') in_memory = grid_filters.divergence(default.size,x.reshape(tuple(default.cells)+x.shape[1:])).reshape(in_file.shape) - assert (in_file==in_memory).all() + assert (in_file == in_memory).all() @pytest.mark.parametrize('shape',['scalar','pseudo_scalar','vector']) def test_add_gradient(self,default,shape): @@ -315,7 +315,7 @@ class TestResult: default.add_gradient('x') in_file = default.place('gradient(x)') in_memory = grid_filters.gradient(default.size,x.reshape(tuple(default.cells)+x.shape[1:])).reshape(in_file.shape) - assert (in_file==in_memory).all() + assert (in_file == in_memory).all() @pytest.mark.parametrize('overwrite',['off','on']) def test_add_overwrite(self,default,overwrite): @@ -338,7 +338,7 @@ class TestResult: created_second = datetime.strptime(created_second,'%Y-%m-%d %H:%M:%S%z') if overwrite == 'on': - assert created_first < created_second and np.allclose(last.place('sigma'),311.) + assert created_first < created_second and np.allclose(last.place('sigma'),311.) else: assert created_first == created_second and not np.allclose(last.place('sigma'),311.) 
@@ -378,7 +378,7 @@ class TestResult: @pytest.mark.parametrize('fname',['12grains6x7x8_tensionY.hdf5'],ids=range(1)) @pytest.mark.parametrize('inc',[4,0],ids=range(2)) @pytest.mark.xfail(int(vtk.vtkVersion.GetVTKVersion().split('.')[0])<9, reason='missing "Direction" attribute') - def test_vtk(self,request,tmp_path,ref_path,update,patch_execution_stamp,patch_datetime_now,output,fname,inc): + def test_export_vtk(self,request,tmp_path,ref_path,update,patch_execution_stamp,patch_datetime_now,output,fname,inc): result = Result(ref_path/fname).view(increments=inc) result.export_VTK(output,target_dir=tmp_path,parallel=False) fname = fname.split('.')[0]+f'_inc{(inc if type(inc) == int else inc[0]):0>2}.vti' @@ -395,7 +395,7 @@ class TestResult: @pytest.mark.parametrize('mode',['point','cell']) @pytest.mark.parametrize('output',[False,True]) - def test_vtk_marc(self,tmp_path,ref_path,mode,output): + def test_export_vtk_marc(self,tmp_path,ref_path,mode,output): os.chdir(tmp_path) result = Result(ref_path/'check_compile_job1.hdf5') result.export_VTK(output,mode) @@ -418,7 +418,7 @@ class TestResult: def test_vtk_custom_path(self,tmp_path,single_phase): export_dir = tmp_path/'export_dir' single_phase.export_VTK(mode='point',target_dir=export_dir,parallel=False) - assert set(os.listdir(export_dir))==set([f'{single_phase.fname.stem}_inc{i:02}.vtp' for i in range(0,40+1,4)]) + assert set(os.listdir(export_dir)) == set([f'{single_phase.fname.stem}_inc{i:02}.vtp' for i in range(0,40+1,4)]) def test_XDMF_datatypes(self,tmp_path,single_phase,update,ref_path): for what,shape in {'scalar':(),'vector':(3,),'tensor':(3,3),'matrix':(12,)}.items(): @@ -538,30 +538,53 @@ class TestResult: ref = pickle.load(f) assert cur is None if ref is None else dict_equal(cur,ref) + def test_simulation_setup_files(self,default): + assert set(default.simulation_setup_files) == set(['12grains6x7x8.vti', + 'material.yaml', + 'tensionY.yaml', + 'previous/12grains6x7x8.vti', + 'previous/material.yaml', + 'previous/tensionY.yaml']) + + def test_export_simulation_setup_files(self,tmp_path,default): + sub = 'deep/down' + default.export_simulation_setup(target_dir=tmp_path/sub,overwrite=True) + for f in default.simulation_setup_files: + assert (tmp_path/sub/f).exists() + + def test_export_simulation_setup_overwrite(self,tmp_path,default): + os.chdir(tmp_path) + default.export_simulation_setup('material.yaml',overwrite=True) + with pytest.raises(PermissionError): + default.export_simulation_setup('material.yaml',overwrite=False) + + @pytest.mark.parametrize('output',['12grains6x7x8.vti', + 'tensionY.yaml', + ]) + def test_export_simulation_setup_content(self,ref_path,tmp_path,default,output): + default.export_simulation_setup(output,target_dir=tmp_path,overwrite=True) + assert open(tmp_path/output).read() == open(ref_path/output).read() @pytest.mark.parametrize('fname',['4grains2x4x3_compressionY.hdf5', '6grains6x7x8_single_phase_tensionY.hdf5']) @pytest.mark.parametrize('output',['material.yaml','*']) - @pytest.mark.parametrize('overwrite',[True,False]) - def test_export_setup(self,ref_path,tmp_path,fname,output,overwrite): + def test_export_simulation_setup_consistency(self,ref_path,tmp_path,fname,output): r = Result(ref_path/fname) - r.export_setup(output,target_dir=tmp_path) + r.export_simulation_setup(output,target_dir=tmp_path) with h5py.File(ref_path/fname,'r') as f_hdf5: for file in fnmatch.filter(f_hdf5['setup'].keys(),output): with open(tmp_path/file) as f: assert f_hdf5[f'setup/{file}'][()][0].decode() == f.read() - 
r.export_setup(output,target_dir=tmp_path,overwrite=overwrite) - def test_export_setup_custom_path(self,ref_path,tmp_path): - src = ref_path/'4grains2x4x3_compressionY.hdf5' + def test_export_simulation_setup_custom_path(self,ref_path,tmp_path): subdir = 'export_dir' absdir = tmp_path/subdir - absdir.mkdir() + absdir.mkdir(exist_ok=True) - r = Result(src) + r = Result(ref_path/'4grains2x4x3_compressionY.hdf5') for t,cwd in zip([absdir,subdir,None],[tmp_path,tmp_path,absdir]): os.chdir(cwd) - r.export_setup('material.yaml',target_dir=t) + r.export_simulation_setup('material.yaml',target_dir=t) assert 'material.yaml' in os.listdir(absdir); (absdir/'material.yaml').unlink() @pytest.mark.parametrize('fname',['4grains2x4x3_compressionY.hdf5',
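
The `get`/`place` pair relocated in this patch is the main read API of `Result`. Below is a minimal usage sketch, not part of the patch itself: the file name is taken from the test references above, and the dataset label `'F'` stands in for whatever datasets are actually visible in your DADF5 file.

```python
from pathlib import Path
import damask

# Illustrative file name, borrowed from the test references in this patch;
# point this at your own DADF5 results file.
result = damask.Result(Path('12grains6x7x8_tensionY.hdf5'))

# get(): data keyed by increment/phase/homogenization, mirroring the HDF5
# group layout. With the default flatten=True, singular hierarchy levels are
# collapsed; with prune=True, branches without data are dropped.
per_phase = result.get('F')      # 'F' is a placeholder for any visible dataset label

# place(): the same data fused into spatial (cell) order as masked arrays,
# compatible with the damask.VTK geometry representation. Per-constituent
# datasets are distinguished by a '#<c>' suffix unless only a single
# constituent is present or requested; missing entries carry the fill values.
spatial = result.place('F')
```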
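For the renamed setup export, a sketch of the intended call pattern, again using the illustrative test file name. The `PermissionError` branch reflects the behaviour introduced in this patch: with `overwrite=False`, an existing file now raises instead of being skipped with a message.

```python
from pathlib import Path
import damask

result = damask.Result(Path('12grains6x7x8_tensionY.hdf5'))   # illustrative test file

# New read-only property: names of all setup files stored under /setup,
# e.g. 'material.yaml', 'tensionY.yaml', 'previous/material.yaml'.
print(result.simulation_setup_files)

# Export the stored setup; nested target directories are created as needed.
result.export_simulation_setup(target_dir='extracted/setup', overwrite=True)

# With overwrite=False (the default), hitting an existing file raises.
try:
    result.export_simulation_setup('material.yaml', target_dir='extracted/setup')
except PermissionError as err:
    print(err)
```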