DAMASK_EICMD/python/tests/test_Result.py

import bz2
import pickle
import time
import shutil
import os
import sys
from datetime import datetime
import pytest
import numpy as np
import h5py
from damask import Result
from damask import Rotation
from damask import Orientation
from damask import tensor
from damask import mechanics
from damask import grid_filters


@pytest.fixture
def default(tmp_path,ref_path):
    """Small Result file in temp location for modification."""
    fname = '12grains6x7x8_tensionY.hdf5'
    shutil.copy(ref_path/fname,tmp_path)
    f = Result(tmp_path/fname)
    f.view('times',20.0)
    return f


@pytest.fixture
def single_phase(tmp_path,ref_path):
    """Single phase Result file in temp location for modification."""
    fname = '6grains6x7x8_single_phase_tensionY.hdf5'
    shutil.copy(ref_path/fname,tmp_path)
    return Result(tmp_path/fname)


@pytest.fixture
def ref_path(ref_path_base):
    """Directory containing reference results."""
    return ref_path_base/'Result'


def dict_equal(d1, d2):
    """Check that every key of d1 is present in d2 with an allclose-equal (possibly nested) value."""
    for k in d1:
        if k not in d2:
            return False
        if type(d1[k]) is dict:
            if not dict_equal(d1[k],d2[k]):
                return False
        elif not np.allclose(d1[k],d2[k]):
            return False
    return True
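
# Minimal usage sketch for dict_equal (hypothetical data, not part of the test suite):
#   dict_equal({'a': {'b': np.ones(3)}}, {'a': {'b': np.ones(3)}})   # -> True
#   dict_equal({'a': {'b': np.ones(3)}}, {'a': {'b': np.zeros(3)}})  # -> False (values differ)
#   dict_equal({'a': np.ones(3)},        {'b': np.ones(3)})          # -> False (key missing in d2)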


class TestResult:

    def test_self_report(self,default):
        print(default)

    def test_view_all(self,default):
        default.view('increments',True)
        a = default.get_dataset_location('F')
        default.view('increments','*')
        b = default.get_dataset_location('F')
        default.view('increments',default.increments_in_range(0,np.iinfo(int).max))
        c = default.get_dataset_location('F')

        default.view('times',True)
        d = default.get_dataset_location('F')
        default.view('times','*')
        e = default.get_dataset_location('F')
        default.view('times',default.times_in_range(0.0,np.inf))
        f = default.get_dataset_location('F')

        assert a == b == c == d == e == f

    @pytest.mark.parametrize('what',['increments','times','phases'])  # ToDo: discuss homogenizations
    def test_view_none(self,default,what):
        default.view(what,False)
        a = default.get_dataset_location('F')
        default.view(what,[])
        b = default.get_dataset_location('F')

        assert a == b == []

    @pytest.mark.parametrize('what',['increments','times','phases'])  # ToDo: discuss homogenizations
    def test_view_more(self,default,what):
        default.view(what,False)
        default.view_more(what,'*')
        a = default.get_dataset_location('F')

        default.view(what,True)
        b = default.get_dataset_location('F')

        assert a == b

    @pytest.mark.parametrize('what',['increments','times','phases'])  # ToDo: discuss homogenizations
    def test_view_less(self,default,what):
        default.view(what,True)
        default.view_less(what,'*')
        a = default.get_dataset_location('F')

        default.view(what,False)
        b = default.get_dataset_location('F')

        assert a == b == []

    def test_view_invalid(self,default):
        with pytest.raises(AttributeError):
            default.view('invalid',True)

    def test_add_absolute(self,default):
        default.add_absolute('F_e')
        loc = {'F_e':   default.get_dataset_location('F_e'),
               '|F_e|': default.get_dataset_location('|F_e|')}
        in_memory = np.abs(default.read_dataset(loc['F_e'],0))
        in_file   = default.read_dataset(loc['|F_e|'],0)
        assert np.allclose(in_memory,in_file)

    @pytest.mark.parametrize('mode',['direct','function'])
    def test_add_calculation(self,default,tmp_path,mode):

        if mode == 'direct':
            default.add_calculation('x','2.0*np.abs(#F#)-1.0','-','my notes')
        else:
            with open(tmp_path/'f.py','w') as f:
                f.write("import numpy as np\ndef my_func(field):\n return 2.0*np.abs(field)-1.0\n")
            sys.path.insert(0,str(tmp_path))
            import f
            default.enable_user_function(f.my_func)
            default.add_calculation('x','my_func(#F#)','-','my notes')

        loc = {'F': default.get_dataset_location('F'),
               'x': default.get_dataset_location('x')}
        in_memory = 2.0*np.abs(default.read_dataset(loc['F'],0))-1.0
        in_file   = default.read_dataset(loc['x'],0)
        assert np.allclose(in_memory,in_file)

    def test_add_stress_Cauchy(self,default):
        default.add_stress_Cauchy('P','F')
        loc = {'F':     default.get_dataset_location('F'),
               'P':     default.get_dataset_location('P'),
               'sigma': default.get_dataset_location('sigma')}
        in_memory = mechanics.stress_Cauchy(default.read_dataset(loc['P'],0),
                                            default.read_dataset(loc['F'],0))
        in_file   = default.read_dataset(loc['sigma'],0)
        assert np.allclose(in_memory,in_file)

    def test_add_determinant(self,default):
        default.add_determinant('P')
        loc = {'P':      default.get_dataset_location('P'),
               'det(P)': default.get_dataset_location('det(P)')}
        in_memory = np.linalg.det(default.read_dataset(loc['P'],0)).reshape(-1,1)
        in_file   = default.read_dataset(loc['det(P)'],0)
        assert np.allclose(in_memory,in_file)

    def test_add_deviator(self,default):
        default.add_deviator('P')
        loc = {'P':   default.get_dataset_location('P'),
               's_P': default.get_dataset_location('s_P')}
        in_memory = tensor.deviatoric(default.read_dataset(loc['P'],0))
        in_file   = default.read_dataset(loc['s_P'],0)
        assert np.allclose(in_memory,in_file)

    @pytest.mark.parametrize('eigenvalue,function',[('max',np.amax),('min',np.amin)])
    def test_add_eigenvalue(self,default,eigenvalue,function):
        default.add_stress_Cauchy('P','F')
        default.add_eigenvalue('sigma',eigenvalue)
        loc = {'sigma':  default.get_dataset_location('sigma'),
               'lambda': default.get_dataset_location(f'lambda_{eigenvalue}(sigma)')}
        in_memory = function(tensor.eigenvalues(default.read_dataset(loc['sigma'],0)),axis=1,keepdims=True)
        in_file   = default.read_dataset(loc['lambda'],0)
        assert np.allclose(in_memory,in_file)

    @pytest.mark.parametrize('eigenvalue,idx',[('max',2),('mid',1),('min',0)])
    def test_add_eigenvector(self,default,eigenvalue,idx):
        default.add_stress_Cauchy('P','F')
        default.add_eigenvector('sigma',eigenvalue)
        loc = {'sigma':    default.get_dataset_location('sigma'),
               'v(sigma)': default.get_dataset_location(f'v_{eigenvalue}(sigma)')}
        in_memory = tensor.eigenvectors(default.read_dataset(loc['sigma'],0))[:,idx]
        in_file   = default.read_dataset(loc['v(sigma)'],0)
        assert np.allclose(in_memory,in_file)

    @pytest.mark.parametrize('d',[[1,0,0],[0,1,0],[0,0,1]])
    def test_add_IPF_color(self,default,d):
        default.add_IPF_color(d,'O')
        loc = {'O':     default.get_dataset_location('O'),
               'color': default.get_dataset_location('IPFcolor_[{} {} {}]'.format(*d))}
        qu = default.read_dataset(loc['O']).view(np.double).squeeze()
        crystal_structure = default._get_attribute(default.get_dataset_location('O')[0],'lattice')
        c = Orientation(rotation=qu,lattice=crystal_structure)
        in_memory = np.uint8(c.IPF_color(np.array(d))*255)
        in_file   = default.read_dataset(loc['color'])
        assert np.allclose(in_memory,in_file)

    def test_add_maximum_shear(self,default):
        default.add_stress_Cauchy('P','F')
        default.add_maximum_shear('sigma')
        loc = {'sigma':            default.get_dataset_location('sigma'),
               'max_shear(sigma)': default.get_dataset_location('max_shear(sigma)')}
        in_memory = mechanics.maximum_shear(default.read_dataset(loc['sigma'],0)).reshape(-1,1)
        in_file   = default.read_dataset(loc['max_shear(sigma)'],0)
        assert np.allclose(in_memory,in_file)

    def test_add_Mises_strain(self,default):
        t = ['V','U'][np.random.randint(0,2)]
        m = np.random.random()*2.0 - 1.0
        default.add_strain('F',t,m)
        label = f'epsilon_{t}^{m}(F)'
        default.add_equivalent_Mises(label)
        loc = {label:       default.get_dataset_location(label),
               label+'_vM': default.get_dataset_location(label+'_vM')}
        in_memory = mechanics.equivalent_strain_Mises(default.read_dataset(loc[label],0)).reshape(-1,1)
        in_file   = default.read_dataset(loc[label+'_vM'],0)
        assert np.allclose(in_memory,in_file)

    def test_add_Mises_stress(self,default):
        default.add_stress_Cauchy('P','F')
        default.add_equivalent_Mises('sigma')
        loc = {'sigma':    default.get_dataset_location('sigma'),
               'sigma_vM': default.get_dataset_location('sigma_vM')}
        in_memory = mechanics.equivalent_stress_Mises(default.read_dataset(loc['sigma'],0)).reshape(-1,1)
        in_file   = default.read_dataset(loc['sigma_vM'],0)
        assert np.allclose(in_memory,in_file)

    def test_add_Mises_invalid(self,default):
        default.add_stress_Cauchy('P','F')
        default.add_calculation('sigma_y','#sigma#',unit='y')
        default.add_equivalent_Mises('sigma_y')
        assert default.get_dataset_location('sigma_y_vM') == []

    def test_add_Mises_stress_strain(self,default):
        default.add_stress_Cauchy('P','F')
        default.add_calculation('sigma_y','#sigma#',unit='y')
        default.add_calculation('sigma_x','#sigma#',unit='x')
        default.add_equivalent_Mises('sigma_y',kind='strain')
        default.add_equivalent_Mises('sigma_x',kind='stress')
        loc = {'y': default.get_dataset_location('sigma_y_vM'),
               'x': default.get_dataset_location('sigma_x_vM')}
        assert not np.allclose(default.read_dataset(loc['y'],0),default.read_dataset(loc['x'],0))

    def test_add_norm(self,default):
        default.add_norm('F',1)
        loc = {'F':     default.get_dataset_location('F'),
               '|F|_1': default.get_dataset_location('|F|_1')}
        in_memory = np.linalg.norm(default.read_dataset(loc['F'],0),ord=1,axis=(1,2),keepdims=True)
        in_file   = default.read_dataset(loc['|F|_1'],0)
        assert np.allclose(in_memory,in_file)

    def test_add_stress_second_Piola_Kirchhoff(self,default):
        default.add_stress_second_Piola_Kirchhoff('P','F')
        loc = {'F': default.get_dataset_location('F'),
               'P': default.get_dataset_location('P'),
               'S': default.get_dataset_location('S')}
        in_memory = mechanics.stress_second_Piola_Kirchhoff(default.read_dataset(loc['P'],0),
                                                            default.read_dataset(loc['F'],0))
        in_file   = default.read_dataset(loc['S'],0)
        assert np.allclose(in_memory,in_file)

    @pytest.mark.skip(reason='requires rework of lattice.f90')
    @pytest.mark.parametrize('polar',[True,False])
    def test_add_pole(self,default,polar):
        pole = np.array([1.,0.,0.])
        default.add_pole('O',pole,polar)
        loc = {'O':    default.get_dataset_location('O'),
               'pole': default.get_dataset_location('p^{}_[1 0 0)'.format(u'rφ' if polar else 'xy'))}
        rot = Rotation(default.read_dataset(loc['O']).view(np.double))
        rotated_pole = rot * np.broadcast_to(pole,rot.shape+(3,))
        xy = rotated_pole[:,0:2]/(1.+abs(pole[2]))
        in_memory = xy if not polar else \
                    np.block([np.sqrt(xy[:,0:1]*xy[:,0:1]+xy[:,1:2]*xy[:,1:2]),np.arctan2(xy[:,1:2],xy[:,0:1])])
        in_file = default.read_dataset(loc['pole'])
        assert np.allclose(in_memory,in_file)

    def test_add_rotation(self,default):
        default.add_rotation('F')
        loc = {'F':    default.get_dataset_location('F'),
               'R(F)': default.get_dataset_location('R(F)')}
        in_memory = mechanics.rotation(default.read_dataset(loc['F'],0)).as_matrix()
        in_file   = default.read_dataset(loc['R(F)'],0)
        assert np.allclose(in_memory,in_file)

    def test_add_spherical(self,default):
        default.add_spherical('P')
        loc = {'P':   default.get_dataset_location('P'),
               'p_P': default.get_dataset_location('p_P')}
        in_memory = tensor.spherical(default.read_dataset(loc['P'],0),False).reshape(-1,1)
        in_file   = default.read_dataset(loc['p_P'],0)
        assert np.allclose(in_memory,in_file)

    def test_add_strain(self,default):
        t = ['V','U'][np.random.randint(0,2)]
        m = np.random.random()*2.0 - 1.0
        default.add_strain('F',t,m)
        label = f'epsilon_{t}^{m}(F)'
        loc = {'F':   default.get_dataset_location('F'),
               label: default.get_dataset_location(label)}
        in_memory = mechanics.strain(default.read_dataset(loc['F'],0),t,m)
        in_file   = default.read_dataset(loc[label],0)
        assert np.allclose(in_memory,in_file)

    def test_add_stretch_right(self,default):
        default.add_stretch_tensor('F','U')
        loc = {'F':    default.get_dataset_location('F'),
               'U(F)': default.get_dataset_location('U(F)')}
        in_memory = mechanics.stretch_right(default.read_dataset(loc['F'],0))
        in_file   = default.read_dataset(loc['U(F)'],0)
        assert np.allclose(in_memory,in_file)

    def test_add_stretch_left(self,default):
        default.add_stretch_tensor('F','V')
        loc = {'F':    default.get_dataset_location('F'),
               'V(F)': default.get_dataset_location('V(F)')}
        in_memory = mechanics.stretch_left(default.read_dataset(loc['F'],0))
        in_file   = default.read_dataset(loc['V(F)'],0)
        assert np.allclose(in_memory,in_file)

    def test_add_invalid(self,default):
        with pytest.raises(TypeError):
            default.add_calculation('#invalid#*2')

    @pytest.mark.parametrize('overwrite',['off','on'])
    def test_add_overwrite(self,default,overwrite):
        default.view('times',default.times_in_range(0,np.inf)[-1])

        default.add_stress_Cauchy()
        loc = default.get_dataset_location('sigma')
        with h5py.File(default.fname,'r') as f:
            # h5py3 compatibility
            try:
                created_first = f[loc[0]].attrs['created'].decode()
            except AttributeError:
                created_first = f[loc[0]].attrs['created']
            created_first = datetime.strptime(created_first,'%Y-%m-%d %H:%M:%S%z')

        if overwrite == 'on':
            default.allow_modification()
        else:
            default.disallow_modification()

        time.sleep(2.)
        try:
            default.add_calculation('sigma','#sigma#*0.0+311.','not the Cauchy stress')
        except ValueError:
            pass

        with h5py.File(default.fname,'r') as f:
            # h5py3 compatibility
            try:
                created_second = f[loc[0]].attrs['created'].decode()
            except AttributeError:
                created_second = f[loc[0]].attrs['created']
            created_second = datetime.strptime(created_second,'%Y-%m-%d %H:%M:%S%z')

        if overwrite == 'on':
            assert created_first < created_second and np.allclose(default.read_dataset(loc),311.)
        else:
            assert created_first == created_second and not np.allclose(default.read_dataset(loc),311.)

    @pytest.mark.parametrize('allowed',['off','on'])
    def test_rename(self,default,allowed):
        if allowed == 'on':
            F = default.read_dataset(default.get_dataset_location('F'))
            default.allow_modification()
            default.rename('F','new_name')
            assert np.all(F == default.read_dataset(default.get_dataset_location('new_name')))
            default.disallow_modification()

        with pytest.raises(PermissionError):
            default.rename('P','another_new_name')

    @pytest.mark.parametrize('mode',['cell','node'])
    def test_coordinates(self,default,mode):
        if mode == 'cell':
            a = grid_filters.coordinates0_point(default.cells,default.size,default.origin)
            b = default.coordinates0_point.reshape(tuple(default.cells)+(3,),order='F')
        elif mode == 'node':
            a = grid_filters.coordinates0_node(default.cells,default.size,default.origin)
            b = default.coordinates0_node.reshape(tuple(default.cells+1)+(3,),order='F')
        assert np.allclose(a,b)

    @pytest.mark.parametrize('output',['F',[],['F','P']])
    def test_vtk(self,tmp_path,default,output):
        os.chdir(tmp_path)
        default.save_VTK(output)

    @pytest.mark.parametrize('mode',['point','cell'])
    def test_vtk_mode(self,tmp_path,single_phase,mode):
        os.chdir(tmp_path)
        single_phase.save_VTK(mode=mode)

    def test_XDMF(self,tmp_path,single_phase,update,ref_path):
        for shape in [('scalar',()),('vector',(3,)),('tensor',(3,3)),('matrix',(12,))]:
            for dtype in ['f4','f8','i1','i2','i4','i8','u1','u2','u4','u8']:
                single_phase.add_calculation(f'{shape[0]}_{dtype}',f"np.ones(np.shape(#F#)[0:1]+{shape[1]},'{dtype}')")
        fname = os.path.splitext(os.path.basename(single_phase.fname))[0]+'.xdmf'
        os.chdir(tmp_path)
        single_phase.save_XDMF()
        if update:
            shutil.copy(tmp_path/fname,ref_path/fname)
        assert sorted(open(tmp_path/fname).read()) == sorted(open(ref_path/fname).read())  # XML is not ordered

    def test_XDMF_invalid(self,default):
        with pytest.raises(TypeError):
            default.save_XDMF()

    @pytest.mark.parametrize('view,output,compress,strip',
                             [({},['F','P','F','L_p','F_e','F_p'],True,True),
                              ({'increments':3},'F',True,True),
                              ({'increments':[1,8,3,4,5,6,7]},['F','P'],True,True),
                              ({'phases':['A','B']},['F','P'],True,True),
                              ({'phases':['A','C'],'homogenizations':False},['F','P','O'],True,True),
                              ({'phases':False,'homogenizations':False},['F','P','O'],True,True),
                              ({'phases':False},['Delta_V'],True,True),
                              ({},['u_p','u_n'],False,False)],
                             ids=list(range(8)))
    def test_read(self,update,request,ref_path,view,output,compress,strip):
        result = Result(ref_path/'4grains2x4x3_compressionY.hdf5')
        for key,value in view.items():
            result.view(key,value)

        N = request.node.name[8:].split('[')[1].split(']')[0]
        cur = result.read(output,compress,strip)
        if update:
            with bz2.BZ2File(ref_path/f'read_{N}.pbz2','w') as f:
                pickle.dump(cur,f)

        with bz2.BZ2File(ref_path/f'read_{N}.pbz2') as f:
            assert dict_equal(cur,pickle.load(f))

    @pytest.mark.parametrize('view,output,compress,strip,constituents',
                             [({},['F','P','F','L_p','F_e','F_p'],True,True,None),
                              ({'increments':3},'F',True,True,[0,1,2,3,4,5,6,7]),
                              ({'increments':[1,8,3,4,5,6,7]},['F','P'],True,True,1),
                              ({'phases':['A','B']},['F','P'],True,True,[1,2]),
                              ({'phases':['A','C'],'homogenizations':False},['F','P','O'],True,True,[0,7]),
                              ({'phases':False,'homogenizations':False},['F','P','O'],True,True,[1,2,3,4]),
                              ({'phases':False},['Delta_V'],True,True,[1,2,4]),
                              ({},['u_p','u_n'],False,False,None)],
                             ids=list(range(8)))
    def test_place(self,update,request,ref_path,view,output,compress,strip,constituents):
        result = Result(ref_path/'4grains2x4x3_compressionY.hdf5')
        for key,value in view.items():
            result.view(key,value)

        N = request.node.name[8:].split('[')[1].split(']')[0]
        cur = result.place(output,compress,strip,constituents)
        if update:
            with bz2.BZ2File(ref_path/f'place_{N}.pbz2','w') as f:
                pickle.dump(cur,f)

        with bz2.BZ2File(ref_path/f'place_{N}.pbz2') as f:
            assert dict_equal(cur,pickle.load(f))