Merge branch 'with-to-file-methods' into 'development'

Modified the file writing of Geom and Table to use a general to_file() that takes a "format=" argument

See merge request damask/DAMASK!213
Franz Roters 2020-09-04 10:27:30 +02:00
commit 23d6151197
31 changed files with 202 additions and 149 deletions
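
In short, the per-format writers (Table.to_ASCII, Geom.to_vtr) are folded into one to_file() per class that dispatches on a format= argument; Geom defaults to format='vtr', Table to format='ASCII'. A minimal usage sketch based on the signatures changed below (constructor arguments and file names are illustrative only):

    import numpy as np
    import damask

    geom = damask.Geom(np.ones((2,2,2),dtype=int),[1.,1.,1.])   # toy 2x2x2 geometry, unit size
    geom.to_file('example')                                     # default format='vtr' -> example.vtr
    geom.to_file('example.geom',format='ASCII',pack=False)      # former plain-text .geom writer

    table = damask.Table(np.random.rand(4,3),{'pos':(3,)},['illustrative comment'])
    table.to_file('example.txt')                                # default format='ASCII'
    table.to_file('example_new.txt',new_style=True)             # header lines marked by '#' only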

@@ -1 +1 @@
-Subproject commit 1ca2223c68475bbcb9da633353dbe4a98c18db0d
+Subproject commit c6dc3f116fc6007caf076772d5c29f0a0523544d

View File

@@ -49,4 +49,4 @@ for name in filenames:
 table.add('coord',rot_to_TSL.broadcast_to(coord.shape[0]) @ coord,scriptID+' '+' '.join(sys.argv[1:]))
-table.to_ASCII(sys.stdout if name is None else name)
+table.to_file(sys.stdout if name is None else name)

View File

@@ -45,4 +45,4 @@ for name in filenames:
 np.cumprod(table.get(label),0) if options.product else np.cumsum(table.get(label),0),
 scriptID+' '+' '.join(sys.argv[1:]))
-table.to_ASCII(sys.stdout if name is None else name)
+table.to_file(sys.stdout if name is None else name)

View File

@@ -60,4 +60,4 @@ for filename in options.filenames:
 os.mkdir(dirname,0o755)
 file_out = '{}_inc{}.txt'.format(os.path.splitext(os.path.split(filename)[-1])[0],
 inc[3:].zfill(N_digits))
-table.to_ASCII(os.path.join(dirname,file_out))
+table.to_file(os.path.join(dirname,file_out))

View File

@@ -191,4 +191,4 @@ for name in filenames:
 volumeMismatch.reshape(-1,1,order='F'),
 scriptID+' '+' '.join(sys.argv[1:]))
-table.to_ASCII(sys.stdout if name is None else name)
+table.to_file(sys.stdout if name is None else name)

View File

@@ -55,4 +55,4 @@ for name in filenames:
 curl.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape),order='F'),
 scriptID+' '+' '.join(sys.argv[1:]))
-table.to_ASCII(sys.stdout if name is None else name)
+table.to_file(sys.stdout if name is None else name)

View File

@@ -71,4 +71,4 @@ for name in filenames:
 derivative(table.get(options.coordinates),table.get(label)),
 scriptID+' '+' '.join(sys.argv[1:]))
-table.to_ASCII(sys.stdout if name is None else name)
+table.to_file(sys.stdout if name is None else name)

View File

@@ -60,7 +60,7 @@ for name in filenames:
 table.add('fluct({}).{}'.format(options.f,options.pos),
 damask.grid_filters.node_displacement_fluct(size,F).reshape(-1,3,order='F'),
 scriptID+' '+' '.join(sys.argv[1:]))
-table.to_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt')
+table.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt')
 else:
 table.add('avg({}).{}'.format(options.f,options.pos),
 damask.grid_filters.cell_displacement_avg(size,F).reshape(-1,3,order='F'),
@@ -68,4 +68,4 @@ for name in filenames:
 table.add('fluct({}).{}'.format(options.f,options.pos),
 damask.grid_filters.cell_displacement_fluct(size,F).reshape(-1,3,order='F'),
 scriptID+' '+' '.join(sys.argv[1:]))
-table.to_ASCII(sys.stdout if name is None else name)
+table.to_file(sys.stdout if name is None else name)

View File

@@ -55,4 +55,4 @@ for name in filenames:
 div.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)//3,order='F'),
 scriptID+' '+' '.join(sys.argv[1:]))
-table.to_ASCII(sys.stdout if name is None else name)
+table.to_file(sys.stdout if name is None else name)

View File

@@ -184,4 +184,4 @@ for name in filenames:
 distance[i,:],
 scriptID+' '+' '.join(sys.argv[1:]))
-table.to_ASCII(sys.stdout if name is None else name)
+table.to_file(sys.stdout if name is None else name)

View File

@@ -73,4 +73,4 @@ for name in filenames:
 mode = 'wrap' if options.periodic else 'nearest'),
 scriptID+' '+' '.join(sys.argv[1:]))
-table.to_ASCII(sys.stdout if name is None else name)
+table.to_file(sys.stdout if name is None else name)

View File

@@ -55,4 +55,4 @@ for name in filenames:
 grad.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)*3,order='F'),
 scriptID+' '+' '.join(sys.argv[1:]))
-table.to_ASCII(sys.stdout if name is None else name)
+table.to_file(sys.stdout if name is None else name)

View File

@@ -61,4 +61,4 @@ for name in filenames:
 for data in options.label:
 table.add(data+'_addIndexed',indexedTable.get(data)[idx],scriptID+' '+' '.join(sys.argv[1:]))
-table.to_ASCII(sys.stdout if name is None else name)
+table.to_file(sys.stdout if name is None else name)

View File

@@ -147,4 +147,4 @@ for name in filenames:
 if 'axisangle' in options.output:
 table.add('om({})'.format(label),o.as_axisangle(options.degrees), scriptID+' '+' '.join(sys.argv[1:]))
-table.to_ASCII(sys.stdout if name is None else name)
+table.to_file(sys.stdout if name is None else name)

View File

@@ -189,4 +189,4 @@ for name in filenames:
 for i,label in enumerate(labels):
 table.add(label,S[:,i],scriptID+' '+' '.join(sys.argv[1:]))
-table.to_ASCII(sys.stdout if name is None else name)
+table.to_file(sys.stdout if name is None else name)

View File

@@ -58,4 +58,4 @@ for name in filenames:
 rng.shuffle(uniques)
 table.set(label,uniques[inverse], scriptID+' '+' '.join(sys.argv[1:]))
-table.to_ASCII(sys.stdout if name is None else name)
+table.to_file(sys.stdout if name is None else name)

View File

@@ -154,4 +154,4 @@ for name in filenames:
 homogenization=options.homogenization,comments=header)
 damask.util.croak(geom)
-geom.to_file(os.path.splitext(name)[0]+'.geom',pack=False)
+geom.to_file(os.path.splitext(name)[0]+'.geom',format='ASCII',pack=False)

View File

@@ -89,4 +89,4 @@ geom=damask.Geom(microstructure,options.size,
 comments=[scriptID + ' ' + ' '.join(sys.argv[1:])])
 damask.util.croak(geom)
-geom.to_file(sys.stdout if name is None else name,pack=False)
+geom.to_file(sys.stdout if name is None else name,format='ASCII',pack=False)

View File

@@ -142,4 +142,4 @@ geom = damask.Geom(microstructure.reshape(grid),
 homogenization=options.homogenization,comments=header)
 damask.util.croak(geom)
-geom.to_file(sys.stdout if name is None else name,pack=False)
+geom.to_file(sys.stdout if name is None else name,format='ASCII',pack=False)

View File

@@ -105,4 +105,5 @@ for name in filenames:
 homogenization=options.homogenization,comments=header)
 damask.util.croak(geom)
-geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',pack=False)
+geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',
+format='ASCII',pack=False)

View File

@@ -228,4 +228,5 @@ for name in filenames:
 homogenization=options.homogenization,comments=header)
 damask.util.croak(geom)
-geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',pack=False)
+geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',
+format='ASCII',pack=False)

View File

@@ -172,4 +172,4 @@ for name in filenames:
 geom=geom.duplicate(microstructure[0:grid_original[0],0:grid_original[1],0:grid_original[2]])
 geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
-geom.to_file(sys.stdout if name is None else name,pack=False)
+geom.to_file(sys.stdout if name is None else name,format='ASCII',pack=False)

View File

@@ -90,7 +90,7 @@ class myThread (threading.Thread):
 direction[i]*=2.
 i+= 1
 perturbedSeedsTable.set('pos',coords)
-perturbedSeedsTable.to_ASCII(perturbedSeedsVFile)
+perturbedSeedsTable.to_file(perturbedSeedsVFile)
 #--- do tesselation with perturbed seed file ------------------------------------------------------
 perturbedGeomVFile.close()

View File

@@ -65,5 +65,5 @@ for name in filenames:
 table = damask.Table(seeds[mask],{'pos':(3,)},comments)
 table.add('microstructure',microstructure[mask])
-table.to_ASCII(sys.stdout if name is None else \
+table.to_file(sys.stdout if name is None else \
 os.path.splitext(name)[0]+'.seeds')

View File

@@ -92,5 +92,5 @@ for name in filenames:
 table = damask.Table(seeds,{'pos':(3,),'microstructure':(1,)},comments)
 table.set('microstructure',table.get('microstructure').astype(np.int))
-table.to_ASCII(sys.stdout if name is None else \
-os.path.splitext(name)[0]+'_poked_{}.seeds'.format(options.N))
+table.to_file(sys.stdout if name is None else \
+os.path.splitext(name)[0]+f'_poked_{options.N}.seeds')

View File

@@ -162,4 +162,4 @@ for name in filenames:
 else np.random.normal(loc = options.mean, scale = options.sigma, size = options.N)
 table.add('weight',weights)
-table.to_ASCII(sys.stdout if name is None else name)
+table.to_file(sys.stdout if name is None else name)

View File

@@ -300,9 +300,9 @@ class Colormap(mpl.colors.ListedColormap):
 if fhandle is None:
 with open(colormap.name.replace(' ','_')+'.txt', 'w') as f:
-t.to_ASCII(f,True)
+t.to_file(f,new_style=True)
 else:
-t.to_ASCII(fhandle,True)
+t.to_file(fhandle,new_style=True)
 @staticmethod
 def _export_GOM(colormap,fhandle=None):

View File

@@ -447,36 +447,54 @@ class Geom:
 )
-def to_file(self,fname,pack=None):
+def to_file(self,fname,format='vtr',pack=None):
 """
 Writes a geom file.
 Parameters
 ----------
+fname : str or file handle
+Geometry file to write.
+format : {'vtr', 'ASCII'}, optional
+File format, defaults to 'vtr'. Available formats are:
+- vtr: VTK rectilinear grid file, extension '.vtr'.
+- ASCII: Plain text file, extension '.geom'.
+pack : bool, optional
+Compress ASCII geometry with 'x of y' and 'a to b'.
+"""
+def _to_ASCII(geom,fname,pack=None):
+"""
+Writes a geom file.
+Parameters
+----------
+geom : Geom object
+Geometry to write.
 fname : str or file handle
 Geometry file to write.
 pack : bool, optional
 Compress geometry with 'x of y' and 'a to b'.
 """
-header = [f'{len(self.comments)+4} header'] + self.comments
-header.append('grid a {} b {} c {}'.format(*self.get_grid()))
-header.append('size x {} y {} z {}'.format(*self.get_size()))
-header.append('origin x {} y {} z {}'.format(*self.get_origin()))
-header.append(f'homogenization {self.get_homogenization()}')
-grid = self.get_grid()
+header = [f'{len(geom.comments)+4} header'] + geom.comments
+header.append('grid a {} b {} c {}'.format(*geom.get_grid()))
+header.append('size x {} y {} z {}'.format(*geom.get_size()))
+header.append('origin x {} y {} z {}'.format(*geom.get_origin()))
+header.append(f'homogenization {geom.get_homogenization()}')
+grid = geom.get_grid()
 if pack is None:
-plain = grid.prod()/self.N_microstructure < 250
+plain = grid.prod()/geom.N_microstructure < 250
 else:
 plain = not pack
 if plain:
-format_string = '%g' if self.microstructure.dtype in np.sctypes['float'] else \
-'%{}i'.format(1+int(np.floor(np.log10(np.nanmax(self.microstructure)))))
+format_string = '%g' if geom.microstructure.dtype in np.sctypes['float'] else \
+'%{}i'.format(1+int(np.floor(np.log10(np.nanmax(geom.microstructure)))))
 np.savetxt(fname,
-self.microstructure.reshape([grid[0],np.prod(grid[1:])],order='F').T,
+geom.microstructure.reshape([grid[0],np.prod(grid[1:])],order='F').T,
 header='\n'.join(header), fmt=format_string, comments='')
 else:
 try:
@@ -487,7 +505,7 @@ class Geom:
 compressType = None
 former = start = -1
 reps = 0
-for current in self.microstructure.flatten('F'):
+for current in geom.microstructure.flatten('F'):
 if abs(current - former) == 1 and (start - current) == reps*(former - current):
 compressType = 'to'
 reps += 1
@@ -518,31 +536,39 @@ class Geom:
 f.write(f'{reps} of {former}\n')
-def to_vtr(self,fname=None):
+def _to_vtr(geom,fname=None):
 """
 Generates vtk rectilinear grid.
 Parameters
 ----------
+geom : Geom object
+Geometry to write.
 fname : str, optional
 Filename to write. If no file is given, a string is returned.
 Valid extension is .vtr, it will be appended if not given.
 """
-v = VTK.from_rectilinearGrid(self.grid,self.size,self.origin)
-v.add(self.microstructure.flatten(order='F'),'materialpoint')
-v.add_comments(self.comments)
+v = VTK.from_rectilinearGrid(geom.grid,geom.size,geom.origin)
+v.add(geom.microstructure.flatten(order='F'),'materialpoint')
+v.add_comments(geom.comments)
 if fname:
 v.to_file(fname if str(fname).endswith('.vtr') else str(fname)+'.vtr')
 else:
 sys.stdout.write(v.__repr__())
+if format.lower() == 'ascii':
+return _to_ASCII(self,fname,pack)
+elif format.lower() == 'vtr':
+return _to_vtr(self,fname)
+else:
+raise TypeError(f'Unknown format {format}.')
-def as_ASCII(self):
+def as_ASCII(self,pack=False):
 """Format geometry as human-readable ASCII."""
 f = StringIO()
-self.to_file(f)
+self.to_file(f,'ASCII',pack)
 f.seek(0)
 return ''.join(f.readlines())
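
The pack option refers to the run-length compression of the plain-text geom body handled by the loop above: repeated IDs collapse to 'n of ID' and consecutive IDs to 'a to b'. A small sketch of the expected effect, assuming the conventional compressed output of this writer:

    import numpy as np
    import damask

    # 8 cells along x: four repeats of ID 1 followed by the consecutive IDs 2..5
    ms   = np.array([1,1,1,1,2,3,4,5]).reshape(8,1,1)
    geom = damask.Geom(ms,[8.,1.,1.])
    geom.to_file('packed.geom',format='ASCII',pack=True)
    # body after the header is expected to read:
    #   4 of 1
    #   2 to 5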

View File

@@ -313,12 +313,29 @@ class Table:
 self.shapes[key] = other.shapes[key]
-def to_ASCII(self,fname,new_style=False):
+def to_file(self,fname,format='ASCII',new_style=False):
 """
 Store as plain text file.
 Parameters
 ----------
+fname : file, str, or pathlib.Path
+Filename or file for writing.
+format : {ASCII'}, optional
+File format, defaults to 'ASCII'. Available formats are:
+- ASCII: Plain text file, extension '.txt'.
+new_style : Boolean, optional
+Write table in new style, indicating header lines by comment sign ('#') only.
+"""
+def _to_ASCII(table,fname,new_style=False):
+"""
+Store as plain text file.
+Parameters
+----------
+table : Table object
+Table to write.
 fname : file, str, or pathlib.Path
 Filename or file for writing.
 new_style : Boolean, optional
@@ -327,21 +344,18 @@ class Table:
 """
 seen = set()
 labels = []
-for l in [x for x in self.data.columns if not (x in seen or seen.add(x))]:
-if self.shapes[l] == (1,):
+for l in [x for x in table.data.columns if not (x in seen or seen.add(x))]:
+if table.shapes[l] == (1,):
 labels.append(f'{l}')
-elif len(self.shapes[l]) == 1:
+elif len(table.shapes[l]) == 1:
 labels += [f'{i+1}_{l}' \
-for i in range(self.shapes[l][0])]
+for i in range(table.shapes[l][0])]
 else:
-labels += [f'{util.srepr(self.shapes[l],"x")}:{i+1}_{l}' \
-for i in range(np.prod(self.shapes[l]))]
-if new_style:
-header = [f'# {comment}' for comment in self.comments]
-else:
-header = [f'{len(self.comments)+1} header'] \
-+ self.comments \
+labels += [f'{util.srepr(table.shapes[l],"x")}:{i+1}_{l}' \
+for i in range(np.prod(table.shapes[l]))]
+header = [f'# {comment}' for comment in table.comments] if new_style else \
+[f'{len(table.comments)+1} header'] + table.comments
 try:
 f = open(fname,'w')
@@ -349,4 +363,9 @@ class Table:
 f = fname
 for line in header + [' '.join(labels)]: f.write(line+'\n')
-self.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False)
+table.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False)
+if format.lower() == 'ascii':
+return _to_ASCII(self,fname,new_style)
+else:
+raise TypeError(f'Unknown format {format}.')
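
The two Table header styles handled above differ only in the header block; a short sketch for an assumed single-column table, mirroring the tests further down:

    import numpy as np
    import damask

    t = damask.Table(np.arange(3.).reshape(3,1),{'v':(1,)},['created for illustration'])
    t.to_file('old_style.txt')                  # '2 header' line, the comment, then the label row
    t.to_file('new_style.txt',new_style=True)   # comment prefixed with '#', then the label row
    # anything other than format='ASCII' raises, as in the new test below:
    # t.to_file('t.txt',format='vtr')  ->  TypeError: Unknown format vtr.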

View File

@@ -63,13 +63,13 @@ class TestGeom:
 def test_write_read_str(self,default,tmpdir):
-default.to_file(str(tmpdir/'default.geom'))
+default.to_file(str(tmpdir/'default.geom'),format='ASCII')
 new = Geom.from_file(str(tmpdir/'default.geom'))
 assert geom_equal(default,new)
 def test_write_read_file(self,default,tmpdir):
 with open(tmpdir/'default.geom','w') as f:
-default.to_file(f,pack=True)
+default.to_file(f,format='ASCII',pack=True)
 with open(tmpdir/'default.geom') as f:
 new = Geom.from_file(f)
 assert geom_equal(default,new)
@@ -82,8 +82,7 @@ class TestGeom:
 assert geom_equal(default,new)
 def test_read_write_vtr(self,default,tmpdir):
-default.to_vtr(tmpdir/'default')
-print(default.to_vtr())
+default.to_file(tmpdir/'default',format='vtr')
 for _ in range(10):
 time.sleep(.2)
 if os.path.exists(tmpdir/'default.vtr'): break
@@ -110,7 +109,7 @@ class TestGeom:
 @pytest.mark.parametrize('pack',[True,False])
 def test_pack(self,default,tmpdir,pack):
-default.to_file(tmpdir/'default.geom',pack=pack)
+default.to_file(tmpdir/'default.geom',format='ASCII',pack=pack)
 new = Geom.from_file(tmpdir/'default.geom')
 assert geom_equal(new,default)
@@ -140,6 +139,10 @@ class TestGeom:
 with pytest.raises(TypeError):
 default.set_homogenization(homogenization=0)
+def test_invalid_write_format(self,default):
+with pytest.raises(TypeError):
+default.to_file(format='invalid')
 @pytest.mark.parametrize('directions,reflect',[
 (['x'], False),
 (['x','y','z'],True),
@@ -196,7 +199,7 @@ class TestGeom:
 current = default.clean(stencil,selection,periodic)
 reference = reference_dir/f'clean_{stencil}_{"+".join(map(str,[None] if selection is None else selection))}_{periodic}'
 if update and stencil > 1:
-current.to_vtr(reference)
+current.to_file(reference,format='vtr')
 for _ in range(10):
 time.sleep(.2)
 if os.path.exists(reference.with_suffix('.vtr')): break

View File

@@ -35,7 +35,7 @@ class TestTable:
 @pytest.mark.parametrize('mode',['str','path'])
 def test_write_read(self,default,tmpdir,mode):
-default.to_ASCII(tmpdir/'default.txt')
+default.to_file(tmpdir/'default.txt')
 if mode == 'path':
 new = Table.from_ASCII(tmpdir/'default.txt')
 elif mode == 'str':
@@ -43,19 +43,22 @@ class TestTable:
 assert all(default.data==new.data) and default.shapes == new.shapes
 def test_write_read_file(self,default,tmpdir):
-with open(tmpdir.join('default.txt'),'w') as f:
-default.to_ASCII(f)
-with open(tmpdir.join('default.txt')) as f:
+with open(tmpdir/'default.txt','w') as f:
+default.to_file(f)
+with open(tmpdir/'default.txt') as f:
 new = Table.from_ASCII(f)
 assert all(default.data==new.data) and default.shapes == new.shapes
 def test_write_read_new_style(self,default,tmpdir):
-with open(tmpdir.join('new_style.txt'),'w') as f:
-default.to_ASCII(f,new_style=True)
-with open(tmpdir.join('new_style.txt')) as f:
+with open(tmpdir/'new_style.txt','w') as f:
+default.to_file(f,new_style=True)
+with open(tmpdir/'new_style.txt') as f:
 new = Table.from_ASCII(f)
 assert all(default.data==new.data) and default.shapes == new.shapes
+def test_write_invalid_format(self,default,tmpdir):
+with pytest.raises(TypeError):
+default.to_file(tmpdir/'shouldnotbethere.txt',format='invalid')
 @pytest.mark.parametrize('mode',['str','path'])
 def test_read_ang(self,reference_dir,mode):