ensure that files are closed automatically

reported by Karo Sedighiani
Martin Diehl 2023-12-23 07:04:04 +01:00
parent af5bbed003
commit a8e979e904
5 changed files with 69 additions and 79 deletions
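The whole commit follows one pattern: `util.open_text` becomes a context manager, and every caller switches from grabbing a bare file handle to a `with` block, so files opened from a path are closed automatically while caller-supplied handles are left alone. A minimal standalone sketch of that pattern (the body below is illustrative, not the committed code — the committed version is in the last hunk, and the `try/finally` here is an extra safeguard that the diff itself does not add):

    import contextlib
    from pathlib import Path
    from typing import Generator, TextIO, Union

    FileHandle = Union[TextIO, str, Path]

    @contextlib.contextmanager
    def open_text(fname: FileHandle, mode: str = 'r') -> Generator[TextIO, None, None]:
        """Yield a text handle; close it only if it was opened here from a path."""
        if isinstance(fname, (str, Path)):
            f = open(Path(fname).expanduser(), mode, newline='\n' if mode == 'w' else None)
            try:
                yield f
            finally:
                f.close()        # guaranteed cleanup for handles opened from a path
        else:
            yield fname          # pre-opened handles stay open; the caller owns them

    with open_text('demo.txt', 'w') as f:    # usage: the handle can no longer leak
        f.write('hello\n')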

View File

@@ -2,7 +2,7 @@ import os
 import json
 import functools
 import colorsys
-from typing import Optional, Union, TextIO
+from typing import Optional, Union
 from itertools import chain

 import numpy as np
@@ -344,30 +344,6 @@ class Colormap(mpl.colors.ListedColormap):
         return Colormap(np.array(rev.colors),rev.name[:-4] if rev.name.endswith('_r_r') else rev.name)

-    def _get_file_handle(self,
-                         fname: Union[FileHandle, None],
-                         suffix: str = '') -> TextIO:
-        """
-        Provide file handle.
-
-        Parameters
-        ----------
-        fname : file, str, pathlib.Path, or None
-            Name or handle of file.
-            If None, colormap name + suffix.
-        suffix: str, optional
-            Extension to use for colormap file.
-            Defaults to empty.
-
-        Returns
-        -------
-        f : file object
-            File handle with write access.
-
-        """
-        return util.open_text(self.name.replace(' ','_')+suffix if fname is None else fname, 'w')
-
     def save_paraview(self,
                       fname: Optional[FileHandle] = None):
         """
@@ -387,9 +363,9 @@ class Colormap(mpl.colors.ListedColormap):
                 'RGBPoints':list(chain.from_iterable([(i,*c) for i,c in enumerate(self.colors.round(6))]))
                }]
-        fhandle = self._get_file_handle(fname,'.json')
-        json.dump(out,fhandle,indent=4)
-        fhandle.write('\n')
+        with util.open_text(self.name.replace(' ','_')+'.json' if fname is None else fname, 'w') as fhandle:
+            json.dump(out,fhandle,indent=4)
+            fhandle.write('\n')

     def save_ASCII(self,
@@ -405,7 +381,9 @@ class Colormap(mpl.colors.ListedColormap):
         """
         labels = {'RGBA':4} if self.colors.shape[1] == 4 else {'RGB': 3}
         t = Table(labels,self.colors,[f'Creator: {util.execution_stamp("Colormap")}'])
-        t.save(self._get_file_handle(fname,'.txt'))
+        with util.open_text(self.name.replace(' ','_')+'.txt' if fname is None else fname, 'w') as fhandle:
+            t.save(fhandle)

     def save_GOM(self, fname: Optional[FileHandle] = None):
@@ -425,7 +403,8 @@ class Colormap(mpl.colors.ListedColormap):
                   + ' '.join([f' 0 {c[0]} {c[1]} {c[2]} 255 1' for c in reversed((self.colors*255).astype(np.int64))]) \
                   + '\n'
-        self._get_file_handle(fname,'.legend').write(GOM_str)
+        with util.open_text(self.name.replace(' ','_')+'.legend' if fname is None else fname, 'w') as fhandle:
+            fhandle.write(GOM_str)

     def save_gmsh(self,
@@ -443,7 +422,9 @@ class Colormap(mpl.colors.ListedColormap):
         gmsh_str = 'View.ColorTable = {\n' \
                   +'\n'.join([f'{c[0]},{c[1]},{c[2]},' for c in self.colors[:,:3]*255]) \
                   +'\n}\n'
-        self._get_file_handle(fname,'.msh').write(gmsh_str)
+        with util.open_text(self.name.replace(' ','_')+'.msh' if fname is None else fname, 'w') as fhandle:
+            fhandle.write(gmsh_str)

     @staticmethod
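All four `save_*` methods now have the same shape: build the output, then write it inside `with util.open_text(...)`, with the target defaulting to the colormap name (spaces replaced by underscores) plus the format suffix when `fname` is None. A hedged usage sketch — `Colormap.from_predefined` and the predefined name 'strain' are assumed to be available, only `save_paraview` and `save_ASCII` are taken directly from the hunks above:

    import damask

    cm = damask.Colormap.from_predefined('strain')   # 'strain' assumed to be a predefined map
    cm.save_paraview()                               # fname=None: writes '<name>.json' and closes it
    cm.save_ASCII('my_map.txt')                      # explicit path, opened and closed by util.open_text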

View File

@@ -70,9 +70,9 @@ class LoadcaseGrid(YAML):
             if key not in kwargs:
                 kwargs[key] = default

-        fhandle = util.open_text(fname,'w')
-        try:
-            fhandle.write(yaml.dump(self,Dumper=MaskedMatrixDumper,**kwargs))
-        except TypeError:                                         # compatibility with old pyyaml
-            del kwargs['sort_keys']
-            fhandle.write(yaml.dump(self,Dumper=MaskedMatrixDumper,**kwargs))
+        with util.open_text(fname,'w') as fhandle:
+            try:
+                fhandle.write(yaml.dump(self,Dumper=MaskedMatrixDumper,**kwargs))
+            except TypeError:                                     # compatibility with old pyyaml
+                del kwargs['sort_keys']
+                fhandle.write(yaml.dump(self,Dumper=MaskedMatrixDumper,**kwargs))
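The `try/except TypeError` kept inside the new `with` block handles pyyaml versions whose `yaml.dump` does not accept `sort_keys`: the keyword is dropped and the dump retried. The same fallback reappears in `YAML.save` further down. A minimal reproduction of the idea, with a hypothetical helper name and independent of damask:

    import yaml

    def dump_compat(obj, **kwargs):
        """Retry yaml.dump without 'sort_keys' when the installed pyyaml is too old for it."""
        try:
            return yaml.dump(obj, **kwargs)
        except TypeError:                    # old pyyaml: dump() has no 'sort_keys' keyword
            kwargs.pop('sort_keys', None)
            return yaml.dump(obj, **kwargs)

    print(dump_compat({'b': 1, 'a': 2}, sort_keys=False))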

View File

@@ -277,28 +277,28 @@ class Table:
             Table data from file.

         """
-        f = util.open_text(fname)
-        f.seek(0)
-
-        comments = []
-        while (line := f.readline().strip()).startswith('#'):
-            comments.append(line.lstrip('#').strip())
-        labels = line.split()
-
-        shapes = {}
-        for label in labels:
-            tensor_column = re.search(r'[0-9,x]*?:[0-9]*?_',label)
-            if tensor_column:
-                my_shape = tensor_column.group().split(':',1)[0].split('x')
-                shapes[label.split('_',1)[1]] = tuple([int(d) for d in my_shape])
-            else:
-                vector_column = re.match(r'[0-9]*?_',label)
-                if vector_column:
-                    shapes[label.split('_',1)[1]] = (int(label.split('_',1)[0]),)
-                else:
-                    shapes[label] = (1,)
-
-        data = pd.read_csv(f,names=list(range(len(labels))),sep=r'\s+')
+        with util.open_text(fname) as f:
+            f.seek(0)
+
+            comments = []
+            while (line := f.readline().strip()).startswith('#'):
+                comments.append(line.lstrip('#').strip())
+            labels = line.split()
+
+            shapes = {}
+            for label in labels:
+                tensor_column = re.search(r'[0-9,x]*?:[0-9]*?_',label)
+                if tensor_column:
+                    my_shape = tensor_column.group().split(':',1)[0].split('x')
+                    shapes[label.split('_',1)[1]] = tuple([int(d) for d in my_shape])
+                else:
+                    vector_column = re.match(r'[0-9]*?_',label)
+                    if vector_column:
+                        shapes[label.split('_',1)[1]] = (int(label.split('_',1)[0]),)
+                    else:
+                        shapes[label] = (1,)
+
+            data = pd.read_csv(f,names=list(range(len(labels))),sep=r'\s+')

         return Table(shapes,data,comments)
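The regular expressions in this loader encode the column-label convention of the ASCII table format: `3x3:1_F` marks one component of a 3×3 tensor column, `2_v` one component of a length-2 vector, and a bare label a scalar. A standalone check of that parsing, copied out of the loop above into a hypothetical helper:

    import re

    def label_shapes(labels):
        """Reproduce the shape detection used when reading column labels."""
        shapes = {}
        for label in labels:
            tensor_column = re.search(r'[0-9,x]*?:[0-9]*?_', label)
            if tensor_column:
                my_shape = tensor_column.group().split(':', 1)[0].split('x')
                shapes[label.split('_', 1)[1]] = tuple(int(d) for d in my_shape)
            else:
                vector_column = re.match(r'[0-9]*?_', label)
                if vector_column:
                    shapes[label.split('_', 1)[1]] = (int(label.split('_', 1)[0]),)
                else:
                    shapes[label] = (1,)
        return shapes

    print(label_shapes(['3x3:1_F', '2_v', 'phase']))   # {'F': (3, 3), 'v': (2,), 'phase': (1,)}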
@@ -339,10 +339,9 @@ class Table:
             Table data from file.

         """
-        f = util.open_text(fname)
-        f.seek(0)
-        content = f.readlines()
+        with util.open_text(fname) as f:
+            f.seek(0)
+            content = f.readlines()

         comments = [util.execution_stamp('Table','from_ang')]
         for line in content:
@@ -605,10 +604,9 @@ class Table:
                 labels += [f'{util.srepr(self.shapes[l],"x")}:{i+1}_{l}' \
                            for i in range(np.prod(self.shapes[l]))]

-        f = util.open_text(fname,'w')
-        f.write('\n'.join([f'# {c}' for c in self.comments] + [' '.join(labels)])+('\n' if labels else ''))
-        try:                                                      # backward compatibility
-            self.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False,lineterminator='\n')
-        except TypeError:
-            self.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False,line_terminator='\n')
+        with util.open_text(fname,'w') as f:
+            f.write('\n'.join([f'# {c}' for c in self.comments] + [' '.join(labels)])+('\n' if labels else ''))
+            try:                                                  # backward compatibility
+                self.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False,lineterminator='\n')
+            except TypeError:
+                self.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False,line_terminator='\n')
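The nested `try/except TypeError` around `to_csv` is unrelated to file handling: newer pandas spells the keyword `lineterminator`, while older releases only accept `line_terminator`, so the first call fails with a TypeError on old pandas and the legacy spelling is used instead. A minimal reproduction:

    import pandas as pd

    df = pd.DataFrame({'x': [1, 2]})
    try:                                    # newer pandas: keyword is 'lineterminator'
        txt = df.to_csv(sep=' ', index=False, lineterminator='\n')
    except TypeError:                       # older pandas: only 'line_terminator' exists
        txt = df.to_csv(sep=' ', index=False, line_terminator='\n')
    print(txt)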

View File

@@ -197,7 +197,9 @@ class YAML(dict):
             YAML from file.

         """
-        return cls(yaml.load(util.open_text(fname), Loader=SafeLoader))
+        with util.open_text(fname) as fhandle:
+            return cls(yaml.load(fhandle, Loader=SafeLoader))

     def save(self,
              fname: FileHandle,
@@ -220,12 +222,12 @@
         if 'sort_keys' not in kwargs:
             kwargs['sort_keys'] = False

-        fhandle = util.open_text(fname,'w')
-        try:
-            fhandle.write(yaml.dump(self,Dumper=NiceDumper,**kwargs))
-        except TypeError:                                         # compatibility with old pyyaml
-            del kwargs['sort_keys']
-            fhandle.write(yaml.dump(self,Dumper=NiceDumper,**kwargs))
+        with util.open_text(fname,'w') as fhandle:
+            try:
+                fhandle.write(yaml.dump(self,Dumper=NiceDumper,**kwargs))
+            except TypeError:                                     # compatibility with old pyyaml
+                del kwargs['sort_keys']
+                fhandle.write(yaml.dump(self,Dumper=NiceDumper,**kwargs))

     @property
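With both `load` and `save` wrapped in `with util.open_text(...)`, a round trip through a file no longer leaks handles. A hedged usage sketch, assuming `damask.YAML` is exported with the `load`/`save` signatures shown in the hunks; the dictionary content is made up:

    import damask

    cfg = damask.YAML({'a': [1, 2, 3], 'b': {'c': 'text'}})   # made-up content
    cfg.save('example.yaml')                 # file handle is closed when save returns
    print(damask.YAML.load('example.yaml'))  # same for loading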

View File

@@ -8,12 +8,13 @@ import shlex as _shlex
 import re as _re
 import signal as _signal
 import fractions as _fractions
+import contextlib as _contextlib
 from collections import abc as _abc, OrderedDict as _OrderedDict
 from functools import reduce as _reduce, partial as _partial, wraps as _wraps
 import inspect
 from typing import Optional as _Optional, Callable as _Callable, Union as _Union, Iterable as _Iterable, \
                    Dict as _Dict, List as _List, Tuple as _Tuple, Literal as _Literal, \
-                   Any as _Any, TextIO as _TextIO
+                   Any as _Any, TextIO as _TextIO, Generator as _Generator
 from pathlib import Path as _Path

 import numpy as _np
@@ -193,11 +194,15 @@ def run(cmd: str,
     return stdout, stderr

+@_contextlib.contextmanager
 def open_text(fname: _FileHandle,
-              mode: _Literal['r','w'] = 'r') -> _TextIO:                         # noqa
+              mode: _Literal['r','w'] = 'r') -> _Generator[_TextIO, None, None]: # noqa
     """
-    Open a text file.
+    Open a text file with Unix line endings

+    If a path or string is given, a context manager ensures that
+    the file handle is closed.
+    If a file handle is given, it remains unmodified.
+
     Parameters
     ----------
@@ -211,8 +216,12 @@ def open_text(fname: _FileHandle,
     f : file handle

     """
-    return fname if not isinstance(fname, (str,_Path)) else \
-           open(_Path(fname).expanduser(),mode,newline=('\n' if mode == 'w' else None))
+    if isinstance(fname, (str,_Path)):
+        fhandle = open(_Path(fname).expanduser(),mode,newline=('\n' if mode == 'w' else None))
+        yield fhandle
+        fhandle.close()
+    else:
+        yield fname

 def execution_stamp(class_name: str,
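A short usage note on the new behaviour of `open_text` itself: a path argument is opened and closed by the context manager, while an already-open handle is yielded as-is and remains open afterwards, so the caller keeps ownership. Sketch, assuming the `damask.util` module path implied by the diff:

    import io
    from damask import util

    # Path: open_text opens the file and closes it when the block exits.
    with util.open_text('notes.txt', 'w') as f:
        f.write('written via a path\n')

    # Pre-opened handle: yielded unchanged, still usable (and open) afterwards.
    buf = io.StringIO()
    with util.open_text(buf, 'w') as f:
        f.write('written via a handle\n')
    print(buf.getvalue())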