Merge branch 'with-to-file-methods' into 'development'

Modified file writing of Geom and Table to a general to_file() that takes a "format=" argument.

See merge request damask/DAMASK!213
Franz Roters 2020-09-04 10:27:30 +02:00
commit 23d6151197
31 changed files with 202 additions and 149 deletions
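The unified API after this change, in a minimal usage sketch: Geom.to_file() now defaults to format='vtr' and accepts format='ASCII' plus pack for the legacy .geom writer, while Table.to_file() defaults to (and currently only accepts) format='ASCII'. The constructor calls below follow the call sites visible in the scripts of this diff; file names and data are placeholders, and writing a '.vtr' file assumes a Python environment with VTK available.

import numpy as np
import damask

# Geom(microstructure, size, ...) as used by the geom scripts in this diff;
# further keyword arguments (origin, homogenization, comments) are assumed optional.
geom = damask.Geom(np.ones((4,4,4),dtype=int),np.array([1.0,1.0,1.0]))
geom.to_file('example',format='vtr')                     # new default, writes example.vtr
geom.to_file('example.geom',format='ASCII',pack=False)   # legacy plain-text geometry

# Table(data, shapes, comments) as used by the seeds scripts in this diff.
table = damask.Table(np.random.rand(5,3),{'pos':(3,)},['example comment'])
table.to_file('example.txt')                             # 'ASCII' is the default (and only) table format
table.to_file('example_new.txt',new_style=True)          # header lines prefixed with '#'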

@@ -1 +1 @@
Subproject commit 1ca2223c68475bbcb9da633353dbe4a98c18db0d
Subproject commit c6dc3f116fc6007caf076772d5c29f0a0523544d

View File

@@ -49,4 +49,4 @@ for name in filenames:
table.add('coord',rot_to_TSL.broadcast_to(coord.shape[0]) @ coord,scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)
table.to_file(sys.stdout if name is None else name)

View File

@@ -45,4 +45,4 @@ for name in filenames:
np.cumprod(table.get(label),0) if options.product else np.cumsum(table.get(label),0),
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)
table.to_file(sys.stdout if name is None else name)

View File

@@ -60,4 +60,4 @@ for filename in options.filenames:
os.mkdir(dirname,0o755)
file_out = '{}_inc{}.txt'.format(os.path.splitext(os.path.split(filename)[-1])[0],
inc[3:].zfill(N_digits))
table.to_ASCII(os.path.join(dirname,file_out))
table.to_file(os.path.join(dirname,file_out))

View File

@@ -191,4 +191,4 @@ for name in filenames:
volumeMismatch.reshape(-1,1,order='F'),
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)
table.to_file(sys.stdout if name is None else name)

View File

@@ -55,4 +55,4 @@ for name in filenames:
curl.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape),order='F'),
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)
table.to_file(sys.stdout if name is None else name)

View File

@@ -71,4 +71,4 @@ for name in filenames:
derivative(table.get(options.coordinates),table.get(label)),
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)
table.to_file(sys.stdout if name is None else name)

View File

@@ -60,7 +60,7 @@ for name in filenames:
table.add('fluct({}).{}'.format(options.f,options.pos),
damask.grid_filters.node_displacement_fluct(size,F).reshape(-1,3,order='F'),
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt')
table.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt')
else:
table.add('avg({}).{}'.format(options.f,options.pos),
damask.grid_filters.cell_displacement_avg(size,F).reshape(-1,3,order='F'),
@@ -68,4 +68,4 @@ for name in filenames:
table.add('fluct({}).{}'.format(options.f,options.pos),
damask.grid_filters.cell_displacement_fluct(size,F).reshape(-1,3,order='F'),
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)
table.to_file(sys.stdout if name is None else name)

View File

@@ -55,4 +55,4 @@ for name in filenames:
div.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)//3,order='F'),
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)
table.to_file(sys.stdout if name is None else name)

View File

@@ -184,4 +184,4 @@ for name in filenames:
distance[i,:],
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)
table.to_file(sys.stdout if name is None else name)

View File

@@ -73,4 +73,4 @@ for name in filenames:
mode = 'wrap' if options.periodic else 'nearest'),
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)
table.to_file(sys.stdout if name is None else name)

View File

@@ -55,4 +55,4 @@ for name in filenames:
grad.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)*3,order='F'),
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)
table.to_file(sys.stdout if name is None else name)

View File

@@ -61,4 +61,4 @@ for name in filenames:
for data in options.label:
table.add(data+'_addIndexed',indexedTable.get(data)[idx],scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)
table.to_file(sys.stdout if name is None else name)

View File

@@ -147,4 +147,4 @@ for name in filenames:
if 'axisangle' in options.output:
table.add('om({})'.format(label),o.as_axisangle(options.degrees), scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)
table.to_file(sys.stdout if name is None else name)

View File

@@ -189,4 +189,4 @@ for name in filenames:
for i,label in enumerate(labels):
table.add(label,S[:,i],scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)
table.to_file(sys.stdout if name is None else name)

View File

@@ -58,4 +58,4 @@ for name in filenames:
rng.shuffle(uniques)
table.set(label,uniques[inverse], scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)
table.to_file(sys.stdout if name is None else name)

View File

@@ -154,4 +154,4 @@ for name in filenames:
homogenization=options.homogenization,comments=header)
damask.util.croak(geom)
geom.to_file(os.path.splitext(name)[0]+'.geom',pack=False)
geom.to_file(os.path.splitext(name)[0]+'.geom',format='ASCII',pack=False)

View File

@@ -89,4 +89,4 @@ geom=damask.Geom(microstructure,options.size,
comments=[scriptID + ' ' + ' '.join(sys.argv[1:])])
damask.util.croak(geom)
geom.to_file(sys.stdout if name is None else name,pack=False)
geom.to_file(sys.stdout if name is None else name,format='ASCII',pack=False)

View File

@@ -142,4 +142,4 @@ geom = damask.Geom(microstructure.reshape(grid),
homogenization=options.homogenization,comments=header)
damask.util.croak(geom)
geom.to_file(sys.stdout if name is None else name,pack=False)
geom.to_file(sys.stdout if name is None else name,format='ASCII',pack=False)

View File

@@ -105,4 +105,5 @@ for name in filenames:
homogenization=options.homogenization,comments=header)
damask.util.croak(geom)
geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',pack=False)
geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',
format='ASCII',pack=False)

View File

@@ -228,4 +228,5 @@ for name in filenames:
homogenization=options.homogenization,comments=header)
damask.util.croak(geom)
geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',pack=False)
geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',
format='ASCII',pack=False)

View File

@@ -172,4 +172,4 @@ for name in filenames:
geom=geom.duplicate(microstructure[0:grid_original[0],0:grid_original[1],0:grid_original[2]])
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
geom.to_file(sys.stdout if name is None else name,pack=False)
geom.to_file(sys.stdout if name is None else name,format='ASCII',pack=False)

View File

@@ -90,7 +90,7 @@ class myThread (threading.Thread):
direction[i]*=2.
i+= 1
perturbedSeedsTable.set('pos',coords)
perturbedSeedsTable.to_ASCII(perturbedSeedsVFile)
perturbedSeedsTable.to_file(perturbedSeedsVFile)
#--- do tesselation with perturbed seed file ------------------------------------------------------
perturbedGeomVFile.close()

View File

@@ -65,5 +65,5 @@ for name in filenames:
table = damask.Table(seeds[mask],{'pos':(3,)},comments)
table.add('microstructure',microstructure[mask])
table.to_ASCII(sys.stdout if name is None else \
os.path.splitext(name)[0]+'.seeds')
table.to_file(sys.stdout if name is None else \
os.path.splitext(name)[0]+'.seeds')

View File

@@ -92,5 +92,5 @@ for name in filenames:
table = damask.Table(seeds,{'pos':(3,),'microstructure':(1,)},comments)
table.set('microstructure',table.get('microstructure').astype(np.int))
table.to_ASCII(sys.stdout if name is None else \
os.path.splitext(name)[0]+'_poked_{}.seeds'.format(options.N))
table.to_file(sys.stdout if name is None else \
os.path.splitext(name)[0]+f'_poked_{options.N}.seeds')

View File

@@ -162,4 +162,4 @@ for name in filenames:
else np.random.normal(loc = options.mean, scale = options.sigma, size = options.N)
table.add('weight',weights)
table.to_ASCII(sys.stdout if name is None else name)
table.to_file(sys.stdout if name is None else name)

View File

@@ -300,9 +300,9 @@ class Colormap(mpl.colors.ListedColormap):
if fhandle is None:
with open(colormap.name.replace(' ','_')+'.txt', 'w') as f:
t.to_ASCII(f,True)
t.to_file(f,new_style=True)
else:
t.to_ASCII(fhandle,True)
t.to_file(fhandle,new_style=True)
@staticmethod
def _export_GOM(colormap,fhandle=None):

View File

@@ -447,7 +447,7 @@ class Geom:
)
def to_file(self,fname,pack=None):
def to_file(self,fname,format='vtr',pack=None):
"""
Writes a geom file.
@@ -455,94 +455,120 @@ class Geom:
----------
fname : str or file handle
Geometry file to write.
format : {'vtr', 'ASCII'}, optional
File format, defaults to 'vtr'. Available formats are:
- vtr: VTK rectilinear grid file, extension '.vtr'.
- ASCII: Plain text file, extension '.geom'.
pack : bool, optional
Compress geometry with 'x of y' and 'a to b'.
Compress ASCII geometry with 'x of y' and 'a to b'.
"""
header = [f'{len(self.comments)+4} header'] + self.comments
header.append('grid a {} b {} c {}'.format(*self.get_grid()))
header.append('size x {} y {} z {}'.format(*self.get_size()))
header.append('origin x {} y {} z {}'.format(*self.get_origin()))
header.append(f'homogenization {self.get_homogenization()}')
def _to_ASCII(geom,fname,pack=None):
"""
Writes a geom file.
grid = self.get_grid()
Parameters
----------
geom : Geom object
Geometry to write.
fname : str or file handle
Geometry file to write.
pack : bool, optional
Compress geometry with 'x of y' and 'a to b'.
if pack is None:
plain = grid.prod()/self.N_microstructure < 250
"""
header = [f'{len(geom.comments)+4} header'] + geom.comments
header.append('grid a {} b {} c {}'.format(*geom.get_grid()))
header.append('size x {} y {} z {}'.format(*geom.get_size()))
header.append('origin x {} y {} z {}'.format(*geom.get_origin()))
header.append(f'homogenization {geom.get_homogenization()}')
grid = geom.get_grid()
if pack is None:
plain = grid.prod()/geom.N_microstructure < 250
else:
plain = not pack
if plain:
format_string = '%g' if geom.microstructure.dtype in np.sctypes['float'] else \
'%{}i'.format(1+int(np.floor(np.log10(np.nanmax(geom.microstructure)))))
np.savetxt(fname,
geom.microstructure.reshape([grid[0],np.prod(grid[1:])],order='F').T,
header='\n'.join(header), fmt=format_string, comments='')
else:
try:
f = open(fname,'w')
except TypeError:
f = fname
compressType = None
former = start = -1
reps = 0
for current in geom.microstructure.flatten('F'):
if abs(current - former) == 1 and (start - current) == reps*(former - current):
compressType = 'to'
reps += 1
elif current == former and start == former:
compressType = 'of'
reps += 1
else:
if compressType is None:
f.write('\n'.join(header)+'\n')
elif compressType == '.':
f.write(f'{former}\n')
elif compressType == 'to':
f.write(f'{start} to {former}\n')
elif compressType == 'of':
f.write(f'{reps} of {former}\n')
compressType = '.'
start = current
reps = 1
former = current
if compressType == '.':
f.write(f'{former}\n')
elif compressType == 'to':
f.write(f'{start} to {former}\n')
elif compressType == 'of':
f.write(f'{reps} of {former}\n')
def _to_vtr(geom,fname=None):
"""
Generates vtk rectilinear grid.
Parameters
----------
geom : Geom object
Geometry to write.
fname : str, optional
Filename to write. If no file is given, a string is returned.
Valid extension is .vtr, it will be appended if not given.
"""
v = VTK.from_rectilinearGrid(geom.grid,geom.size,geom.origin)
v.add(geom.microstructure.flatten(order='F'),'materialpoint')
v.add_comments(geom.comments)
if fname:
v.to_file(fname if str(fname).endswith('.vtr') else str(fname)+'.vtr')
else:
sys.stdout.write(v.__repr__())
if format.lower() == 'ascii':
return _to_ASCII(self,fname,pack)
elif format.lower() == 'vtr':
return _to_vtr(self,fname)
else:
plain = not pack
raise TypeError(f'Unknown format {format}.')
if plain:
format_string = '%g' if self.microstructure.dtype in np.sctypes['float'] else \
'%{}i'.format(1+int(np.floor(np.log10(np.nanmax(self.microstructure)))))
np.savetxt(fname,
self.microstructure.reshape([grid[0],np.prod(grid[1:])],order='F').T,
header='\n'.join(header), fmt=format_string, comments='')
else:
try:
f = open(fname,'w')
except TypeError:
f = fname
compressType = None
former = start = -1
reps = 0
for current in self.microstructure.flatten('F'):
if abs(current - former) == 1 and (start - current) == reps*(former - current):
compressType = 'to'
reps += 1
elif current == former and start == former:
compressType = 'of'
reps += 1
else:
if compressType is None:
f.write('\n'.join(header)+'\n')
elif compressType == '.':
f.write(f'{former}\n')
elif compressType == 'to':
f.write(f'{start} to {former}\n')
elif compressType == 'of':
f.write(f'{reps} of {former}\n')
compressType = '.'
start = current
reps = 1
former = current
if compressType == '.':
f.write(f'{former}\n')
elif compressType == 'to':
f.write(f'{start} to {former}\n')
elif compressType == 'of':
f.write(f'{reps} of {former}\n')
def to_vtr(self,fname=None):
"""
Generates vtk rectilinear grid.
Parameters
----------
fname : str, optional
Filename to write. If no file is given, a string is returned.
Valid extension is .vtr, it will be appended if not given.
"""
v = VTK.from_rectilinearGrid(self.grid,self.size,self.origin)
v.add(self.microstructure.flatten(order='F'),'materialpoint')
v.add_comments(self.comments)
if fname:
v.to_file(fname if str(fname).endswith('.vtr') else str(fname)+'.vtr')
else:
sys.stdout.write(v.__repr__())
def as_ASCII(self):
def as_ASCII(self,pack=False):
"""Format geometry as human-readable ASCII."""
f = StringIO()
self.to_file(f)
self.to_file(f,'ASCII',pack)
f.seek(0)
return ''.join(f.readlines())
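For reference, the packed branch of the ASCII writer run-length encodes the flattened microstructure: runs of consecutive indices become 'a to b' lines, repeated indices become 'n of x' lines, and isolated values are written plainly. A standalone sketch of that loop, lifted from the diff above with the header handling stripped for illustration:

def pack_microstructure(values):
    """Encode a flat sequence of microstructure indices as .geom 'to'/'of' lines."""
    lines = []
    compressType = None
    former = start = -1
    reps = 0
    for current in values:
        if abs(current-former) == 1 and (start-current) == reps*(former-current):
            compressType = 'to'                       # extend a run of consecutive indices
            reps += 1
        elif current == former and start == former:
            compressType = 'of'                       # extend a run of repeated indices
            reps += 1
        else:                                         # flush the previous run, start a new one
            if compressType == '.':
                lines.append(f'{former}')
            elif compressType == 'to':
                lines.append(f'{start} to {former}')
            elif compressType == 'of':
                lines.append(f'{reps} of {former}')
            compressType = '.'
            start = current
            reps = 1
        former = current
    if compressType == '.':                           # flush the final run
        lines.append(f'{former}')
    elif compressType == 'to':
        lines.append(f'{start} to {former}')
    elif compressType == 'of':
        lines.append(f'{reps} of {former}')
    return lines

print(pack_microstructure([1,2,3,4,2,2,2,9]))         # ['1 to 4', '3 of 2', '9']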

View File

@@ -313,7 +313,7 @@ class Table:
self.shapes[key] = other.shapes[key]
def to_ASCII(self,fname,new_style=False):
def to_file(self,fname,format='ASCII',new_style=False):
"""
Store as plain text file.
@@ -321,32 +321,51 @@ class Table:
----------
fname : file, str, or pathlib.Path
Filename or file for writing.
format : {'ASCII'}, optional
File format, defaults to 'ASCII'. Available formats are:
- ASCII: Plain text file, extension '.txt'.
new_style : Boolean, optional
Write table in new style, indicating header lines by comment sign ('#') only.
"""
seen = set()
labels = []
for l in [x for x in self.data.columns if not (x in seen or seen.add(x))]:
if self.shapes[l] == (1,):
labels.append(f'{l}')
elif len(self.shapes[l]) == 1:
labels += [f'{i+1}_{l}' \
for i in range(self.shapes[l][0])]
else:
labels += [f'{util.srepr(self.shapes[l],"x")}:{i+1}_{l}' \
for i in range(np.prod(self.shapes[l]))]
def _to_ASCII(table,fname,new_style=False):
"""
Store as plain text file.
if new_style:
header = [f'# {comment}' for comment in self.comments]
Parameters
----------
table : Table object
Table to write.
fname : file, str, or pathlib.Path
Filename or file for writing.
new_style : Boolean, optional
Write table in new style, indicating header lines by comment sign ('#') only.
"""
seen = set()
labels = []
for l in [x for x in table.data.columns if not (x in seen or seen.add(x))]:
if table.shapes[l] == (1,):
labels.append(f'{l}')
elif len(table.shapes[l]) == 1:
labels += [f'{i+1}_{l}' \
for i in range(table.shapes[l][0])]
else:
labels += [f'{util.srepr(table.shapes[l],"x")}:{i+1}_{l}' \
for i in range(np.prod(table.shapes[l]))]
header = [f'# {comment}' for comment in table.comments] if new_style else \
[f'{len(table.comments)+1} header'] + table.comments
try:
f = open(fname,'w')
except TypeError:
f = fname
for line in header + [' '.join(labels)]: f.write(line+'\n')
table.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False)
if format.lower() == 'ascii':
return _to_ASCII(self,fname,new_style)
else:
header = [f'{len(self.comments)+1} header'] \
+ self.comments \
try:
f = open(fname,'w')
except TypeError:
f = fname
for line in header + [' '.join(labels)]: f.write(line+'\n')
self.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False)
raise TypeError(f'Unknown format {format}.')
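The Table writer keeps both header styles behind the new entry point; a minimal sketch of the difference, assuming the Table(data, shapes, comments) constructor pattern used earlier in this diff and that the comments list is stored as given:

import io
import numpy as np
import damask

table = damask.Table(np.zeros((2,3)),{'pos':(3,)},['created by example'])

old = io.StringIO()
table.to_file(old)                  # default old-style header, roughly:
                                    #   2 header
                                    #   created by example
                                    #   1_pos 2_pos 3_pos

new = io.StringIO()
table.to_file(new,new_style=True)   # new-style header, roughly:
                                    #   # created by example
                                    #   1_pos 2_pos 3_pos

try:
    table.to_file('unused.txt',format='invalid')
except TypeError as e:              # only 'ASCII' is implemented so far
    print(e)                        # Unknown format invalid.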

View File

@@ -63,13 +63,13 @@ class TestGeom:
def test_write_read_str(self,default,tmpdir):
default.to_file(str(tmpdir/'default.geom'))
default.to_file(str(tmpdir/'default.geom'),format='ASCII')
new = Geom.from_file(str(tmpdir/'default.geom'))
assert geom_equal(default,new)
def test_write_read_file(self,default,tmpdir):
with open(tmpdir/'default.geom','w') as f:
default.to_file(f,pack=True)
default.to_file(f,format='ASCII',pack=True)
with open(tmpdir/'default.geom') as f:
new = Geom.from_file(f)
assert geom_equal(default,new)
@@ -82,8 +82,7 @@ class TestGeom:
assert geom_equal(default,new)
def test_read_write_vtr(self,default,tmpdir):
default.to_vtr(tmpdir/'default')
print(default.to_vtr())
default.to_file(tmpdir/'default',format='vtr')
for _ in range(10):
time.sleep(.2)
if os.path.exists(tmpdir/'default.vtr'): break
@@ -110,7 +109,7 @@ class TestGeom:
@pytest.mark.parametrize('pack',[True,False])
def test_pack(self,default,tmpdir,pack):
default.to_file(tmpdir/'default.geom',pack=pack)
default.to_file(tmpdir/'default.geom',format='ASCII',pack=pack)
new = Geom.from_file(tmpdir/'default.geom')
assert geom_equal(new,default)
@@ -140,6 +139,10 @@ class TestGeom:
with pytest.raises(TypeError):
default.set_homogenization(homogenization=0)
def test_invalid_write_format(self,default):
with pytest.raises(TypeError):
default.to_file(format='invalid')
@pytest.mark.parametrize('directions,reflect',[
(['x'], False),
(['x','y','z'],True),
@@ -196,7 +199,7 @@ class TestGeom:
current = default.clean(stencil,selection,periodic)
reference = reference_dir/f'clean_{stencil}_{"+".join(map(str,[None] if selection is None else selection))}_{periodic}'
if update and stencil > 1:
current.to_vtr(reference)
current.to_file(reference,format='vtr')
for _ in range(10):
time.sleep(.2)
if os.path.exists(reference.with_suffix('.vtr')): break

View File

@@ -35,7 +35,7 @@ class TestTable:
@pytest.mark.parametrize('mode',['str','path'])
def test_write_read(self,default,tmpdir,mode):
default.to_ASCII(tmpdir/'default.txt')
default.to_file(tmpdir/'default.txt')
if mode == 'path':
new = Table.from_ASCII(tmpdir/'default.txt')
elif mode == 'str':
@@ -43,19 +43,22 @@ class TestTable:
assert all(default.data==new.data) and default.shapes == new.shapes
def test_write_read_file(self,default,tmpdir):
with open(tmpdir.join('default.txt'),'w') as f:
default.to_ASCII(f)
with open(tmpdir.join('default.txt')) as f:
with open(tmpdir/'default.txt','w') as f:
default.to_file(f)
with open(tmpdir/'default.txt') as f:
new = Table.from_ASCII(f)
assert all(default.data==new.data) and default.shapes == new.shapes
def test_write_read_new_style(self,default,tmpdir):
with open(tmpdir.join('new_style.txt'),'w') as f:
default.to_ASCII(f,new_style=True)
with open(tmpdir.join('new_style.txt')) as f:
with open(tmpdir/'new_style.txt','w') as f:
default.to_file(f,new_style=True)
with open(tmpdir/'new_style.txt') as f:
new = Table.from_ASCII(f)
assert all(default.data==new.data) and default.shapes == new.shapes
def test_write_invalid_format(self,default,tmpdir):
with pytest.raises(TypeError):
default.to_file(tmpdir/'shouldnotbethere.txt',format='invalid')
@pytest.mark.parametrize('mode',['str','path'])
def test_read_ang(self,reference_dir,mode):