more python 3 compatibility
This commit is contained in:
parent 35e470ff4d
commit f928bd5e5b
@@ -83,10 +83,9 @@ for name in filenames:
   while outputAlive and table.data_read():                                   # read next data line of ASCII table
     for type, data in items.items():
       for column in data['column']:
-        table.data_append(determinant(map(float,table.data[column:
-                                                            column+data['dim']])))
+        table.data_append(determinant(list(map(float,table.data[column: column+data['dim']]))))
     outputAlive = table.data_write()                                         # output processed line
 
 # ------------------------------------------ output finalization -----------------------------------
 
   table.close()                                                              # close input ASCII table (works for stdin)
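The core of the change is that Python 3's map() returns a lazy iterator rather than a list, so any consumer that needs a real sequence breaks unless the result is materialized with list(). A minimal sketch of the failure mode (the determinant() here is a stand-in for illustration, not the script's own helper):

```python
import numpy as np

def determinant(values):
    """Stand-in helper: expects a 9-element sequence (row-major 3x3 tensor)."""
    return np.linalg.det(np.array(values, dtype=float).reshape(3, 3))

row = ['1', '0', '0', '0', '2', '0', '0', '0', '3']   # one data line, as strings read from the ASCII table

# Python 2: map() returned a list, so determinant(map(float, row)) worked directly.
# Python 3: map() returns an iterator and np.array(..., dtype=float) cannot convert it,
# so the call fails unless the iterator is materialized first, hence the added list().
print(determinant(list(map(float, row))))              # -> 6.0
```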
@@ -95,8 +95,8 @@ for name in filenames:
   while outputAlive and table.data_read():                                   # read next data line of ASCII table
     for type, data in items.items():
       for column in data['column']:
-        table.data_append(deviator(map(float,table.data[column:
-                                                         column+data['dim']]),options.spherical))
+        table.data_append(deviator(list(map(float,table.data[column:
+                                                              column+data['dim']])),options.spherical))
     outputAlive = table.data_write()                                         # output processed line
 
 # ------------------------------------------ output finalization -----------------------------------
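The deviator() call site gets the same list(map(...)) treatment; note that the closing parenthesis of list() has to sit before the options.spherical argument. The helper's exact DAMASK implementation is not shown in the diff, so the following is only a sketch of the usual decomposition into deviatoric and spherical parts, assuming a 3x3 tensor stored row-major:

```python
import numpy as np

def deviator(values, spherical=False):
    """Sketch of a deviator helper: split a flat 3x3 tensor into deviatoric
    and (optionally) spherical parts; the real script's helper may differ."""
    T   = np.array(values, dtype=float).reshape(3, 3)
    sph = np.trace(T) / 3.0                           # hydrostatic/spherical part
    dev = T - sph * np.eye(3)                         # deviatoric part: T - tr(T)/3 * I
    out = list(dev.flatten())
    if spherical:
        out.append(sph)                               # optionally append the spherical part
    return out

row = ['1', '0', '0', '0', '2', '0', '0', '0', '3']
print(deviator(list(map(float, row)), spherical=True))
```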
@@ -168,13 +168,7 @@ for name in filenames:
                              np.zeros((table.data.shape[0],
                                        3-table.data[:,9:].shape[1]),dtype='f')))             # fill coords up to 3D with zeros
 
-  coords = [np.unique(table.data[:,9+i]) for i in range(3)]
-  mincorner = np.array(map(min,coords))
-  maxcorner = np.array(map(max,coords))
-  grid      = np.array(map(len,coords),'i')
-  size      = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner)              # size from edge to edge = dim * n/(n-1)
-  size      = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))                    # spacing for grid==1 set to smallest among other spacings
+  grid,size = damask.util.coordGridAndSize(table.data[:,9:12])
 
   N = grid.prod()
 
   if N != len(table.data): errors.append('data count {} does not match grid {}x{}x{}.'.format(N,*grid))
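The six removed lines are the pattern the new helper stands in for: grid dimensions from the unique coordinate values per axis, physical size from the edge-to-edge extent scaled by n/(n-1), and a fallback spacing for axes with only one layer. What damask.util.coordGridAndSize does beyond that is not shown in the diff, so the following is a sketch of just the replaced logic, rewritten without the map() calls that would break under Python 3 (np.array over a map object yields a useless 0-d object array):

```python
import numpy as np

def coord_grid_and_size(coords):
    """Sketch of the replaced logic: coords is an (N, 3) array of regular, cell-centered grid positions."""
    uniques   = [np.unique(coords[:, i]) for i in range(3)]
    mincorner = np.array([u.min() for u in uniques])
    maxcorner = np.array([u.max() for u in uniques])
    grid      = np.array([len(u) for u in uniques], dtype=int)
    # size from edge to edge = dim * n/(n-1)
    size      = grid / np.maximum(np.ones(3), grid - 1.0) * (maxcorner - mincorner)
    # axes with a single layer get the smallest spacing found on the other axes
    size      = np.where(grid > 1, size, min(size[grid > 1] / grid[grid > 1]))
    return grid, size

# example: a 4 x 3 x 1 cell-centered grid on a unit square
x, y = np.meshgrid(np.linspace(0.125, 0.875, 4), np.linspace(1/6, 5/6, 3), indexing='ij')
pts  = np.column_stack([x.ravel(), y.ravel(), np.zeros(12)])
print(coord_grid_and_size(pts))    # -> (array([4, 3, 1]), array([1.  , 1.  , 0.25]))
```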
@@ -88,9 +88,9 @@ for name in filenames:
   outputAlive = True
   while outputAlive and table.data_read():                                   # read next data line of ASCII table
     for column in columns:
-      table.data_append(E_hkl(map(float,table.data[column:column+3]),options.hkl))
+      table.data_append(E_hkl(list(map(float,table.data[column:column+3])),options.hkl))
     outputAlive = table.data_write()                                         # output processed line
 
 # ------------------------------------------ output finalization -----------------------------------
 
   table.close()                                                              # close ASCII tables
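The same list(map(...)) fix recurs at every call site across these scripts. An alternative sometimes used when porting (hypothetical here; it is not what this commit does) is to make the helper consume any iterable directly, for example via np.fromiter, so existing call sites keep working without the explicit list():

```python
import numpy as np

def as_vector3(values):
    """Hypothetical helper: materialize any iterable of three numbers into a float array."""
    return np.fromiter(values, dtype=float, count=3)

print(as_vector3(map(float, ['0.1', '0.2', '0.3'])))   # iterator input works, no list() needed at the call site
```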
@@ -151,10 +151,8 @@ for name in filenames:
   remarks = []
   column  = {}
 
-  coordDim = table.label_dimension(options.pos)
-  if not 3 >= coordDim >= 1:
+  if not 3 >= table.label_dimension(options.pos) >= 1:
     errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
-  else: coordCol = table.label_index(options.pos)
 
   if table.label_dimension(options.id) != 1: errors.append('grain identifier {} not found.'.format(options.id))
   else: idCol = table.label_index(options.id)
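The check still admits one-, two-, or three-dimensional position columns; the coordDim and coordCol variables are dropped because the grid/size computation below no longer needs them. As the earlier hunk shows, coordinates with fewer than three components are padded to 3D with zero columns. A minimal sketch of that padding, assuming an (N, d) position array:

```python
import numpy as np

def pad_to_3d(pos):
    """Pad an (N, d) array of positions (d = 1, 2, or 3) to (N, 3) with zero columns."""
    n, d = pos.shape
    assert 1 <= d <= 3, 'coordinates need to have one, two, or three dimensions'
    return np.hstack((pos, np.zeros((n, 3 - d), dtype=pos.dtype)))

print(pad_to_3d(np.array([[0.25], [0.75]])))   # 1D positions padded to 3D
```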
@@ -178,11 +176,7 @@ for name in filenames:
 
   table.data_readArray()
 
-  coords = [np.unique(table.data[:,coordCol+i]) for i in range(coordDim)]
-  mincorner = np.array(map(min,coords))
-  maxcorner = np.array(map(max,coords))
-  grid      = np.array(map(len,coords)+[1]*(3-len(coords)),'i')
+  grid,size = damask.util.coordGridAndSize(table.data[:,table.label_indexrange(options.pos)])
 
   N = grid.prod()
 
   if N != len(table.data): errors.append('data count {} does not match grid {}.'.format(N,'x'.join(map(str,grid))))
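This variant of the removed block also shows why the swap helps the port: under Python 3, map(len,coords) + [1]*(3-len(coords)) raises a TypeError, because a map object cannot be concatenated with a list. A sketch of the removed padding logic rewritten with list comprehensions (anything damask.util.coordGridAndSize does beyond what the diff shows is an assumption):

```python
import numpy as np

def grid_from_coords(coord_columns):
    """Grid dimensions from d coordinate columns (d <= 3), padded with 1s up to 3D."""
    uniques = [np.unique(col) for col in coord_columns]
    # Python-2 style: np.array(map(len, uniques) + [1]*(3-len(uniques)), 'i')  -> TypeError in Python 3
    return np.array([len(u) for u in uniques] + [1] * (3 - len(uniques)), dtype=int)

xy = np.array([[0., 0.], [1., 0.], [0., 1.], [1., 1.]])   # 2D positions
print(grid_from_coords([xy[:, 0], xy[:, 1]]))              # -> [2 2 1]
```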