shortened excessively long lines
commit 36c703ecd3 (parent 3a267dc7ff)
@@ -154,7 +154,8 @@ for name in filenames:
   stack = [table.data]
   for type, data in items.iteritems():
     for i,label in enumerate(data['active']):
-      stack.append(curlFFT(size[::-1],                                                             # we need to reverse order here, because x is fastest,ie rightmost, but leftmost in our x,y,z notation
+      # we need to reverse order here, because x is fastest,ie rightmost, but leftmost in our x,y,z notation
+      stack.append(curlFFT(size[::-1],
                            table.data[:,data['column'][i]:data['column'][i]+data['dim']].
                            reshape([grid[2],grid[1],grid[0]]+data['shape'])))

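The size[::-1] reversal is the subtle part of this hunk, so a minimal numpy sketch of the axis-order argument may help. The 4x3x2 grid and the box lengths below are hypothetical, not taken from the commit:

import numpy as np

# Hypothetical 4x3x2 grid: 4 points along x, 3 along y, 2 along z,
# with x varying fastest -- the row order of the ASCII table.
grid = np.array([4, 3, 2])                    # counts in (x, y, z) notation
size = np.array([4.0, 3.0, 2.0])              # box lengths in (x, y, z) notation

flat  = np.arange(grid.prod())                # one scalar value per grid point
field = flat.reshape(grid[::-1])              # C order: axes come out as (z, y, x)
assert field.shape == (2, 3, 4)               # leftmost axis is z, rightmost is x

# Anything that pairs an axis with a physical length (e.g. FFT frequencies)
# must therefore receive the lengths in the same reversed (z, y, x) order:
freqs = [np.fft.fftfreq(n, d=l/n) for n, l in zip(field.shape, size[::-1])]
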
@@ -76,7 +76,7 @@ parser.set_defaults(coords = 'ipinitialcoord',
 if options.vector is None and options.tensor is None:
   parser.error('no data column specified.')

-# --- loop over input files -------------------------------------------------------------------------
+# --- loop over input files ------------------------------------------------------------------------

 if filenames == []: filenames = [None]

@@ -134,14 +134,15 @@ for name in filenames:
   maxcorner = np.array(map(max,coords))
   grid   = np.array(map(len,coords),'i')
   size   = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner)                       # size from edge to edge = dim * n/(n-1)
-  size   = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))                            # spacing for grid==1 equal to smallest among other spacings
+  size   = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))                            # spacing for grid==1 equal to smallest among other ones

 # ------------------------------------------ process value field -----------------------------------

   stack = [table.data]
   for type, data in items.iteritems():
     for i,label in enumerate(data['active']):
-      stack.append(divFFT(size[::-1],                                                              # we need to reverse order here, because x is fastest,ie rightmost, but leftmost in our x,y,z notation
+      # we need to reverse order here, because x is fastest,ie rightmost, but leftmost in our x,y,z notation
+      stack.append(divFFT(size[::-1],
                           table.data[:,data['column'][i]:data['column'][i]+data['dim']].
                           reshape([grid[2],grid[1],grid[0]]+data['shape'])))

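The two size lines in this hunk encode a small but important convention: the coordinates are cell centers, so the edge-to-edge box length has to be scaled up from the center-to-center span, and any direction with a single grid point gets a stand-in spacing. A worked sketch with hypothetical numbers (none of them appear in the commit):

import numpy as np

# Hypothetical cell-center coordinates: 4 points along x, 2 along y, 1 along z.
grid      = np.array([4, 2, 1], 'i')
mincorner = np.array([0.125, 0.25, 0.0])      # innermost cell centers ...
maxcorner = np.array([0.875, 0.75, 0.0])      # ... sit half a cell off the box edge

# Edge-to-edge size: the outermost centers span size*(n-1)/n, so
# size = span * n/(n-1); the maximum() guards against n-1 == 0.
size = grid/np.maximum(np.ones(3, 'd'), grid-1.0) * (maxcorner-mincorner)
# -> [1.0, 1.0, 0.0]; z is degenerate because all its coordinates coincide

# Replace degenerate entries by the smallest spacing among the resolved
# directions, so every axis ends up with a usable, nonzero spacing.
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))
# spacings: x = 1.0/4 = 0.25, y = 1.0/2 = 0.5  ->  size becomes [1.0, 1.0, 0.25]
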
@@ -79,7 +79,7 @@ parser.set_defaults(coords = 'ipinitialcoord',
 if options.vector is None and options.scalar is None:
   parser.error('no data column specified.')

-# --- loop over input files -------------------------------------------------------------------------
+# --- loop over input files ------------------------------------------------------------------------

 if filenames == []: filenames = [None]

@@ -124,7 +124,7 @@ for name in filenames:
   table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
   for type, data in items.iteritems():
     for label in data['active']:
-      table.labels_append(['{}_gradFFT({})'.format(i+1,label) for i in xrange(3 * data['dim'])])   # extend ASCII header with new labels # grad increases the field dimension by one
+      table.labels_append(['{}_gradFFT({})'.format(i+1,label) for i in xrange(3 * data['dim'])])   # extend ASCII header with new labels
   table.head_write()

 # --------------- figure out size and grid ---------------------------------------------------------

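For reference, the list comprehension on that header line expands as follows; 'phi' is a made-up column name and range stands in for the script's Python 2 xrange:

label, dim = 'phi', 1                         # hypothetical scalar column, dim == 1
labels = ['{}_gradFFT({})'.format(i+1, label) for i in range(3 * dim)]
# -> ['1_gradFFT(phi)', '2_gradFFT(phi)', '3_gradFFT(phi)']
# The gradient adds one dimension: a scalar (1 column) becomes a 3-vector,
# and a 3-vector column (dim == 3) would yield nine '#_gradFFT(...)' labels.
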
@@ -143,7 +143,8 @@ for name in filenames:
   stack = [table.data]
   for type, data in items.iteritems():
     for i,label in enumerate(data['active']):
-      stack.append(gradFFT(size[::-1],                                                             # we need to reverse order here, because x is fastest,ie rightmost, but leftmost in our x,y,z notation
+      # we need to reverse order here, because x is fastest,ie rightmost, but leftmost in our x,y,z notation
+      stack.append(gradFFT(size[::-1],
                            table.data[:,data['column'][i]:data['column'][i]+data['dim']].
                            reshape([grid[2],grid[1],grid[0]]+data['shape'])))

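Finally, the slice-and-reshape idiom shared by all three hunks rebuilds a field array from the flat table. A self-contained sketch with hypothetical sizes (column, dim, shape, and the random data are illustrative only):

import numpy as np

# Hypothetical flat table: 24 points (x fastest), a 3-vector field
# stored in columns 2..4, mimicking table.data after the ASCII file is read.
grid   = np.array([4, 3, 2])                  # (x, y, z) point counts
column = 2                                    # first column of the field
dim    = 3                                    # a vector occupies 3 columns
shape  = [3]                                  # per-point shape of the field
data   = np.random.rand(grid.prod(), column + dim)

# Cut out the field's columns and rebuild the (z, y, x, component) array
# that curlFFT/divFFT/gradFFT operate on.
field = data[:, column:column+dim].reshape([grid[2], grid[1], grid[0]] + shape)
assert field.shape == (2, 3, 4, 3)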