no brackets for reshape required

Author: Martin Diehl, 2020-03-17 12:22:48 +01:00
parent 6a1354f80f
commit 45e3b8f7c6
24 changed files with 41 additions and 42 deletions
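
The change is purely cosmetic: numpy's `ndarray.reshape` accepts the target shape either as a single tuple or as separate integer arguments, so the extra pair of parentheses used throughout the scripts can be dropped. A minimal sketch of the equivalence (the array `a` is a hypothetical example, not taken from the commit):

```python
import numpy as np

a = np.arange(12)                     # hypothetical example array

# tuple argument and separate integer arguments are interchangeable
assert np.array_equal(a.reshape((-1, 3)), a.reshape(-1, 3))
assert a.reshape(-1, 3).shape == (4, 3)
```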

View File

@@ -45,21 +45,21 @@ for filename in options.filenames:
 N_digits = 5 # hack to keep test intact
 for inc in damask.util.show_progress(results.iter_visible('increments'),len(results.increments)):
 table = damask.Table(np.ones(np.product(results.grid),dtype=int)*int(inc[3:]),{'inc':(1,)})
-table.add('pos',coords.reshape((-1,3)))
+table.add('pos',coords.reshape(-1,3))
 results.pick('materialpoints',False)
 results.pick('constituents', True)
 for label in options.con:
 x = results.get_dataset_location(label)
 if len(x) != 0:
-table.add(label,results.read_dataset(x,0,plain=True).reshape((results.grid.prod(),-1)))
+table.add(label,results.read_dataset(x,0,plain=True).reshape(results.grid.prod(),-1))
 results.pick('constituents', False)
 results.pick('materialpoints',True)
 for label in options.mat:
 x = results.get_dataset_location(label)
 if len(x) != 0:
-table.add(label,results.read_dataset(x,0,plain=True).reshape((results.grid.prod(),-1)))
+table.add(label,results.read_dataset(x,0,plain=True).reshape(results.grid.prod(),-1))
 dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
 if not os.path.isdir(dirname):

View File

@@ -112,8 +112,8 @@ for filename in options.filenames:
 # Data CrystalStructures
 o[ensemble_label + '/CrystalStructures'] = np.uint32(np.array([999,\
-Crystal_structures[f.get_crystal_structure()]])).reshape((2,1))
+Crystal_structures[f.get_crystal_structure()]])).reshape(2,1)
-o[ensemble_label + '/PhaseTypes'] = np.uint32(np.array([999,Phase_types['Primary']])).reshape((2,1)) # ToDo
+o[ensemble_label + '/PhaseTypes'] = np.uint32(np.array([999,Phase_types['Primary']])).reshape(2,1) # ToDo
 # Attributes Ensemble Matrix
 o[ensemble_label].attrs['AttributeMatrixType'] = np.array([11],np.uint32)

View File

@@ -40,7 +40,7 @@ def volTetrahedron(coords):
 # Get all the squares of all side lengths from the differences between
 # the 6 different pairs of vertex positions
-vertices = np.concatenate((coords[0],coords[1],coords[2],coords[3])).reshape([4,3])
+vertices = np.concatenate((coords[0],coords[1],coords[2],coords[3])).reshape(4,3)
 vertex1, vertex2 = vertex_pair_indexes[:,0], vertex_pair_indexes[:,1]
 sides_squared = np.sum((vertices[vertex1] - vertices[vertex2])**2,axis=-1)
@@ -185,13 +185,13 @@ for name in filenames:
 centers = damask.grid_filters.cell_coord(size,F)
 shapeMismatch = shapeMismatch( size,table.get(options.defgrad).reshape(grid[2],grid[1],grid[0],3,3),nodes,centers)
 table.add('shapeMismatch(({}))'.format(options.defgrad),
-shapeMismatch.reshape((-1,1)),
+shapeMismatch.reshape(-1,1),
 scriptID+' '+' '.join(sys.argv[1:]))
 if options.volume:
 volumeMismatch = volumeMismatch(size,table.get(options.defgrad).reshape(grid[2],grid[1],grid[0],3,3),nodes)
 table.add('volMismatch(({}))'.format(options.defgrad),
-volumeMismatch.reshape((-1,1)),
+volumeMismatch.reshape(-1,1),
 scriptID+' '+' '.join(sys.argv[1:]))
 table.to_ASCII(sys.stdout if name is None else name)

View File

@@ -51,7 +51,7 @@ for name in filenames:
 shape = (3,) if np.prod(field.shape)//np.prod(grid) == 3 else (3,3) # vector or tensor
 field = field.reshape(np.append(grid[::-1],shape))
 table.add('curlFFT({})'.format(label),
-damask.grid_filters.curl(size[::-1],field).reshape((-1,np.prod(shape))),
+damask.grid_filters.curl(size[::-1],field).reshape(-1,np.prod(shape)),
 scriptID+' '+' '.join(sys.argv[1:]))
 table.to_ASCII(sys.stdout if name is None else name)

View File

@@ -41,7 +41,7 @@ for name in filenames:
 table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
 for tensor in options.tensor:
 table.add('dev({})'.format(tensor),
-damask.mechanics.deviatoric_part(table.get(tensor).reshape(-1,3,3)).reshape((-1,9)),
+damask.mechanics.deviatoric_part(table.get(tensor).reshape(-1,3,3)).reshape(-1,9),
 scriptID+' '+' '.join(sys.argv[1:]))
 if options.spherical:
 table.add('sph({})'.format(tensor),

View File

@@ -54,20 +54,20 @@ for name in filenames:
 F = table.get(options.f).reshape(np.append(grid[::-1],(3,3)))
 if options.nodal:
-table = damask.Table(damask.grid_filters.node_coord0(grid[::-1],size[::-1]).reshape((-1,3)),
+table = damask.Table(damask.grid_filters.node_coord0(grid[::-1],size[::-1]).reshape(-1,3),
 {'pos':(3,)})
 table.add('avg({}).{}'.format(options.f,options.pos),
-damask.grid_filters.node_displacement_avg(size[::-1],F).reshape((-1,3)),
+damask.grid_filters.node_displacement_avg(size[::-1],F).reshape(-1,3),
 scriptID+' '+' '.join(sys.argv[1:]))
 table.add('fluct({}).{}'.format(options.f,options.pos),
-damask.grid_filters.node_displacement_fluct(size[::-1],F).reshape((-1,3)),
+damask.grid_filters.node_displacement_fluct(size[::-1],F).reshape(-1,3),
 scriptID+' '+' '.join(sys.argv[1:]))
 table.to_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt')
 else:
 table.add('avg({}).{}'.format(options.f,options.pos),
-damask.grid_filters.cell_displacement_avg(size[::-1],F).reshape((-1,3)),
+damask.grid_filters.cell_displacement_avg(size[::-1],F).reshape(-1,3),
 scriptID+' '+' '.join(sys.argv[1:]))
 table.add('fluct({}).{}'.format(options.f,options.pos),
-damask.grid_filters.cell_displacement_fluct(size[::-1],F).reshape((-1,3)),
+damask.grid_filters.cell_displacement_fluct(size[::-1],F).reshape(-1,3),
 scriptID+' '+' '.join(sys.argv[1:]))
 table.to_ASCII(sys.stdout if name is None else name)

View File

@@ -51,7 +51,7 @@ for name in filenames:
 shape = (3,) if np.prod(field.shape)//np.prod(grid) == 3 else (3,3) # vector or tensor
 field = field.reshape(np.append(grid[::-1],shape))
 table.add('divFFT({})'.format(label),
-damask.grid_filters.divergence(size[::-1],field).reshape((-1,np.prod(shape)//3)),
+damask.grid_filters.divergence(size[::-1],field).reshape(-1,np.prod(shape)//3),
 scriptID+' '+' '.join(sys.argv[1:]))
 table.to_ASCII(sys.stdout if name is None else name)

View File

@@ -68,7 +68,7 @@ for name in filenames:
 for label in options.labels:
 table.add('Gauss{}({})'.format(options.sigma,label),
-ndimage.filters.gaussian_filter(table.get(label).reshape((-1)),
+ndimage.filters.gaussian_filter(table.get(label).reshape(-1),
 options.sigma,options.order,
 mode = 'wrap' if options.periodic else 'nearest'),
 scriptID+' '+' '.join(sys.argv[1:]))

View File

@@ -51,7 +51,7 @@ for name in filenames:
 shape = (1,) if np.prod(field.shape)//np.prod(grid) == 1 else (3,) # scalar or vector
 field = field.reshape(np.append(grid[::-1],shape))
 table.add('gradFFT({})'.format(label),
-damask.grid_filters.gradient(size[::-1],field).reshape((-1,np.prod(shape)*3)),
+damask.grid_filters.gradient(size[::-1],field).reshape(-1,np.prod(shape)*3),
 scriptID+' '+' '.join(sys.argv[1:]))
 table.to_ASCII(sys.stdout if name is None else name)

View File

@@ -86,13 +86,13 @@ for name in filenames:
 table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
 for defgrad in options.defgrad:
-F = table.get(defgrad).reshape((-1,3,3))
+F = table.get(defgrad).reshape(-1,3,3)
 for theStretch in stretches:
 for theStrain in strains:
 (t,m) = parameters(theStretch,theStrain)
 label = '{}({}){}'.format(theStrain,theStretch,defgrad if defgrad != 'f' else '')
 table.add(label,
-damask.mechanics.strain_tensor(F,t,m).reshape((-1,9)),
+damask.mechanics.strain_tensor(F,t,m).reshape(-1,9),
 scriptID+' '+' '.join(sys.argv[1:]))
 table.to_ASCII(sys.stdout if name is None else name)

View File

@@ -91,7 +91,7 @@ for name in filenames:
 table = damask.Table(averagedDown,table.shapes,table.comments)
 coords = damask.grid_filters.cell_coord0(packedGrid,size,shift/packedGrid*size+origin)
-table.set(options.pos, coords.reshape((-1,3)))
+table.set(options.pos, coords.reshape(-1,3))
 outname = os.path.join(os.path.dirname(name),prefix+os.path.basename(name))

View File

@@ -60,12 +60,12 @@ for name in filenames:
 outSize = grid*packing
 data = table.data.values.reshape(tuple(grid)+(-1,))
-blownUp = ndimage.interpolation.zoom(data,tuple(packing)+(1,),order=0,mode='nearest').reshape((outSize.prod(),-1))
+blownUp = ndimage.interpolation.zoom(data,tuple(packing)+(1,),order=0,mode='nearest').reshape(outSize.prod(),-1)
 table = damask.Table(blownUp,table.shapes,table.comments)
 coords = damask.grid_filters.cell_coord0(outSize,size,origin)
-table.set(options.pos,coords.reshape((-1,3)))
+table.set(options.pos,coords.reshape(-1,3))
 table.set('elem',np.arange(1,outSize.prod()+1))
 outname = os.path.join(os.path.dirname(name),prefix+os.path.basename(name))

View File

@@ -104,7 +104,7 @@ for name in filenames:
 VTKarray[color].SetName(color)
 for tensor in options.tensor:
-data = damask.mechanics.symmetric(table.get(tensor).reshape((-1,3,3))).reshape((-1,9))
+data = damask.mechanics.symmetric(table.get(tensor).reshape(-1,3,3)).reshape(-1,9)
 VTKarray[tensor] = numpy_support.numpy_to_vtk(data.copy(),
 deep=True,array_type=vtk.VTK_DOUBLE)
 VTKarray[tensor].SetName(tensor)

View File

@@ -93,7 +93,7 @@ for name in filenames:
 VTKarray[color].SetName(color)
 for tensor in options.tensor:
-data = damask.mechanics.symmetric(table.get(tensor).reshape((-1,3,3))).reshape((-1,9))
+data = damask.mechanics.symmetric(table.get(tensor).reshape(-1,3,3)).reshape(-1,9)
 VTKarray[tensor] = numpy_support.numpy_to_vtk(data.copy(),
 deep=True,array_type=vtk.VTK_DOUBLE)
 VTKarray[tensor].SetName(tensor)

View File

@@ -30,17 +30,16 @@ for name in filenames:
 geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
 damask.util.croak(geom)
-coord0 = damask.grid_filters.cell_coord0(geom.grid,geom.size,geom.origin).reshape((-1,3))
+coord0 = damask.grid_filters.cell_coord0(geom.grid,geom.size,geom.origin).reshape(-1,3)
 comments = geom.comments \
 + [scriptID + ' ' + ' '.join(sys.argv[1:]),
-"grid\ta {}\tb {}\tc {}".format(*geom.grid),
+'grid\ta {}\tb {}\tc {}'.format(*geom.grid),
-"size\tx {}\ty {}\tz {}".format(*geom.size),
+'size\tx {}\ty {}\tz {}'.format(*geom.size),
-"origin\tx {}\ty {}\tz {}".format(*geom.origin),
+'origin\tx {}\ty {}\tz {}'.format(*geom.origin),
-"homogenization\t{}".format(geom.homogenization)]
+'homogenization\t{}'.format(geom.homogenization)]
 table = damask.Table(coord0,{'pos':(3,)},comments)
 table.add('microstructure',geom.microstructure.reshape((-1,1),order='F'))
-table.to_ASCII(sys.stdout if name is None else \
-os.path.splitext(name)[0]+'.txt')
+table.to_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.txt')
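
Note that the `geom.microstructure.reshape((-1,1),order='F')` call in this hunk keeps its tuple. The tuple is not required there either: a keyword argument such as `order=` can be combined with the bare-integer form as well, as the following sketch (not part of the commit) illustrates:

```python
import numpy as np

m = np.arange(6)                      # hypothetical stand-in for geom.microstructure

# both spellings are accepted when order= is passed as a keyword
assert np.array_equal(m.reshape((-1, 1), order='F'),
                      m.reshape(-1, 1, order='F'))
```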

View File

@@ -54,7 +54,7 @@ for name in filenames:
 np.in1d(microstructure,options.blacklist,invert=True) if options.blacklist else \
 np.full(geom.grid.prod(),True,dtype=bool))
-seeds = np.concatenate((damask.grid_filters.cell_coord0(geom.grid,geom.size).reshape((-1,3)),
+seeds = np.concatenate((damask.grid_filters.cell_coord0(geom.grid,geom.size).reshape(-1,3),
 microstructure),
 axis=1)[mask]

View File

@@ -399,5 +399,5 @@ def regrid(size,F,new_grid):
 c[np.where(c[:,:,:,d]<0)] += outer[d]
 c[np.where(c[:,:,:,d]>outer[d])] -= outer[d]
-tree = spatial.cKDTree(c.reshape((-1,3)),boxsize=outer)
+tree = spatial.cKDTree(c.reshape(-1,3),boxsize=outer)
 return tree.query(cell_coord0(new_grid,outer))[1].flatten()

View File

@@ -214,7 +214,7 @@ def strain_tensor(F,t,m):
 Order of the strain.
 """
-F_ = F.reshape((1,3,3)) if F.shape == (3,3) else F
+F_ = F.reshape(1,3,3) if F.shape == (3,3) else F
 if t == 'V':
 B = np.matmul(F_,transpose(F_))
 w,n = np.linalg.eigh(B)
@@ -231,7 +231,7 @@ def strain_tensor(F,t,m):
 else:
 eps = np.matmul(n,np.einsum('ij,ikj->ijk',0.5*np.log(w),n))
-return eps.reshape((3,3)) if np.shape(F) == (3,3) else \
+return eps.reshape(3,3) if np.shape(F) == (3,3) else \
 eps
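
The two hunks above also show the reshape-based batching pattern in `strain_tensor`: a single (3,3) deformation gradient is promoted to a batch of one so the vectorized code path applies, and the result is reshaped back at the end. A small sketch of that idea (not the DAMASK implementation itself):

```python
import numpy as np

F = np.eye(3)                                       # a single deformation gradient, shape (3,3)
F_ = F.reshape(1,3,3) if F.shape == (3,3) else F    # promote to a batch of one

# batched linear algebra now works uniformly for one or many tensors
B = np.matmul(F_, np.swapaxes(F_, -2, -1))          # left Cauchy-Green tensor(s), shape (1,3,3)
w, n = np.linalg.eigh(B)                            # eigenvalues/eigenvectors per tensor
assert B.shape == (1, 3, 3) and w.shape == (1, 3)
```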

View File

@@ -304,7 +304,7 @@ class Rotation:
 reciprocal = False,
 ):
-om = basis if isinstance(basis, np.ndarray) else np.array(basis).reshape((3,3))
+om = basis if isinstance(basis, np.ndarray) else np.array(basis).reshape(3,3)
 if reciprocal:
 om = np.linalg.inv(om.T/np.pi) # transform reciprocal basis set
 orthonormal = False # contains stretch

View File

@@ -167,7 +167,7 @@ class Table:
 """
 if re.match(r'[0-9]*?_',label):
 idx,key = label.split('_',1)
-data = self.data[key].to_numpy()[:,int(idx)-1].reshape((-1,1))
+data = self.data[key].to_numpy()[:,int(idx)-1].reshape(-1,1)
 else:
 data = self.data[label].to_numpy().reshape((-1,)+self.shapes[label])

View File

@@ -18,7 +18,7 @@ def default():
 x=np.concatenate((np.ones(40,dtype=int),
 np.arange(2,42),
 np.ones(40,dtype=int)*2,
-np.arange(1,41))).reshape((8,5,4))
+np.arange(1,41))).reshape(8,5,4)
 return Geom(x,[8e-6,5e-6,4e-6])
 @pytest.fixture

View File

@@ -61,7 +61,7 @@ class TestResult:
 default.add_determinant('P')
 loc = {'P': default.get_dataset_location('P'),
 'det(P)':default.get_dataset_location('det(P)')}
-in_memory = np.linalg.det(default.read_dataset(loc['P'],0)).reshape((-1,1))
+in_memory = np.linalg.det(default.read_dataset(loc['P'],0)).reshape(-1,1)
 in_file = default.read_dataset(loc['det(P)'],0)
 assert np.allclose(in_memory,in_file)

View File

@@ -173,4 +173,4 @@ class TestTable:
 ['test data'])
 t.add('s',np.array(['b','a']))
 t.sort_by('s')
-assert np.all(t.get('1_v') == np.array([2,0]).reshape((2,1)))
+assert np.all(t.get('1_v') == np.array([2,0]).reshape(2,1))

View File

@@ -31,7 +31,7 @@ class TestGridFilters:
 size = np.random.random(3)
 origin = np.random.random(3)
 coord0 = eval('grid_filters.{}_coord0(grid,size,origin)'.format(mode)) # noqa
-_grid,_size,_origin = eval('grid_filters.{}_coord0_gridSizeOrigin(coord0.reshape((-1,3)))'.format(mode))
+_grid,_size,_origin = eval('grid_filters.{}_coord0_gridSizeOrigin(coord0.reshape(-1,3))'.format(mode))
 assert np.allclose(grid,_grid) and np.allclose(size,_size) and np.allclose(origin,_origin)
 def test_displacement_fluct_equivalence(self):