Old Python 2.7 scripts with only a loose connection to DAMASK;
if needed, simply take them from an old release version.
This commit is contained in:
parent
dc9aca5710
commit
bd7b5ad27b
|
@ -1,66 +0,0 @@
|
|||
#!/usr/bin/env python2.7
# -*- coding: UTF-8 no BOM -*-
"""Apply morphological/smoothing filters to Gwyddion height data exported as text.

For every input file the selected filters are applied cumulatively in a fixed
order (opening, closing, erosion, dilation, average, median); the filtered
elevation map is written next to the input file with the applied filters and
stencil sizes encoded in the output file name.
"""

import os
import scipy.ndimage                           # 'import scipy' alone does not guarantee ndimage is loaded
import numpy as np
import damask
from optparse import OptionParser

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Apply filter(s) to Gwyddion data.
""" + scriptID.replace('\n','\\n')             # str.replace works on py2 and py3; string.replace() is py2-only
)

# one integer "stencil size" option per supported filter; 0 (the default) skips that filter
for option in ['opening',
               'closing',
               'erosion',
               'dilation',
               'average',
               'median',
              ]:
  parser.add_option('-%s'%option[0], '--%s'%option, dest=option, type='int',
                    help = 'stencil size for %s filter'%option)
  parser.set_default(option, 0)

(options, filenames) = parser.parse_args()


# ------------------------------------------ read Gwyddion data ---------------------------------------

for file in filenames:
  filters = ''
  header = []
  with open(file,'r') as f:
    for line in f:
      pieces = line.split()
      if not pieces: continue                  # blank line would raise IndexError on pieces[0]
      if pieces[0] != '#': break               # header is the leading block of '#'-prefixed lines
      if len(pieces) < 3:                      # bare '#' or '# key' line: keep it, but nothing to parse
        header.append(line.lstrip('#').strip())
        continue
      if pieces[1] == 'Width:':  width  = float(pieces[2])
      if pieces[1] == 'Height:': height = float(pieces[2])
      header.append(line.lstrip('#').strip())

  elevation = np.loadtxt(file)#*1e6

  # filters are applied cumulatively, in this fixed order;
  # the deprecated scipy.ndimage.morphology/.filters submodules are avoided
  if options.opening > 0:
    elevation = scipy.ndimage.grey_opening(elevation,options.opening)
    filters += '_opening%i'%options.opening
  if options.closing > 0:
    elevation = scipy.ndimage.grey_closing(elevation,options.closing)
    filters += '_closing%i'%options.closing
  if options.erosion > 0:
    elevation = scipy.ndimage.grey_erosion(elevation,options.erosion)
    filters += '_erosion%i'%options.erosion
  if options.dilation > 0:
    elevation = scipy.ndimage.grey_dilation(elevation,options.dilation)
    filters += '_dilation%i'%options.dilation
  if options.average > 0:
    elevation = scipy.ndimage.uniform_filter(elevation,options.average)
    filters += '_avg%i'%options.average
  if options.median > 0:
    elevation = scipy.ndimage.median_filter(elevation,options.median)
    filters += '_median%i'%options.median

  np.savetxt(os.path.splitext(file)[0]+filters+os.path.splitext(file)[1],elevation,header='\n'.join(header))
|
||||
|
|
@ -1,98 +0,0 @@
|
|||
#!/usr/bin/env python2.7
# -*- coding: UTF-8 no BOM -*-
"""Convert Gwyddion height data (text export) into a VTK rectilinear grid (.vtr).

The elevation values are stored as the z component of a 3-vector point-data
array named "elevation". Lateral and elevation units are read from the text
header; unless overridden with --scaling, the elevation values are rescaled
into the lateral unit so that the grid is geometrically consistent.
"""

import os
import vtk
import numpy as np
import damask
from optparse import OptionParser

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])

# scalingFactor[lateralunit][elevationunit]: factor converting elevation into the lateral unit
scalingFactor = {
          'm': {
                'm':  1e0,
                'mm': 1e-3,
                'µm': 1e-6,
               },
          'mm': {
                'm':  1e+3,
                'mm': 1e0,
                'µm': 1e-3,
               },
          'µm': {
                'm':  1e+6,
                'mm': 1e+3,
                'µm': 1e0,
               },
         }

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Produce VTK rectilinear grid from Gwyddion dataset exported as text.
""" + scriptID.replace('\n','\\n')             # str.replace works on py2 and py3; string.replace() is py2-only
)

parser.add_option('-s', '--scaling', dest='scaling', type='float',
                  help = 'scaling factor for elevation data [auto]')

parser.set_defaults(scaling = 0.0)

(options, filenames) = parser.parse_args()


# ------------------------------------------ read Gwyddion data ---------------------------------------

for file in filenames:
  with open(file,'r') as f:
    for line in f:
      pieces = line.split()
      if not pieces: continue                  # blank line would raise IndexError on pieces[0]
      if pieces[0] != '#': break               # data section starts after the '#' header block
      if len(pieces) < 4: continue             # every parsed header line has '# <key> <value> <unit>'
      if pieces[1] == 'Width:':
        width = float(pieces[2])
        lateralunit = pieces[3]
      if pieces[1] == 'Height:':
        height = float(pieces[2])
        lateralunit = pieces[3]
      if pieces[1] == 'Value' and pieces[2] == 'units:':
        elevationunit = pieces[3]

  if options.scaling == 0.0:                   # 0.0 means "auto": derive scaling from the header units
    options.scaling = scalingFactor[lateralunit][elevationunit]

  elevation = np.loadtxt(file)*options.scaling

  grid = vtk.vtkRectilinearGrid()
  grid.SetDimensions(elevation.shape[1],elevation.shape[0],1)

  # linspace(endpoint=False) yields exactly shape[1] (resp. shape[0]) coordinates;
  # arange with a float step could round to one point too many and break the grid
  xCoords = vtk.vtkDoubleArray()
  for x in np.linspace(0.0,width, num=elevation.shape[1],endpoint=False):
    xCoords.InsertNextValue(x)
  yCoords = vtk.vtkDoubleArray()
  for y in np.linspace(0.0,height,num=elevation.shape[0],endpoint=False):
    yCoords.InsertNextValue(y)
  zCoords = vtk.vtkDoubleArray()
  zCoords.InsertNextValue(0.0)

  grid.SetXCoordinates(xCoords)
  grid.SetYCoordinates(yCoords)
  grid.SetZCoordinates(zCoords)

  # elevation stored as z component of a 3-vector so viewers can warp by vector
  vector = vtk.vtkFloatArray()
  vector.SetName("elevation")
  vector.SetNumberOfComponents(3)
  vector.SetNumberOfTuples(np.prod(elevation.shape))
  for i,z in enumerate(np.ravel(elevation)):
    vector.SetTuple3(i,0,0,z)

  grid.GetPointData().AddArray(vector)

  writer = vtk.vtkXMLRectilinearGridWriter()
  writer.SetDataModeToBinary()
  writer.SetCompressorTypeToZLib()
  writer.SetFileName(os.path.splitext(file)[0]+'.vtr')
  if vtk.VTK_MAJOR_VERSION <= 5:               # SetInput was removed from the pipeline API in VTK 6
    writer.SetInput(grid)
  else:
    writer.SetInputData(grid)
  writer.Write()
|
Loading…
Reference in New Issue