#!/usr/bin/env python2
# -*- coding: UTF-8 no BOM -*-

import os,sys
import numpy as np
from optparse import OptionParser
import damask

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])

#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile(s)]', description = """
Compress geometry files with ranges "a to b" and/or multiples "n of x".

""", version = scriptID)

(options, filenames) = parser.parse_args()
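
# The packing notation used below follows the geom file convention:
# a run of consecutive indices "2 3 4" is written as "2 to 4" and a
# repetition "1 1 1" as "3 of 1".  For illustration, the column
#   1 1 1 2 3 4 7
# is packed as
#   3 of 1
#   2 to 4
#   7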

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
  try:
    table = damask.ASCIItable(name = name,
                              buffered = False, labeled = False)
  except: continue
  damask.util.report(scriptName,name)

# --- interpret header ----------------------------------------------------------------------------

  table.head_read()
  info,extra_header = table.head_getGeom()
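
  # echo the geometry information found in the header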
  damask.util.croak(['grid     a b c: %s'%(' x '.join(map(str,info['grid']))),
                     'size     x y z: %s'%(' x '.join(map(str,info['size']))),
                     'origin   x y z: %s'%(' : '.join(map(str,info['origin']))),
                     'homogenization:  %i'%info['homogenization'],
                     'microstructures: %i'%info['microstructures'],
                    ])

  errors = []
  if np.any(info['grid'] < 1):    errors.append('invalid grid a b c.')
  if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
  if errors != []:
    damask.util.croak(errors)
    table.close(dismiss = True)
    continue

# --- write header ---------------------------------------------------------------------------------

  table.labels_clear()
  table.info_clear()
  table.info_append(extra_header+[
    scriptID + ' ' + ' '.join(sys.argv[1:]),
    "grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
    "size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
    "origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
    "homogenization\t{homog}".format(homog=info['homogenization']),
    "microstructures\t{microstructures}".format(microstructures=info['microstructures']),
    ])
  table.head_write()

# --- write packed microstructure information -----------------------------------------------------
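
  # state of the current run:
  #   type   -- '' (nothing seen yet), '.' (single value), 'to' (consecutive run), 'of' (repeated value)
  #   former -- value read in the previous step
  #   start  -- first value of the current run
  #   reps   -- number of values in the current run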

  type   = ''
  former = start = -1
  reps   = 0

  outputAlive = True
  while outputAlive and table.data_read():                                  # read next data line of ASCII table
    items = table.data
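    # expand entries that are already packed ("n of x", "a to b") into explicit
    # indices so that mixed or partially packed input is repacked consistently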
    if len(items) > 2:
      if   items[1].lower() == 'of': items = [int(items[2])]*int(items[0])
      elif items[1].lower() == 'to': items = xrange(int(items[0]),1+int(items[2]))
      else:                          items = map(int,items)
    else:                            items = map(int,items)

    for current in items:
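      # extend the current run if the value continues the sequence ('to') or
      # repeats the previous value ('of'); otherwise flush the finished run
      # and start a new single-value run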
      if   current == former+1 and start+reps == former+1:
        type = 'to'
        reps += 1
      elif current == former and start == former:
        type = 'of'
        reps += 1
      else:
        if   type == '':
          table.data = []
        elif type == '.':
          table.data = [str(former)]
        elif type == 'to':
          table.data = ['{0} to {1}'.format(former-reps+1,former)]
        elif type == 'of':
          table.data = ['{0} of {1}'.format(reps,former)]

        outputAlive = table.data_write(delimiter = ' ')                     # output processed line
        type  = '.'
        start = current
        reps  = 1

      former = current
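
  # flush whatever run is still open once the data section ends
  # (skipped if the file contained no data lines at all)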
  if type != '':                                                            # guard against files without any data lines
    table.data = {
      '.' : [str(former)],
      'to': ['%i to %i'%(former-reps+1,former)],
      'of': ['%i of %i'%(reps,former)],
      }[type]
    outputAlive = table.data_write(delimiter = ' ')                         # output processed line

# --- output finalization --------------------------------------------------------------------------

  table.close()                                                             # close ASCII table