#!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-

import os,sys
import numpy as np
from optparse import OptionParser
import damask

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])

#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile(s)]', description = """
Compress geometry files with ranges "a to b" and/or multiples "n of x".
""", version = scriptID)

(options, filenames) = parser.parse_args()

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
  try:
    table = damask.ASCIItable(name = name,
                              buffered = False, labeled = False)
  except: continue
  damask.util.report(scriptName,name)

# --- interpret header ----------------------------------------------------------------------------

  table.head_read()
  info,extra_header = table.head_getGeom()
  damask.util.report_geom(info)

  errors = []
  if np.any(info['grid'] < 1):    errors.append('invalid grid a b c.')
  if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
  if errors != []:
    damask.util.croak(errors)
    table.close(dismiss = True)
    continue

# --- write header ---------------------------------------------------------------------------------

  table.labels_clear()
  table.info_clear()
  table.info_append(extra_header+[
    scriptID + ' ' + ' '.join(sys.argv[1:]),
    "grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
    "size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
    "origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
    "homogenization\t{homog}".format(homog=info['homogenization']),
    "microstructures\t{microstructures}".format(microstructures=info['microstructures']),
    ])
  table.head_write()

# --- write packed microstructure information -----------------------------------------------------
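
# The packing below is a small state machine over the stream of microstructure
# indices: 'former' is the previously read index, 'start' the first index of the
# current run, 'reps' the run length, and 'compressType' the kind of run found so
# far ('.' single value, 'to' unit-step range, 'of' repeated value). A pending run
# is flushed as soon as the next index no longer extends it.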
  compressType = ''
  former = start = -1
  reps = 0

  outputAlive = True
  while outputAlive and table.data_read():                                  # read next data line of ASCII table
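    # expand lines that are already packed ("n of x", "a to b") back into
    # individual indices so the whole stream can be repacked from scratch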
    items = table.data
    if len(items) > 2:
      if   items[1].lower() == 'of': items = [int(items[2])]*int(items[0])
      elif items[1].lower() == 'to': items = range(int(items[0]),1+int(items[2]))
      else:                          items = map(int,items)
    else:                            items = map(int,items)

    for current in items:
      if abs(current - former) == 1 and (start - current) == reps*(former - current):
        compressType = 'to'
        reps += 1
      elif current == former and start == former:
        compressType = 'of'
        reps += 1
      else:
        if compressType == '':
          table.data = []
        elif compressType == '.':
          table.data = [former]
        elif compressType == 'to':
          table.data = [start,'to',former]
        elif compressType == 'of':
          table.data = [reps,'of',former]

        outputAlive = table.data_write(delimiter = ' ')                     # output processed line
        compressType = '.'
        start = current
        reps = 1

      former = current
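
  # the loop above flushes a run only once the next index breaks it, so the last
  # run is still pending here and has to be written out explicitly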
  table.data = {
    '.' : [former],
    'to': [start,'to',former],
    'of': [reps,'of',former],
    }[compressType]

  outputAlive = table.data_write(delimiter = ' ')                           # output processed line

# --- output finalization --------------------------------------------------------------------------

  table.close()                                                             # close ASCII table