#!/usr/bin/env python2.7
# -*- coding: UTF-8 no BOM -*-

# ------------------------------------------------------------------- #
# NOTE:                                                               #
#  1. Not all output is defined in the DS_HDF5.xml, please add any    #
#     new one to the system wide definition file                      #
#     <DAMASK_ROOT>/lib/damask/DS_HDF5.xml                            #
#     or specify your own when initializing the HDF5 class.           #
#  2. Somehow the point cloud structure cannot be properly handled    #
#     by Xdmf, which is a descriptive wrapper for visualizing HDF5    #
#     using Paraview. The current solution is using cell-structured   #
#     HDF5 so that Xdmf can describe the data shape as a rectangular  #
#     mesh rather than polyvertex.                                    #
# TODO:                                                               #
#  1. Remove the <ASCII_TABLE>._tmp file; basically need a way to     #
#     just load data from the ASCII table.                            #
#  2. Add a progress monitor when transferring data from ASCII table  #
#     to HDF5.                                                        #
#  3. Find a more flexible way to handle the data structure rather    #
#     than an xml file.                                               #
# ------------------------------------------------------------------- #
|
import os
from optparse import OptionParser

import numpy as np

import damask

# Script identification used in the HDF5 command log and --version output.
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName, damask.version])
|
|
|
# ----- helper function ----- #
def get_rectMshVectors(xyz_array, posNum):
    """Return cell-boundary positions along one axis of a rectilinear grid.

    Parameters
    ----------
    xyz_array : ndarray, shape (N, 3)
        Point (cell-center) coordinates, one row per data point.
    posNum : int
        Column index of the axis to process (0 = x, 1 = y, 2 = z).

    Returns
    -------
    ndarray, shape (n+1,)
        Evenly spaced boundary coordinates bracketing the n unique
        cell-center positions found along the axis.

    Notes
    -----
    Only works for a uniform rectangular grid, and requires at least
    three distinct coordinate values along the requested axis.
    """
    v = sorted(set(xyz_array[:, posNum]))
    # Spacing between consecutive cell-center midpoints; for a uniform
    # grid this equals the cell size along the axis.
    v_interval = (v[2] + v[1]) / 2.0 - (v[1] + v[0]) / 2.0
    v_start = (v[1] + v[0]) / 2.0 - v_interval
    v_end = (v[-1] + v[-2]) / 2.0 + v_interval
    return np.linspace(v_start, v_end, len(v) + 1)
|
2016-10-13 05:50:15 +05:30
|
|
|
|
|
|
|
|
|
|
|
# ----- MAIN ---- #
desp_msg = "Convert DAMASK ascii table to HDF5 file"
parser = OptionParser(option_class=damask.extendableOption,
                      usage='%prog options [file[s]]',
                      description=desp_msg,
                      version=scriptID)
parser.add_option('-D', '--DefinitionFile',
                  # BUG FIX: dest was 'storage definition file' (contains
                  # spaces), so the -D flag wrote to an inaccessible
                  # attribute and options.DefinitionFile (read below) was
                  # always the default.  dest must match the attribute name.
                  dest='DefinitionFile',
                  type='string',
                  metavar='string',
                  help='definition file for H5 data storage')
parser.add_option('-p', '--pos', '--position',
                  dest='pos',
                  type='string', metavar='string',
                  help='label of coordinates [%default]')

parser.set_defaults(DefinitionFile='default',
                    pos='pos')

(options, filenames) = parser.parse_args()

# NOTE(review): only the first positional file is processed, despite the
# usage string advertising [file[s]].
filename = filenames[0]
|
|
|
|
|
2016-10-10 04:31:38 +05:30
|
|
|
# 'default' sentinel means: let H5Table fall back to the system-wide
# DS_HDF5.xml definition file (dsXMLFile=None).
if options.DefinitionFile == 'default':
    defFile = None
else:
    defFile = options.DefinitionFile
|
2016-10-10 04:31:38 +05:30
|
|
|
|
|
|
|
# ----- read in data using DAMASK ASCII table class ----- #
asciiTable = damask.ASCIItable(name=filename, buffered=False)
asciiTable.head_read()
asciiTable.data_readArray()
# NOTE(review): index order looks transposed — data is (rows, columns),
# so this reads row 'inc-column-index', column 0.  It only yields the
# increment number when 'inc' happens to be the first column; confirm
# against the table layout.
incNum = int(asciiTable.data[asciiTable.label_index('inc'), 0])
fullTable = np.copy(asciiTable.data)  # deep copy all data, just to be safe
labels = asciiTable.labels()
labels_idx = [asciiTable.label_index(label) for label in labels]
# Width (number of columns) of each labeled feature: difference between
# consecutive label start-columns, with the last feature running to the
# end of the table.
featuresDim = [labels_idx[i+1] - labels_idx[i] for i in range(len(labels)-1)]
featuresDim.append(fullTable.shape[1] - labels_idx[-1])
|
|
|
|
|
2016-10-13 05:50:15 +05:30
|
|
|
# ----- figure out size and grid ----- #
pos_idx = asciiTable.label_index('pos')
xyz_array = asciiTable.data[:, pos_idx:pos_idx+3]
Vx = get_rectMshVectors(xyz_array, 0)
Vy = get_rectMshVectors(xyz_array, 1)
Vz = get_rectMshVectors(xyz_array, 2)
# use the dimension of the rectangular grid to reshape all other data
# (boundary vectors have one more entry than there are cells per axis)
mshGridDim = [len(Vx) - 1, len(Vy) - 1, len(Vz) - 1]

# ----- compose cmd log ----- #
# Recorded alongside every dataset for provenance.
cmd_log = " ".join([scriptID, filename])
|
|
|
|
|
2016-10-10 04:31:38 +05:30
|
|
|
# ----- create a new HDF5 file and save the data ----- #
# Force remove any existing HDF5 file so we always start fresh.
h5fName = filename.replace(".txt", ".h5")
try:
    os.remove(h5fName)
except OSError:
    # file did not exist (or could not be removed) -- proceed either way
    pass
h5f = damask.H5Table(h5fName,
                     new_file=True,
                     dsXMLFile=defFile)
# adding increment number as root level attribute
h5f.add_attr('inc', incNum)
# add the mesh grid (cell boundary) vectors
h5f.add_data("Vx", Vx, cmd_log=cmd_log)
h5f.add_data("Vy", Vy, cmd_log=cmd_log)
h5f.add_data("Vz", Vz, cmd_log=cmd_log)
|
2016-10-13 05:50:15 +05:30
|
|
|
|
|
|
|
# add the rest of data from table
labelsProcessed = ['inc']
for fi in range(len(labels)):
    featureName = labels[fi]
    # remove trouble makers "(" and ")" from label/feature name, since
    # they are not valid in HDF5 dataset names
    if "(" in featureName:
        featureName = featureName.replace("(", "")
    if ")" in featureName:
        featureName = featureName.replace(")", "")
    # skip increment and duplicated columns in the ASCII table
    if featureName in labelsProcessed:
        continue

    featureIdx = labels_idx[fi]
    featureDim = featuresDim[fi]
    # grab the data hook (all rows, this feature's column span)
    dataset = fullTable[:, featureIdx:featureIdx+featureDim]
    # mapping 2D data onto a 3D rectangular mesh to get 4D data
    # WARNING: In paraview, the data for a recmesh is mapped as:
    # --> len(z), len(y), len(x), size(data)
    # dataset = dataset.reshape((mshGridDim[0],
    #                            mshGridDim[1],
    #                            mshGridDim[2],
    #                            dataset.shape[1]))
    # write out data
    print("adding {}...".format(featureName))
    h5f.add_data(featureName, dataset, cmd_log=cmd_log)
    # record the processed label so duplicates are skipped
    labelsProcessed.append(featureName)
|