centralized functionality for ang import

parent b4ae91f817
commit 48c21045d7
@@ -1,8 +1,10 @@
#!/usr/bin/env python2.7
# -*- coding: UTF-8 no BOM -*-
#!/usr/bin/env python3

import os
import sys
from io import StringIO
from optparse import OptionParser

import damask

scriptName = os.path.splitext(os.path.basename(__file__))[0]
@@ -19,47 +21,10 @@ Convert TSL/EDAX *.ang file to ASCIItable
""", version = scriptID)

(options, filenames) = parser.parse_args()

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
  try:
    table = damask.ASCIItable(name = name,
                              outname = os.path.splitext(name)[0]+'.txt' if name else name,
                              buffered = False, labeled = False)
  except: continue
  damask.util.report(scriptName,name)

# --- interpret header -----------------------------------------------------------------------------

  table.head_read()

# --- read comments --------------------------------------------------------------------------------

  table.info_clear()
  while table.data_read(advance = False) and table.line.startswith('#'):  # cautiously (non-progressing) read header
    table.info_append(table.line)                                         # add comment to info part
    table.data_read()                                                     # wind forward

  table.labels_clear()
  table.labels_append(['1_Euler','2_Euler','3_Euler',
                       '1_pos','2_pos',
                       'IQ','CI','PhaseID','Intensity','Fit',
                      ],                                                  # OIM Analysis 7.2 Manual, p 403 (of 517)
                      reset = True)

# ------------------------------------------ assemble header ---------------------------------------

  table.head_write()

#--- write remainder of data file ------------------------------------------------------------------

  outputAlive = True
  while outputAlive and table.data_read():
    outputAlive = table.data_write()

# ------------------------------------------ finalize output ---------------------------------------

  table.close()
  table = damask.Table.from_ang(StringIO(''.join(sys.stdin.read())) if name is None else name)
  table.to_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.txt')
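The rewritten script delegates everything the removed ASCIItable block used to do (header interpretation, relabeling, copying the data) to the new damask.Table API. Below is a minimal sketch of the same conversion as a stand-alone snippet, using only the calls that appear in this commit; the command-line and stdin handling of the real script is omitted, and 'scan.ang' is a hypothetical input name.

#!/usr/bin/env python3
# Sketch only: convert a single TSL/EDAX .ang file to an ASCIItable .txt file.
import os
import sys

import damask

fname = sys.argv[1]                                  # e.g. 'scan.ang' (hypothetical)
table = damask.Table.from_ang(fname)                 # header lines become comments, data becomes labeled columns
table.to_ASCII(os.path.splitext(fname)[0] + '.txt')  # writes 'scan.txt' next to the input
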
@@ -3,6 +3,8 @@ import re
import pandas as pd
import numpy as np

from . import version

class Table():
    """Store spreadsheet-like data."""

@@ -20,7 +22,7 @@ class Table():
            Additional, human-readable information.

        """
        self.comments = [] if comments is None else [c for c in comments]
        self.comments = ['table.py v {}'.format(version)] if not comments else [c for c in comments]
        self.data = pd.DataFrame(data=data)
        self.shapes = shapes
        self.__label_condensed()
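With this change an empty or missing comments argument no longer yields an empty list but records the damask version. A small sketch of the expected behaviour, assuming the positional (data, shapes, comments=None) signature implied by the constructor calls elsewhere in this diff and a hypothetical one-column table:

import damask

t = damask.Table([[1.0],[2.0]], {'x':(1,)})   # no comments passed
print(t.comments)                             # expected: ['table.py v <version>']
print(t.labels)                               # ['x'], taken from the keys of shapes
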
@@ -69,13 +71,16 @@ class Table():
            f = open(fname)
        except TypeError:
            f = fname
            f.seek(0)

        header,keyword = f.readline().split()
        if keyword == 'header':
            header = int(header)
        else:
            raise Exception
        comments = [f.readline()[:-1] for i in range(1,header)]

        comments = ['table.py:from_ASCII v {}'.format(version)]
        comments+= [f.readline()[:-1] for i in range(1,header)]
        labels = f.readline().split()

        shapes = {}
@@ -95,6 +100,47 @@ class Table():

        return Table(data,shapes,comments)

    @staticmethod
    def from_ang(fname):
        """
        Create table from TSL ang file.

        A valid TSL ang file needs to contain the following columns:
          * Euler angles (Bunge notation) in radians, 3 floats, label 'eu'.
          * Spatial position in meters, 2 floats, label 'pos'.
          * Image quality, 1 float, label 'IQ'.
          * Confidence index, 1 float, label 'CI'.
          * Phase ID, 1 int, label 'ID'.
          * SEM signal, 1 float, label 'intensity'.
          * Fit, 1 float, label 'fit'.

        Parameters
        ----------
        fname : file, str, or pathlib.Path
            Filename or file for reading.

        """
        shapes = {'eu':(3,), 'pos':(2,),
                  'IQ':(1,), 'CI':(1,), 'ID':(1,), 'intensity':(1,), 'fit':(1,)}
        try:
            f = open(fname)
        except TypeError:
            f = fname
            f.seek(0)

        content = f.readlines()

        comments = ['table.py:from_ang v {}'.format(version)]
        for line in content:
            if line.startswith('#'):
                comments.append(line.strip())
            else:
                break

        data = np.loadtxt(content)

        return Table(data,shapes,comments)

    @property
    def labels(self):
        return list(self.shapes.keys())
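Taken together, from_ang and the labels property provide the one-call import that the simplified script above relies on. A usage sketch consistent with the tests added further below; 'simple.ang' refers to the reference file that follows, and the path is assumed to be resolvable from the working directory:

import damask

t = damask.Table.from_ang('simple.ang')
print(t.labels)       # ['eu', 'pos', 'IQ', 'CI', 'ID', 'intensity', 'fit']
print(t.data.shape)   # (4, 10): 3 Euler + 2 position + IQ, CI, ID, intensity, fit
t.to_ASCII('simple.txt')
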
@@ -0,0 +1,138 @@
# TEM_PIXperUM 1.000000
# x-star 240.000000
# y-star 240.000000
# z-star 240.000000
# WorkingDistance 20.000000
#
# Phase 1
# MaterialName Iron(Alpha)
# Formula
# Info
# Symmetry 43
# LatticeConstants 2.870 2.870 2.870 90.000 90.000 90.000
# NumberFamilies 100
# hklFamilies 9223440 0 2 32763 0.000000 32763
# hklFamilies 0 0 0 9218712 0.000000 9218712
# hklFamilies 0 0 3801155 0 0.000000 0
# hklFamilies 5570652 6619251 7536754 -1203738484 0.000000 -1203738484
# hklFamilies 7143516 5111900 7864421 32763 0.000000 32763
# hklFamilies 6488180 7274604 6553717 9220480 0.000000 9220480
# hklFamilies 3145820 2949169 3145777 0 0.000000 0
# hklFamilies 3014704 7209057 103 9220488 0.000000 9220488
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 9220032 0.000000 9220032
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 -1203728363 0.000000 -1203728363
# hklFamilies 0 0 0 32763 0.000000 32763
# hklFamilies 0 0 0 9218628 0.000000 9218628
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 9218504 0.000000 9218504
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 9219904 0.000000 9219904
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 0 -0.000046 0
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 256 0.000000 256
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 -1203753636 0.000000 -1203753636
# hklFamilies 0 0 0 32763 0.000000 32763
# hklFamilies 0 0 0 9220576 0.000000 9220576
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 9218736 0.000000 9218736
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 103219574 0.000000 103219574
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 9220576 0.000000 9220576
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 9220692 0.000000 9220692
# hklFamilies 1434293657 0 0 0 0.000000 0
# hklFamilies 0 0 0 9218584 0.000000 9218584
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 9219976 0.000000 9219976
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 256 0.000000 256
# hklFamilies 0 0 69473872 0 0.000000 0
# hklFamilies 0 1889785611 -1546188227 -1203753636 -0.000046 -1203753636
# hklFamilies 9224144 0 1434294456 32763 0.000000 32763
# hklFamilies 0 9224160 0 9220672 0.000000 9220672
# hklFamilies -1168390977 32763 851982 0 0.000000 0
# hklFamilies 0 304 0 9218816 0.000000 9218816
# hklFamilies 27030208 0 1434297593 0 0.000000 0
# hklFamilies 0 9224160 0 101654020 0.000000 101654020
# hklFamilies 9224064 0 0 0 0.000000 0
# hklFamilies 0 25563456 0 9220672 0.000000 9220672
# hklFamilies 9224544 0 25559040 0 0.000000 0
# hklFamilies 0 25559788 0 9220788 0.000000 9220788
# hklFamilies 176 0 304 24 0.000000 24
# hklFamilies 0 25562304 0 4 0.000000 4
# hklFamilies 9224208 0 0 0 0.000000 0
# hklFamilies 0 281 0 9220032 0.000000 9220032
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 -1168390977 32763 9220660 0.000000 9220660
# hklFamilies 21 0 -1168390977 8 0.000000 8
# hklFamilies 32763 2490388 0 24 0.000000 24
# hklFamilies 48 0 69650048 25 0.000000 25
# hklFamilies 0 -1216995621 32763 65535 -0.000046 65535
# hklFamilies 0 0 25562688 1 0.000000 1
# hklFamilies 0 0 21776 0 -0.000058 0
# hklFamilies 25562688 0 25559724 0 0.000000 0
# hklFamilies 0 25559040 0 1179652 0.000000 1179652
# hklFamilies 25559724 0 25562304 32763 0.000000 32763
# hklFamilies 0 48 0 9219904 0.000000 9219904
# hklFamilies 25562304 0 28 0 0.000000 0
# hklFamilies 0 0 0 8781958 0.000000 8781958
# hklFamilies 31 0 0 0 0.000000 0
# hklFamilies 0 0 0 103304392 0.000000 103304392
# hklFamilies 3 0 48 0 0.000000 0
# hklFamilies 0 9224505 0 103219694 -0.000046 103219694
# hklFamilies 27000832 0 -1168393705 0 0.000000 0
# hklFamilies 32763 25559040 0 9220192 0.000000 9220192
# hklFamilies 0 32763 31 0 0.000000 0
# hklFamilies 0 0 0 9219872 0.000000 9219872
# hklFamilies 69729712 0 9224640 0 0.000000 0
# hklFamilies 0 69729904 0 1397706823 0.000000 1397706823
# hklFamilies 69911504 0 0 59 0.000000 59
# hklFamilies 0 27007968 0 103219200 0.000000 103219200
# hklFamilies 0 0 -1216843775 0 0.000000 0
# hklFamilies 32763 69911504 0 0 0.000000 0
# hklFamilies -1168296496 32763 9225328 0 0.000000 0
# hklFamilies 0 1434343267 0 9632160 0.000000 9632160
# hklFamilies 69908840 0 -1216995621 0 0.000000 0
# hklFamilies 32763 256 0 9632112 0.000000 9632112
# hklFamilies 0 0 399376220 0 0.000000 0
# hklFamilies 21776 1966087 4456474 262148 0.000000 262148
# hklFamilies 9224704 0 1434198234 0 0.000000 0
# hklFamilies 0 0 0 9704044 0.000000 9704044
# hklFamilies -1168373699 32763 1 0 0.000000 0
# hklFamilies 0 69911504 0 94961568 -0.000046 94961568
# hklFamilies 1 0 69911504 0 0.000000 0
# hklFamilies 0 10 0 9220016 0.000000 9220016
# hklFamilies -1 0 27030208 0 0.000000 0
# hklFamilies 0 1434488087 18 9219992 -0.000046 9219992
# ElasticConstants 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000
# ElasticConstants 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000
# ElasticConstants 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000
# ElasticConstants 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000
# ElasticConstants 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000
# ElasticConstants 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000
# Categories1 1 1 1 1
#
# GRID: SqrGrid
# XSTEP: 0.050000
# YSTEP: 0.050000
# NCOLS_ODD: 2
# NCOLS_EVEN: 2
# NROWS: 2
#
# OPERATOR:
#
# SAMPLEID:
#
# SCANID:
#
0.0 0.0 0.0 0.00 0.00 60.0 20.0 1 2.0 1.5
0.0 2.0 0.0 0.05 0.00 60.0 20.0 1 2.0 1.5
0.0 2.0 0.0 0.00 0.05 60.0 20.0 1 2.0 1.5
0.0 0.0 1.0 0.05 0.05 60.0 20.0 1 2.0 1.5
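For reference, this is how from_ang digests the file above: every leading '#' line is kept verbatim as a comment, and numpy.loadtxt (which skips '#' lines by default) parses the four trailing rows into a (4, 10) array. A sketch under the assumption that simple.ang is read from the current directory:

import numpy as np

with open('simple.ang') as f:
    content = f.readlines()

comments = [line.strip() for line in content if line.startswith('#')]   # the 134 header lines
data     = np.loadtxt(content)                                          # '#' lines are ignored
assert data.shape == (4, 10)
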
@@ -47,6 +47,17 @@ class TestTable:
        new = Table.from_ASCII(f)
        assert all(default.data==new.data)

    def test_read_ang_str(self,reference_dir):
        new = Table.from_ang(os.path.join(reference_dir,'simple.ang'))
        assert new.data.shape == (4,10) and \
               new.labels == ['eu', 'pos', 'IQ', 'CI', 'ID', 'intensity', 'fit']

    def test_read_ang_file(self,reference_dir):
        f = open(os.path.join(reference_dir,'simple.ang'))
        new = Table.from_ang(f)
        assert new.data.shape == (4,10) and \
               new.labels == ['eu', 'pos', 'IQ', 'CI', 'ID', 'intensity', 'fit']

    @pytest.mark.parametrize('fname',['datatype-mix.txt','whitespace-mix.txt'])
    def test_read_strange(self,reference_dir,fname):
        with open(os.path.join(reference_dir,fname)) as f:
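These two cases sit next to the existing from_ASCII tests and reuse the reference_dir fixture, covering both ways of calling from_ang (path string and open file handle). Assuming the usual layout of the test suite, something like `pytest -k ang` run from the tests directory exercises only the new .ang import paths.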