import os,sys,string,re
class DAMASK_TOOLS():
    """Path bookkeeping for a DAMASK installation.

    Resolves the DAMASK root and binary directories from the environment
    (DAMASK_ROOT / DAMASK_BIN) or relative to the running script, and
    merges optional tool locations from '<root>/lib/pathinfo'.
    """

    __slots__ = ['pathInfo',  # dict: lowercase tool name -> installation path
                ]

    def __init__(self, rootRelation='.'):
        """Initialize default tool paths and merge user-defined overrides.

        rootRelation: location of the DAMASK root relative to the running
        script; used only when DAMASK_ROOT is not set in the environment.
        """
        self.pathInfo = {
                         'acml': '/opt/acml4.4.0',
                         'fftw': '.',
                         'msc':  '/msc',
                        }
        self.get_pathInfo(rootRelation)

    def relPath(self, relative='.'):
        """Return 'relative' joined onto the DAMASK root directory."""
        return os.path.join(self.rootDir(), relative)

    def rootDir(self, rootRelation='.'):
        """DAMASK root: $DAMASK_ROOT if set, else relative to the script."""
        damask_root = os.getenv('DAMASK_ROOT')
        if not damask_root:  # unset and empty string both fall back (was '== None or == """')
            damask_root = os.path.join(os.path.dirname(sys.argv[0]), rootRelation)
        return damask_root

    def binDir(self, rootRelation='.'):
        """DAMASK binary directory: $DAMASK_BIN if set, else '<root>/bin/'."""
        damask_bin = os.getenv('DAMASK_BIN')
        if not damask_bin:
            damask_bin = self.relPath('bin/')
        return damask_bin

    def get_pathInfo(self, rootRelation='.'):
        """Merge user-defined tool paths from '<root>/lib/pathinfo'.

        Each line of that file is '<name> <path>'; paths are resolved
        relative to '<root>/lib/'. A missing or malformed file is silently
        ignored (deliberate best effort, as in the original bare except).
        NOTE(review): rootRelation is currently unused here -- the lookup
        always goes through relPath()/rootDir() with their defaults.
        """
        try:  # check for user-defined pathinfo
            with open(self.relPath('lib/pathinfo')) as info:  # was unclosed-on-error open()
                content = info.readlines()
            for line in content:
                items = line.split()
                self.pathInfo[items[0].lower()] = \
                    os.path.normpath(os.path.join(self.relPath('lib/'), items[1]))
        except (EnvironmentError, IndexError):  # narrowed from bare 'except:'
            pass

    def check_env(self):
        """Report whether DAMASK_ROOT is set; warn on stdout if not."""
        if os.getenv('DAMASK_ROOT') is None:
            print('No DAMASK_ROOT environment variable, did you run DAMASK/installation/setup_shellrc?')
        return os.getenv('DAMASK_ROOT') is not None  # was '!= None'


class ASCII_TABLE():
    '''
    Reads and writes whitespace-separated ASCII tables.

    A table may start with a header: a first line containing "N head..."
    announces N header lines in total, the last of which carries the
    column labels. Output lines are queued and flushed on demand, or
    immediately after each write when 'buffered' is False.
    '''
    import sys  # class-level so the default-argument streams below resolve

    __slots__ = ['__IO__',   # stream handles, output queue, buffering state
                 'info',     # header lines (without the label line)
                 'labels',   # column labels
                 'data',     # current data row as list of strings
                ]

    def __init__(self,
                 fileIn=sys.stdin,
                 fileOut=sys.stdout,
                 buffered=True):
        """Bind input/output streams and reset all table state."""
        self.__IO__ = {'in': fileIn,
                       'out': fileOut,
                       'output': [],
                       'buffered': buffered,
                       'validReadSize': 0,  # expected column count, set by head_read()
                      }
        self.info = []
        self.labels = []
        self.data = []

    def output_write(self, what):
        """Queue 'what' (string or arbitrarily nested list) for output."""
        if isinstance(what, list):
            for item in what:
                self.output_write(item)
        else:
            self.__IO__['output'] += [str(what)]
            self.__IO__['buffered'] or self.output_flush()  # unbuffered: flush at once

    def output_flush(self, clear=True):
        """Write the queued lines to the output stream; optionally clear."""
        self.__IO__['output'] == [] or self.__IO__['out'].write('\n'.join(self.__IO__['output']) + '\n')
        if clear:
            self.output_clear()

    def output_clear(self):
        """Drop any queued output."""
        self.__IO__['output'] = []

    def head_read(self):
        '''
        get column labels by either reading the first row, or
        --if keyword "head[*]" is present-- the last line of the header
        '''
        try:
            self.__IO__['in'].seek(0)  # rewind when the stream supports it
        except (AttributeError, OSError, ValueError):  # narrowed from bare 'except:'
            pass  # e.g. a pipe: read from the current position
        firstline = self.__IO__['in'].readline()
        m = re.search(r'(\d+)\s*head', firstline.lower())  # raw string (was plain '...')
        if m:  # "N head..." => N-1 info lines plus one label line follow
            self.info = [self.__IO__['in'].readline().strip()
                         for i in range(1, int(m.group(1)))]  # was py2-only xrange
            self.labels = self.__IO__['in'].readline().split()
        else:  # no header: first line already carries the labels
            self.info = []
            self.labels = firstline.split()
        self.__IO__['validReadSize'] = len(self.labels)

    def head_write(self):
        """Queue the header: line count, info lines, then the label line."""
        self.output_write(['%i\theader' % (len(self.info) + 1),
                           self.info,
                           '\t'.join(self.labels)])

    def labels_append(self, what):
        """Append one label or a (nested) list of labels, stringified."""
        if isinstance(what, list):
            for item in what:
                self.labels_append(item)
        else:
            self.labels += [str(what)]

    def info_append(self, what):
        """Append one info line or a (nested) list of them, stringified."""
        if isinstance(what, list):
            for item in what:
                self.info_append(item)
        else:
            self.info += [str(what)]

    def data_read(self):
        """Read the next data row into self.data.

        Rows with the wrong number of entries yield an empty self.data.
        Returns False only at end of input.
        """
        line = self.__IO__['in'].readline()
        items = line.split()[:self.__IO__['validReadSize']]  # get next data row
        self.data = {False: [],
                     True: items}[len(items) == self.__IO__['validReadSize']]  # take if correct number of entries
        return line != ''

    def data_write(self):
        """Queue self.data (a row, or a list of rows) as TAB-joined lines."""
        if isinstance(self.data[0], list):
            self.output_write(['\t'.join(map(str, items)) for items in self.data])
        else:
            self.output_write('\t'.join(map(str, self.data)))

    def data_append(self, what):
        """Append one datum or a (nested) list of data, stringified."""
        if isinstance(what, list):
            for item in what:
                self.data_append(item)
        else:
            self.data += [str(what)]


class MATERIAL_CONFIG():
    '''
    Reads, manipulates and writes material.config files
    '''

    # 'parts' added: __init__ assigns self.parts, which __slots__ must
    # declare on new-style classes or the assignment raises AttributeError.
    __slots__ = ['parts', 'data']

    def __init__(self):
        """Set up the ordered part list and an empty data store."""
        self.parts = [
            'homogenization',
            'microstructure',
            'crystallite',
            'phase',
            'texture',
        ]  # ordered (!) list of parts
        self.data = {
            'homogenization': {'__order__': []},
            'microstructure': {'__order__': []},
            'crystallite':    {'__order__': []},
            'phase':          {'__order__': []},
            'texture':        {'__order__': []},
        }

    def __repr__(self):
        """Return the complete material.config text."""
        me = []
        for part in self.parts:
            print('doing %s' % part)  # progress chatter (was a py2 print statement)
            me += ['', '#-----------------------------#', '<%s>' % part, '#-----------------------------#', ]
            for section in self.data[part]['__order__']:
                me += ['', '[%s] %s' % (section, '-' * max(0, 27 - len(section))), '', ]
                for key in self.data[part][section]['__order__']:
                    if key.startswith('(') and key.endswith(')'):  # multiple (key)
                        me += ['%s\t%s' % (key, ' '.join(values)) for values in self.data[part][section][key]]
                    else:  # plain key
                        me += ['%s\t%s' % (key, ' '.join(map(str, self.data[part][section][key])))]
        return '\n'.join(me)

    def parse_data(self, part=None, sections=None, content=None):
        """Parse 'content' (list of lines) and store the data of <part>.

        sections: optional list of section names to restrict parsing to
        (None or [] means all sections; was a mutable default []).
        """
        re_part = re.compile(r'^<(.+)>$')  # pattern for part
        re_sec = re.compile(r'^\[(.+)\]$')  # pattern for section

        name_section = ''
        active = False

        for line in content:
            line = line.split('#')[0].strip()  # kill comments and extra whitespace
            if not line:  # nothing survives: next line
                continue
            match_part = re_part.match(line)
            if match_part:  # found <part> separator
                active = (match_part.group(1) == part)  # only active in <part>
                continue
            if not active:
                continue
            match_sec = re_sec.match(line)
            if match_sec:  # found [section]
                name_section = match_sec.group(1)  # remember name ...
                if '__order__' not in self.data[part]:
                    self.data[part]['__order__'] = []
                self.data[part]['__order__'].append(name_section)  # ... and position
                self.data[part][name_section] = {'__order__': []}
                continue
            if not sections or name_section in sections:  # respect subset
                items = line.split()
                if items[0] not in self.data[part][name_section]:  # first encounter of key?
                    self.data[part][name_section][items[0]] = []  # create item
                    self.data[part][name_section]['__order__'].append(items[0])
                if items[0].startswith('(') and items[0].endswith(')'):  # multiple "(key)"
                    self.data[part][name_section][items[0]].append(items[1:])
                else:  # plain key
                    self.data[part][name_section][items[0]] = items[1:]

    def read(self, file=None):
        """Read 'file' and parse every known part from it."""
        with open(file, 'r') as f:  # was open/readlines/close without a guard
            c = f.readlines()
        for p in self.parts:
            self.parse_data(part=p, content=c)

    def write(self, file='material.config', overwrite=False):
        """Write the configuration to 'file'.

        Unless 'overwrite', an existing file is preserved by suffixing the
        new name with '_<i>'. Returns the name actually written.
        """
        if overwrite is False:
            if os.path.exists(file):
                i = 1
                while os.path.exists(file + '_%i' % i):
                    i += 1
                file += '_%i' % i
        print('Writing material data to file %s' % file)
        with open(file, 'w') as f:
            f.write(str(self))
        return file

    def add_data(self, part=None, section=None, data=None):
        '''Generic data adding/updating'''
        if part not in self.parts:
            raise Exception('invalid part %s' % part)
        if data is None:  # was a shared mutable default ({})
            data = {}
        if section not in self.data[part]:
            self.data[part]['__order__'] += [section]
        self.data[part][section] = data

    def add_homogenization(self, label='', type='', Ngrains=None):
        """Add an isostrain homogenization section (RGC not implemented)."""
        if type.lower() == 'isostrain':
            self.add_data(part='homogenization',
                          section=label,
                          data={'type': [type],
                                'Ngrains': [Ngrains],
                                '__order__': ['type', 'Ngrains'],
                                })
        elif type.lower() == 'rgc':
            raise Exception('Please implement me')

    def add_crystallite(self, label='', output=None):
        """Add a crystallite section with its (output) requests."""
        if output is None:  # was a shared mutable default ([])
            output = []
        self.add_data(part='crystallite',
                      section=label,
                      data={'(output)': [[o] for o in output],
                            '__order__': ['(output)']})  # was the bare string '(output)', which broke __repr__

    def add_texture(self, label='', type='', eulers=None, scatter=0., fraction=1.):
        ''' Experimental! Needs expansion to multi-component textures...'''
        if eulers is None:  # was a shared mutable default ([])
            eulers = []
        if type == '(gauss)':
            texture = {type: [['phi1', '%f' % float(eulers[0]),
                               'Phi', '%f' % float(eulers[1]),
                               'phi2', '%f' % float(eulers[2]),
                               'scatter', '%f' % float(scatter),
                               'fraction', '%f' % fraction]],
                       '__order__': [type]}  # was the section label string, which broke __repr__
        else:
            raise Exception('Please implement me.')
        self.add_data(part='texture', section=label, data=texture)

    def add_phase(self, file='', label='', newlabel=None, phase=None):
        ''' USAGE:
            - read phase "label" from file
            OR
            - phase is dict with one key (pass label=None)
        '''
        if file and label and (phase is None):
            other = MATERIAL_CONFIG()
            other.read(file=file)
            phase = {label: other.data['phase'][label]}
            label = None
            print(phase)  # was a py2 print statement
        if len(phase) == 1 and label is None:
            if newlabel:
                label = newlabel
            else:
                label = next(iter(phase))  # was phase.keys()[0] (py2-only)
            print('Adding phase %s' % label)
            self.add_data(part='phase', section=label, data=phase)
        else:
            raise Exception('Wrong arguments')

    def add_microstructure(self, label=None,
                           crystallite=None,  # label
                           phases=None,       # list of labels
                           textures=None,     # list of labels
                           fractions=None):   # list of floats
        ''' Experimental! Needs expansion to multi-constituent microstructures...'''
        c = self.data['crystallite']['__order__'].index(crystallite) + 1
        constituent = phases[:]
        if fractions is None:  # default: equal volume fractions
            fractions = [1. / len(phases)] * len(phases)
        for i in range(len(phases)):
            p = self.data['phase']['__order__'].index(phases[i]) + 1
            t = self.data['texture']['__order__'].index(textures[i]) + 1
            f = fractions[i]
            constituent[i] = ['phase', '%i' % p, 'texture', '%i' % t, 'fraction', '%f' % f]
        data = {'crystallite': ['%i' % c],
                '(constituent)': constituent,
                '__order__': ['crystallite', '(constituent)']}
        self.add_data(part='microstructure', section=label, data=data)

    def change_value(self, part=None,
                     section=None,
                     key=None,
                     value=None):
        """Replace the value list stored at data[part][section][key].

        Scalars are stringified and wrapped into a one-element list; a
        change in length is reported on stdout.
        """
        if not isinstance(value, list):  # was 'type(value) is not type([])'
            if not isinstance(value, str):
                value = '%s' % value
            value = [value]
        newlen = len(value)
        oldval = self.data[part][section][key]
        oldlen = len(oldval)
        print('changing %s:%s:%s:%s' % (part, section, key, oldval))
        self.data[part][section][key] = value
        print('new: %s' % self.data[part][section][key])
        if newlen != oldlen:  # was 'is not' identity test on ints
            print('Length of value was changed from %i to %i!' % (oldlen, newlen))

