modified testing class to use generic "damask" module.
removed obsolete individual class definitions.
This commit is contained in:
parent ab7b9247c7
commit 48729ff2e1
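
The hunks below swap the old msc_tools.MSC_TOOLS helper for the solver interface of the generic damask package. As a minimal sketch of the calling pattern this commit introduces (class and method names are taken from the diff; the output file name is a hypothetical placeholder built from the default model/job names used elsewhere in this commit):

import damask

# drive an MSC.Marc run through the generic damask package instead of msc_tools
m = damask.solver.Marc()
m.submit_job(compile=False, compiled_dir='../../../code/')
exit_number = m.exit_number_from_outFile(outFile='one_element_model_job1.out')  # hypothetical file name
if exit_number != 3004:
    print('Job did not exit with No. 3004')
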
@@ -3,9 +3,7 @@
import os, sys
import subprocess,shutil

import damask
import msc_tools

class Test():
    '''
@@ -100,11 +98,10 @@ class Test():
        self.copy_from_ref_list()
        self.copy_files_from_reference_results()
        os.chdir('./current_results')
        #m=msc_tools.MSC_TOOLS()
        #m.submit_job(compile=compile, compiled_dir='../../../code/')
        damask.solver.Marc().submit_job(compile=compile, compiled_dir='../../../code/')
        m = damask.solver.Marc()
        m.submit_job(compile=compile, compiled_dir='../../../code/')
        print('simulation submitted')
        self.exit_number=m.exit_number_from_outFile(outFile=self.modelname+'_'+self.jobname+'.out')
        self.exit_number = m.exit_number_from_outFile(outFile=self.modelname+'_'+self.jobname+'.out')

        if not self.exit_number==3004:
            print('Job did not exit with No. 3004')

@@ -1,158 +0,0 @@
#!/usr/bin/env python
import os, sys
import subprocess,shutil

import damask_tools
import msc_tools

damask_tools.DAMASK_TOOLS().check_env()

class DAMASK_TEST():
    '''
    General class for testing.
    Is sub-classed by the individual tests.
    '''
    #define those according to your test
    modelname=None
    jobname=None
    test_dir=None
    spectral_options=None
    compile = False
    post_txt = None
    tol = 0.0

    has_variants=False   # False ==> A single test case is run
    #has_variants=True   # True ==> Need to define the test_variants generator

    def test_variants(self):
        '''
        If has_variants == True this method defines the generator for test variants.
        This generator must be defined in each test,
        depending on what to change: orientations, parameters,....
        Below is an EXAMPLE.
        '''
        subtest_orientations=[[0.0,90.0,0.0],[0.0,0.0,90.0]]
        for i,o in enumerate(subtest_orientations):
            from damask_tools import MATERIAL_CONFIG
            mat=MATERIAL_CONFIG()
            mat.read('material.config_base')
            mat.add_texture(label='Grain001',
                            type ='(gauss)',
                            eulers = o)
            mat.write(overwrite=True)
            print(mat.data['texture']['Grain001'])
            testlabel='orientation_%03i'%i
            yield(testlabel)

    def run_test(self):
        res=[]
        if self.has_variants:
            for t in self.test_variants():
                print '###############################################'
                print '###############################################'
                print(t)
                print '###############################################'
                val=self.run_single_test(t)
                res.append(val==True)
        else:
            val=self.run_single_test()
            res.append(val==True)
        if all(res) is True:
            return True
        print(res)
        return False

    def run_single_test(self,variant=None):
        self.clean_current_results()
        if self.calc_current_results(variant) is False:
            return False
        print('simulation finished')
        self.postprocess()
        if self.compare_to_reference() is False:
            print '++++++++ Test not OK +++++++++'
            return False
        print 'Test OK'
        return True

    def clean_current_results(self):
        os.chdir(self.test_dir)
        try:
            shutil.rmtree('current_results')
        except:
            print('Could not delete current_results')
        os.mkdir('current_results')

    def calc_current_results(self):
        '''
        Should be defined in the individual tests
        '''
        pass

    def calc_marc(self,compile=None):
        '''
        Calculates current results for MSC.Marc
        '''
        if compile is None: compile=self.compile
        self.copy_from_ref=[self.modelname+'_'+self.jobname+'.dat',
                            self.modelname+'.mfd',   # for dev
                            'material.config'
                           ]
        self.copy_files_from_reference_results()
        os.chdir('./current_results')
        m=msc_tools.MSC_TOOLS()
        m.submit_job(compile=compile, compiled_dir='../../../code/')
        print('simulation submitted')
        self.exit_number=m.exit_number_from_outFile(outFile=self.modelname+'_'+self.jobname+'.out')

        if not self.exit_number==3004:
            print('Job did not exit with No. 3004')
            return False
        return True

    def calc_spectral(self, compile=None):
        pass

    def copy_files_from_reference_results(self):
        for file in self.copy_from_ref:
            shutil.copy2('./reference_results/%s'%file,'./current_results/%s'%file)
            # Note: possibly symlinking? No, because copy is OS independent.

    def read_val_from_file(self,fname=None):
        file = open(fname,'r')
        headerlength = int(file.readline().split()[0]) + 1
        file.close()
        import numpy as N
        val = N.loadtxt(fname,skiprows=headerlength)
        return val

    def compare_to_reference(self):
        import string, numpy as N
        print 'comparing results against reference_results...'
        os.chdir(os.path.join(self.test_dir,'current_results'))
        cur=self.read_val_from_file(fname='postProc/'+self.post_txt)
        ref=self.read_val_from_file(fname='../reference_results/postProc/'+self.post_txt)

        err=abs((ref/cur)-1.)    # relative tolerance
        #err=abs(ref-cur)        # absolute tolerance
        print 'tol', self.tol
        print 'max error', N.max(err)
        if N.max(err)>self.tol:
            return False
        return True

    def postprocess(self):
        print 'postprocessing results ...'
        os.chdir(self.test_dir)
        file=open('./postprocessing.cmd','r')
        postproc=file.readlines()
        file.close()
        os.chdir(os.path.join(self.test_dir,'current_results'))
        for cmd in postproc:   # PHILIP: suggestion to just execute the script "postprocessing" directly within a shell, i.e. os.system('../postprocessing')
            print(cmd)
            os.system(cmd)     # PHILIP: reason is that for loops and the like get broken with line by line execution from here...


if __name__ == "__main__":
    test=DAMASK_TEST()
    test.run_test()

@@ -1,346 +0,0 @@
import os,sys,string,re


class DAMASK_TOOLS():

    __slots__ = ['pathInfo',
                ]

    def __init__(self,rootRelation = '.'):
        self.pathInfo = {\
                         'acml': '/opt/acml4.4.0',
                         'fftw': '.',
                         'msc': '/msc',
                        }
        self.get_pathInfo(rootRelation)

    def relPath(self,relative = '.'):
        return os.path.join(self.rootDir(),relative)

    def rootDir(self,rootRelation = '.'):      # getting pathinfo
        damask_root = os.getenv('DAMASK_ROOT')
        if damask_root == '' or damask_root == None: damask_root = os.path.join(os.path.dirname(sys.argv[0]),rootRelation)
        return damask_root

    def binDir(self,rootRelation = '.'):       # getting pathinfo
        damask_bin = os.getenv('DAMASK_BIN')
        if damask_bin == '' or damask_bin == None: damask_bin = self.relPath('bin/')
        return damask_bin

    def get_pathInfo(self,rootRelation = '.'): # getting pathinfo
        damask_root = self.rootDir(rootRelation)

        try:                                   # check for user-defined pathinfo
            file = open(self.relPath('lib/pathinfo'))
            content = file.readlines()
            file.close()
            for line in content:
                self.pathInfo[line.split()[0].lower()] = os.path.normpath(os.path.join(self.relPath('lib/'),line.split()[1]))
        except:
            pass

    def check_env(self):
        import os
        if os.getenv('DAMASK_ROOT') is None:
            print('No DAMASK_ROOT environment variable, did you run DAMASK/installation/setup_shellrc?')
        return os.getenv('DAMASK_ROOT') != None


class ASCII_TABLE():
    '''
    There should be a doc string here :)
    '''
    import sys
    __slots__ = ['__IO__',
                 'info',
                 'labels',
                 'data',
                ]

    def __init__(self,
                 fileIn = sys.stdin,
                 fileOut = sys.stdout,
                 buffered = True):
        self.__IO__ = {'in': fileIn,
                       'out':fileOut,
                       'output':[],
                       'buffered':buffered,
                       'validReadSize': 0,
                      }
        self.info = []
        self.labels = []
        self.data = []

    def output_write(self,
                     what):
        if isinstance(what,list):
            for item in what: self.output_write(item)
        else:
            self.__IO__['output'] += [str(what)]
            self.__IO__['buffered'] or self.output_flush()

    def output_flush(self,
                     clear = True):
        self.__IO__['output'] == [] or self.__IO__['out'].write('\n'.join(self.__IO__['output']) + '\n')
        if clear: self.output_clear()

    def output_clear(self):
        self.__IO__['output'] = []

    def head_read(self):
        '''
        Get column labels by either reading the first row or,
        if the keyword "head[*]" is present, the last line of the header.
        '''
        try:
            self.__IO__['in'].seek(0)
        except:
            pass
        firstline = self.__IO__['in'].readline()
        m = re.search('(\d+)\s*head', firstline.lower())
        if m:
            self.info = [self.__IO__['in'].readline().strip() for i in xrange(1,int(m.group(1)))]
            self.labels = self.__IO__['in'].readline().split()
        else:
            self.info = []
            self.labels = firstline.split()
        self.__IO__['validReadSize'] = len(self.labels)

    def head_write(self):
        self.output_write (['%i\theader'%(len(self.info)+1),
                            self.info,
                            '\t'.join(self.labels)])

    def labels_append(self,
                      what):
        if isinstance(what,list):
            for item in what: self.labels_append(item)
        else: self.labels += [str(what)]

    def info_append(self,
                    what):
        if isinstance(what,list):
            for item in what: self.info_append(item)
        else: self.info += [str(what)]

    def data_read(self):
        line = self.__IO__['in'].readline()
        items = line.split()[:self.__IO__['validReadSize']]                      # get next data row
        self.data = {False: [],
                     True: items}[len(items) == self.__IO__['validReadSize']]    # take if correct number of entries
        return line != ''

    def data_write(self):
        if isinstance(self.data[0],list):
            self.output_write (['\t'.join(map(str,items)) for items in self.data])
        else:
            self.output_write ('\t'.join(map(str,self.data)))

    def data_append(self,
                    what):
        if isinstance(what,list):
            for item in what: self.data_append(item)
        else: self.data += [str(what)]


class MATERIAL_CONFIG():
    '''
    Reads, manipulates and writes material.config files
    '''
    __slots__ = ['data']

    def __init__(self):
        self.parts = [
                      'homogenization',
                      'microstructure',
                      'crystallite',
                      'phase',
                      'texture',
                     ]                                     # ordered (!) list of parts
        self.data = {\
                     'homogenization': {'__order__': []},
                     'microstructure': {'__order__': []},
                     'crystallite': {'__order__': []},
                     'phase': {'__order__': []},
                     'texture': {'__order__': []},
                    }

    def __repr__(self):
        me = []
        for part in self.parts:
            print 'doing',part
            me += ['','#-----------------------------#','<%s>'%part,'#-----------------------------#',]
            for section in self.data[part]['__order__']:
                me += ['','[%s] %s'%(section,'-'*max(0,27-len(section))),'',]
                for key in self.data[part][section]['__order__']:
                    if key.startswith('(') and key.endswith(')'):                # multiple (key)
                        me += ['%s\t%s'%(key,' '.join(values)) for values in self.data[part][section][key]]
                    else:                                                        # plain key
                        me += ['%s\t%s'%(key,' '.join(map(str,self.data[part][section][key])))]

        return '\n'.join(me)

    def parse_data(self, part=None, sections=[], content=None):

        re_part = re.compile(r'^<(.+)>$')                  # pattern for part
        re_sec  = re.compile(r'^\[(.+)\]$')                # pattern for section

        name_section = ''
        idx_section = 0
        active = False

        for line in content:
            line = line.split('#')[0].strip()              # kill comments and extra whitespace
            if line:                                       # content survives...
                match_part = re_part.match(line)
                if match_part:                             # found <part> separator
                    active = (match_part.group(1) == part) # only active in <part>
                    continue
                if active:
                    match_sec = re_sec.match(line)
                    if match_sec:                          # found [section]
                        name_section = match_sec.group(1)  # remember name ...
                        if '__order__' not in self.data[part]: self.data[part]['__order__'] = []
                        self.data[part]['__order__'].append(name_section)        # ... and position
                        self.data[part][name_section] = {'__order__':[]}
                        continue

                    if sections == [] or name_section in sections:               # respect subset
                        items = line.split()
                        if items[0] not in self.data[part][name_section]:        # first encounter of key?
                            self.data[part][name_section][items[0]] = []         # create item
                            self.data[part][name_section]['__order__'].append(items[0])
                        if items[0].startswith('(') and items[0].endswith(')'):  # multiple "(key)"
                            self.data[part][name_section][items[0]].append(items[1:])
                        else:                                                    # plain key
                            self.data[part][name_section][items[0]] = items[1:]

    def read(self,file=None):
        f=open(file,'r')
        c=f.readlines()
        f.close()
        for p in self.parts:
            self.parse_data(part=p, content=c)

    def write(self,file='material.config', overwrite=False):
        if overwrite is False:
            if os.path.exists(file):
                i=1
                while os.path.exists(file+'_%i'%i):i+=1
                file+='_%i'%i
        print('Writing material data to file %s'%file)
        f=open(file,'w')
        f.write(str(self))
        f.close()
        return file

    def add_data(self, part=None, section=None, data={}):
        '''Generic data adding/updating'''
        if part not in self.parts: raise Exception('invalid part %s'%part)
        if section not in self.data[part]: self.data[part]['__order__'] += [section]
        self.data[part][section] = data

    def add_homogenization(self, label='', type='', Ngrains=None):
        if type.lower() == 'isostrain':
            self.add_data(part='homogenization',
                          section=label,
                          data={'type':[type],
                                'Ngrains':[Ngrains],
                                '__order__':['type','Ngrains']
                               }
                         )
        elif type.lower() == 'rgc':
            raise Exception('Please implement me')

    def add_crystallite(self, label='', output=[]):
        self.add_data(part='crystallite',
                      section=label,
                      data={'(output)':[[o] for o in output],
                            '__order__':'(output)'})

    def add_texture(self, label='',type='', eulers=[], scatter=0., fraction=1.):
        ''' Experimental! Needs expansion to multi-component textures...'''
        if type == '(gauss)':
            texture={type:[['phi1','%f'%float(eulers[0]),'Phi','%f'%float(eulers[1]), 'phi2','%f'%float(eulers[2]),'scatter','%f'%float(scatter), 'fraction','%f'%fraction]],'__order__':label}
            #self.data['texture'][label]=texture
            #if len(self.data['texture'])>old_len: # added new label
            #    self.data['texture']['__order__'].append(label)
        else:
            raise Exception('Please implement me.')
        self.add_data(part='texture',section=label, data=texture)

    def add_phase(self, file='', label='', newlabel=None, phase=None):
        ''' USAGE:
            - read phase "label" from file
            OR
            - phase is dict with one key
        '''
        if file and label and (phase is None):
            other=MATERIAL_CONFIG()
            other.read(file=file)
            phase={label:other.data['phase'][label]}
            label=None
            print phase
        if len(phase)==1 and label is None:
            if newlabel:
                label=newlabel
            else:
                label=phase.keys()[0]
            print('Adding phase %s'%label)
            self.add_data(part='phase', section=label, data=phase)
        else:
            raise Exception('Wrong arguments')

    def add_microstructure(self, label=None,
                           crystallite=None,   # label
                           phases=None,        # list of labels
                           textures=None,      # list of labels
                           fractions=None):    # list of floats
        ''' Experimental! Needs expansion to multi-constituent microstructures...'''
        c=self.data['crystallite']['__order__'].index(crystallite)+1
        constituent=phases[:]
        if fractions is None:
            fractions=[1./len(phases)]*len(phases)
        for i in range(len(phases)):
            p=self.data['phase']['__order__'].index(phases[i])+1
            t=self.data['texture']['__order__'].index(textures[i])+1
            f=fractions[i]
            constituent[i]=['phase','%i'%p,'texture','%i'%t,'fraction','%f'%f]
        data={'crystallite':['%i'%c],
              '(constituent)':constituent,
              '__order__':['crystallite','(constituent)']}
        self.add_data(part='microstructure',section=label,data=data)

    def change_value(self, part=None,
                     section=None,
                     key=None,
                     value=None):
        if type(value) is not type([]):
            if type(value) is not type('s'):
                value = '%s'%value
            value = [value]
        newlen = len(value)
        oldval = self.data[part][section][key]
        oldlen = len(oldval)
        print('changing %s:%s:%s:%s'%(part,section,key,oldval))
        self.data[part][section][key] = value
        print('new: %s'%self.data[part][section][key])
        if newlen != oldlen:
            print('Length of value was changed from %i to %i!'%(oldlen,newlen))

lib/msc_tools.py

@@ -1,134 +0,0 @@
class MSC_TOOLS():
    import os,string

    releases = { \
                '2010.2':['linux64',''],
                '2010': ['linux64',''],
                '2008r1':[''],
                '2007r1':[''],
                '2005r3':[''],
               }

    #--------------------------
    def version(self,callerPath='',rootRelation=''):
    #--------------------------
        import os

        if os.getenv('DAMASK_ROOT'): DamaskRoot = os.getenv('DAMASK_ROOT')
        else: DamaskRoot = os.path.normpath(os.path.join(os.path.dirname(os.path.realpath(callerPath)),rootRelation))

        try:                                       # check for MSC.Mentat installation location
            file = open(os.path.join(DamaskRoot,'lib/pathinfo'))
            for line in file.readlines():
                if line.split()[0].lower() == 'msc': MSCpath = os.path.normpath(line.split()[1])
            file.close()
        except:
            MSCpath = '/msc'

        for release,subdirs in sorted(self.releases.items(),reverse=True):
            for subdir in subdirs:
                libPath = '%s/mentat%s/shlib/%s'%(MSCpath,release,subdir)
                if os.path.exists(libPath): return release
                else: continue

        return ''


    #--------------------------
    def libraryPath(self,callerPath='',rootRelation=''):
    #--------------------------
        import os

        if os.getenv('DAMASK_ROOT'): DamaskRoot = os.getenv('DAMASK_ROOT')
        else: DamaskRoot = os.path.normpath(os.path.join(os.path.dirname(os.path.realpath(callerPath)),rootRelation))

        try:                                       # check for MSC.Mentat installation location
            file = open(os.path.join(DamaskRoot,'lib/pathinfo'))
            for line in file.readlines():
                if line.split()[0].lower() == 'msc': MSCpath = os.path.normpath(line.split()[1])
            file.close()
        except:
            MSCpath = '/msc'

        for release,subdirs in sorted(self.releases.items(),reverse=True):
            for subdir in subdirs:
                libPath = '%s/mentat%s/shlib/%s'%(MSCpath,release,subdir)
                if os.path.exists(libPath): return libPath
                else: continue

        return ''


    #--------------------------
    def geometry_create (self,inputFile):
    #--------------------------
        '''
        Build nodal coordinates and element connectivity from MSC.Marc input file.
        '''


    #--------------------------
    def geometry_deform (self,vtkFile,ASCIItable,displacementHeader):
    #--------------------------
        '''
        Alter nodal coordinates in existing VTK file by displacements given in ASCIItable.
        '''


    #--------------------------
    def submit_job(self,
    #--------------------------
                   run_marc_path='/msc/marc2010/tools/',
                   subroutine_dir=None,
                   subroutine_name='DAMASK_marc2010',
                   compile=False,
                   compiled_dir='../../../code/',
                   modelname='one_element_model',
                   jobname='job1',
                   #IOdir='',
                   host=[]
                   ):
        import os
        import subprocess, shlex
        import shutil

        if subroutine_dir is None:
            subroutine_dir=os.getenv('DAMASK_ROOT')+'/code/'
        # Define all options [see Marc Installation and Operation Guide, pp 23]
        run_marc=run_marc_path+'run_marc'
        jid=' -jid '+modelname+'_'+jobname
        compilation=' -u '+subroutine_dir+subroutine_name+'.f90'+' -save y'
        options=' -nprocd 1 -autorst 0 -ci n -cr n -dcoup 0 -b no -v no'
        cmd=run_marc+jid+options

        if compile:
            cmd=cmd+compilation
            print 'Job submission with compilation.'
        else:
            shutil.copy2(subroutine_dir+subroutine_name+'.f90','./'+subroutine_name+'.f90')
            shutil.copy2(compiled_dir+subroutine_name+'.marc','./'+subroutine_name+'.marc')
            prog=' -prog '+subroutine_name
            cmd=cmd+prog
            print 'Job submission without compilation, using %s'%prog
        out=open('out.log','w')
        print(cmd)
        #print shlex.split(cmd)
        self.p=subprocess.Popen(shlex.split(cmd),stdout=out,stderr=subprocess.STDOUT)
        self.p.wait()
        out.close()

    #--------------------------
    def exit_number_from_outFile(self,outFile=None):
    #--------------------------
        import string
        fid_out=open(outFile,'r')
        for ln in fid_out:
            if (string.find(ln,'tress iteration') != -1):
                print ln
            elif (string.find(ln,'Exit number') != -1):
                substr=ln[string.find(ln,'Exit number'):len(ln)]
                exitnumber=substr[12:16]
                fid_out.close()
                return int(exitnumber)
        fid_out.close()
        return -1