# -*- coding: UTF-8 no BOM -*-

# $Id$

import os, sys, shlex, inspect
import subprocess, shutil, string
import logging, logging.config
import damask
from optparse import OptionParser


class Test():
  '''
  General class for testing.
  Is sub-classed by the individual tests.
  '''

  variants = []

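  # A minimal sketch of how an individual test might subclass this class.
  # The class name, variants, file names, and command below are invented for illustration:
  #
  #   class MyTest(Test):
  #     variants = ['small','large']
  #
  #     def run(self,variant):
  #       self.copy_Reference2Current(['%s.geom'%self.variants[variant]])
  #       self.execute_inCurrentDir('someSolver %s.geom'%self.variants[variant])
  #       return True
  #
  #     def compare(self,variant):
  #       return self.compare_ArrayRefCur('%s.txt'%self.variants[variant]) < 1.0e-5
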
  def __init__(self,test_description):

    logger = logging.getLogger()
    logger.setLevel(0)
    fh = logging.FileHandler('test.log')                                # create file handler which logs even debug messages
    fh.setLevel(logging.DEBUG)
    full = logging.Formatter('%(asctime)s - %(levelname)s: \n%(message)s')
    fh.setFormatter(full)
    ch = logging.StreamHandler(stream=sys.stdout)                       # create console handler with a higher log level
    ch.setLevel(logging.INFO)
    # create formatter and add it to the handlers
    plain = logging.Formatter('%(message)s')
    ch.setFormatter(plain)
    # add the handlers to the logger
    logger.addHandler(fh)
    logger.addHandler(ch)

    logging.info('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n'
                +'----------------------------------------------------------------\n'
                +'| '+test_description+'\n'
                +'----------------------------------------------------------------')

    self.dirBase = os.path.dirname(os.path.realpath(sys.modules[self.__class__.__module__].__file__))
    self.parser = OptionParser(description = test_description+' (using class: $Id$)',
                               usage = './test.py [options]')
    self.updateRequested = False
    self.parser.add_option("-d", "--debug", action="store_true",
                           dest="debug",
                           help="debug run, don't calculate but use existing results")
    self.parser.add_option("-p", "--pass", action="store_true",
                           dest="accept",
                           help="calculate results but always consider test as successful")
    self.parser.set_defaults(debug=False,
                             accept=False)

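  # Note: __init__ only builds self.parser; self.options (read in execute) is expected
  # to be filled by the calling test script, presumably along the lines of
  #
  #   (test.options, args) = test.parser.parse_args()
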
  def execute(self):
    '''
    Run all variants and report first failure.

    Returns 0 if all variants passed, the one-based number of the first failing
    variant otherwise, and -1 if the test could not be started at all.
    '''
    if self.options.debug:
      for variant in xrange(len(self.variants)):
        try:
          self.postprocess(variant)
          if not self.compare(variant):
            return variant+1                                            # return culprit
        except Exception as e:
          logging.critical('\nWARNING:\n %s\n'%e)
          return variant+1                                              # return culprit
      return 0
    else:
      if not self.testPossible(): return -1
      self.clean()
      self.prepareAll()
      for variant in xrange(len(self.variants)):
        try:
          self.prepare(variant)
          self.run(variant)
          self.postprocess(variant)
          if self.updateRequested:                                      # update requested
            self.update(variant)
          elif not (self.options.accept or self.compare(variant)):      # no update, do comparison
            return variant+1                                            # return culprit
        except Exception as e:
          logging.critical('\nWARNING:\n %s\n'%e)
          return variant+1                                              # return culprit
      return 0

  def testPossible(self):
    '''
    Check if test is possible or not (e.g. no license available).
    '''
    return True

  def clean(self):
    '''
    Delete directory tree containing current results.
    '''
    status = True

    try:
      shutil.rmtree(self.dirCurrent())
    except:
      logging.warning('removal of directory "%s" not possible...'%(self.dirCurrent()))
      status = status and False

    try:
      os.mkdir(self.dirCurrent())
    except:
      logging.critical('creation of directory "%s" failed...'%(self.dirCurrent()))
      status = status and False

    return status

  def prepareAll(self):
    '''
    Do all necessary preparations for the whole test.
    '''
    return True

  def prepare(self,variant):
    '''
    Do all necessary preparations for the run of each test variant.
    '''
    return True

  def run(self,variant):
    '''
    Execute the requested test variant.
    '''
    return True

  def postprocess(self,variant):
    '''
    Perform post-processing of generated results for this test variant.
    '''
    return True

  def compare(self,variant):
    '''
    Compare reference to current results.
    '''
    return True

  def update(self,variant):
    '''
    Update reference with current results.
    '''
    logging.debug('Update not necessary')
    return True

  def dirReference(self):
    '''
    Directory containing reference results of the test.
    '''
    return os.path.normpath(os.path.join(self.dirBase,'reference/'))

  def dirCurrent(self):
    '''
    Directory containing current results of the test.
    '''
    return os.path.normpath(os.path.join(self.dirBase,'current/'))

  def dirProof(self):
    '''
    Directory containing human readable proof of correctness for the test.
    '''
    return os.path.normpath(os.path.join(self.dirBase,'proof/'))

  def fileInRoot(self,dir,file):
    '''
    Path to a file in the root directory of DAMASK.
    '''
    return os.path.join(damask.Environment().rootDir(),dir,file)

  def fileInReference(self,file):
    '''
    Path to a file in the reference directory for the test.
    '''
    return os.path.join(self.dirReference(),file)

  def fileInCurrent(self,file):
    '''
    Path to a file in the current results directory for the test.
    '''
    return os.path.join(self.dirCurrent(),file)

  def fileInProof(self,file):
    '''
    Path to a file in the proof directory for the test.
    '''
    return os.path.join(self.dirProof(),file)

  def copy(self, mapA, mapB,
                 A = [], B = []):
    '''
    Copy list of files from (mapped) source to target.
    mapA/B is one of self.fileInX.
    '''
    if not B or len(B) == 0: B = A

    for source,target in zip(map(mapA,A),map(mapB,B)):
      try:
        shutil.copy2(source,target)
      except:
        logging.critical('error copying {} to {}'.format(source,target))

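  # Hypothetical usage sketch of the generic copy helper (file names invented):
  #
  #   self.copy(self.fileInReference, self.fileInCurrent,
  #             A = ['material.config','geometry.geom'])
  #
  # copies both files from the reference to the current directory under the same names.
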
  def copy_Reference2Current(self,sourcefiles=[],targetfiles=[]):

    if len(targetfiles) == 0: targetfiles = sourcefiles
    for i,file in enumerate(sourcefiles):
      try:
        shutil.copy2(self.fileInReference(file),self.fileInCurrent(targetfiles[i]))
      except:
        logging.critical('Reference2Current: Unable to copy file %s'%file)

  def copy_Base2Current(self,sourceDir,sourcefiles=[],targetfiles=[]):

    source = os.path.normpath(os.path.join(self.dirBase,'../../../'+sourceDir))
    if len(targetfiles) == 0: targetfiles = sourcefiles
    for i,file in enumerate(sourcefiles):
      try:
        shutil.copy2(os.path.join(source,file),self.fileInCurrent(targetfiles[i]))
      except:
        logging.error(os.path.join(source,file))
        logging.critical('Base2Current: Unable to copy file %s'%file)

  def copy_Current2Reference(self,sourcefiles=[],targetfiles=[]):

    if len(targetfiles) == 0: targetfiles = sourcefiles
    for i,file in enumerate(sourcefiles):
      try:
        shutil.copy2(self.fileInCurrent(file),self.fileInReference(targetfiles[i]))
      except:
        logging.critical('Current2Reference: Unable to copy file %s'%file)

  def copy_Proof2Current(self,sourcefiles=[],targetfiles=[]):

    if len(targetfiles) == 0: targetfiles = sourcefiles
    for i,file in enumerate(sourcefiles):
      try:
        shutil.copy2(self.fileInProof(file),self.fileInCurrent(targetfiles[i]))
      except:
        logging.critical('Proof2Current: Unable to copy file %s'%file)

  def copy_Current2Current(self,sourcefiles=[],targetfiles=[]):

    if len(targetfiles) == 0: targetfiles = sourcefiles
    for i,file in enumerate(sourcefiles):
      try:
        shutil.copy2(self.fileInCurrent(file),self.fileInCurrent(targetfiles[i]))
      except:
        logging.critical('Current2Current: Unable to copy file %s'%file)

  def execute_inCurrentDir(self,cmd,streamIn=None):

    logging.info(cmd)
    out,error = damask.util.execute(cmd,streamIn,self.dirCurrent())

    logging.info(error)
    logging.debug(out)

    return out,error

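  # Hypothetical usage sketch (the command is invented): run a tool inside the
  # current results directory and capture its output,
  #
  #   out,error = self.execute_inCurrentDir('postProcess --option results.out')
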
  def compare_Array(self,File1,File2):

    import numpy as np

    logging.info('comparing\n '+File1+'\n '+File2)
    table1 = damask.ASCIItable(name=File1,readonly=True)
    table1.head_read()
    len1 = len(table1.info)+2
    table2 = damask.ASCIItable(name=File2,readonly=True)
    table2.head_read()
    len2 = len(table2.info)+2

    refArray = np.nan_to_num(np.genfromtxt(File1,missing_values='n/a',skip_header = len1,autostrip=True))
    curArray = np.nan_to_num(np.genfromtxt(File2,missing_values='n/a',skip_header = len2,autostrip=True))

    if len(curArray) == len(refArray):
      refArrayNonZero = refArray[refArray.nonzero()]
      curArray = curArray[refArray.nonzero()]
      max_err = np.max(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.))
      max_loc = np.argmax(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.))
      refArrayNonZero = refArrayNonZero[curArray.nonzero()]
      curArray = curArray[curArray.nonzero()]
      print(' ********\n * maximum relative error %e for %e and %e\n ********'
             %(max_err, refArrayNonZero[max_loc],curArray[max_loc]))
      return max_err
    else:
      raise Exception('mismatch in array size to compare')

  def compare_ArrayRefCur(self,ref,cur=''):

    if cur == '': cur = ref
    refName = self.fileInReference(ref)
    curName = self.fileInCurrent(cur)
    return self.compare_Array(refName,curName)

  def compare_ArrayCurCur(self,cur0,cur1):

    cur0Name = self.fileInCurrent(cur0)
    cur1Name = self.fileInCurrent(cur1)
    return self.compare_Array(cur0Name,cur1Name)

  def compare_Table(self,headings0,file0,headings1,file1,normHeadings='',normType=None,
                         absoluteTolerance=False,perLine=False,skipLines=[]):

    import numpy as np

    logging.info('comparing ASCII Tables\n %s \n %s'%(file0,file1))
    if normHeadings == '': normHeadings = headings0

    if len(headings0) == len(headings1) == len(normHeadings):           # check if comparison is possible and determine length of columns
      dataLength = len(headings0)
      length     = [1   for i in xrange(dataLength)]
      shape      = [[]  for i in xrange(dataLength)]
      data       = [[]  for i in xrange(dataLength)]
      maxError   = [0.0 for i in xrange(dataLength)]
      absTol     = [absoluteTolerance for i in xrange(dataLength)]
      column     = [[1 for i in xrange(dataLength)] for j in xrange(2)]

      norm       = [[]  for i in xrange(dataLength)]
      normLength = [1   for i in xrange(dataLength)]
      normShape  = [[]  for i in xrange(dataLength)]
      normColumn = [1   for i in xrange(dataLength)]

      for i in xrange(dataLength):
        if headings0[i]['shape'] != headings1[i]['shape']:
          raise Exception('shape mismatch when comparing %s with %s '%(headings0[i]['label'],headings1[i]['label']))
        shape[i] = headings0[i]['shape']
        for j in xrange(np.shape(shape[i])[0]):
          length[i] *= shape[i][j]
        normShape[i] = normHeadings[i]['shape']
        for j in xrange(np.shape(normShape[i])[0]):
          normLength[i] *= normShape[i][j]
    else:
      raise Exception('trying to compare %i with %i normed by %i data sets'%(len(headings0),len(headings1),len(normHeadings)))

    table0 = damask.ASCIItable(name=file0,readonly=True)
    table0.head_read()
    table1 = damask.ASCIItable(name=file1,readonly=True)
    table1.head_read()

    for i in xrange(dataLength):
      key0    = {True :'1_%s',
                 False:'%s'   }[length[i]>1]%headings0[i]['label']
      key1    = {True :'1_%s',
                 False:'%s'   }[length[i]>1]%headings1[i]['label']
      normKey = {True :'1_%s',
                 False:'%s'   }[normLength[i]>1]%normHeadings[i]['label']
      if key0 not in table0.labels:
        raise Exception('column %s not found in 1. table...\n'%key0)
      elif key1 not in table1.labels:
        raise Exception('column %s not found in 2. table...\n'%key1)
      elif normKey not in table0.labels:
        raise Exception('column %s not found in 1. table...\n'%normKey)
      else:
        column[0][i]  = table0.labels.index(key0)                       # remember columns of requested data in first table
        column[1][i]  = table1.labels.index(key1)                       # remember columns of requested data in second table
        normColumn[i] = table0.labels.index(normKey)                    # remember columns of norm data in first table

    line0 = 0
    while table0.data_read():                                           # read next data line of ASCII table
      if line0 not in skipLines:
        for i in xrange(dataLength):
          myData = np.array(map(float,table0.data[column[0][i]:
                                                  column[0][i]+length[i]]),'d')
          normData = np.array(map(float,table0.data[normColumn[i]:
                                                    normColumn[i]+normLength[i]]),'d')
          data[i] = np.append(data[i],np.reshape(myData,shape[i]))
          if normType == 'pInf':
            norm[i] = np.append(norm[i],np.max(np.abs(normData)))
          else:
            norm[i] = np.append(norm[i],np.linalg.norm(np.reshape(normData,normShape[i]),normType))
      line0 += 1

    for i in xrange(dataLength):
      if not perLine: norm[i] = [np.max(norm[i]) for j in xrange(line0-len(skipLines))]
      data[i] = np.reshape(data[i],[line0-len(skipLines),length[i]])
      if any(norm[i]) == 0.0 or absTol[i]:                              # no usable norm: fall back to absolute tolerance
        norm[i] = [1.0 for j in xrange(line0-len(skipLines))]
        absTol[i] = True
        if perLine:
          logging.warning('At least one norm of %s in 1. table is 0.0, using absolute tolerance'%headings0[i]['label'])
        else:
          logging.warning('Maximum norm of %s in 1. table is 0.0, using absolute tolerance'%headings0[i]['label'])

    line1 = 0
    while table1.data_read():                                           # read next data line of ASCII table
      if line1 not in skipLines:
        for i in xrange(dataLength):
          myData = np.array(map(float,table1.data[column[1][i]:
                                                  column[1][i]+length[i]]),'d')
          maxError[i] = max(maxError[i],np.linalg.norm(np.reshape(myData-data[i][line1-len(skipLines),:],shape[i]))/
                                                       norm[i][line1-len(skipLines)])
      line1 += 1

    if (line0 != line1): raise Exception('found %s lines in 1. table and %s in 2. table'%(line0,line1))

    logging.info(' ********')
    for i in xrange(dataLength):
      if absTol[i]:
        logging.info(' * maximum absolute error %e for %s and %s'%(maxError[i],headings0[i]['label'],headings1[i]['label']))
      else:
        logging.info(' * maximum relative error %e for %s and %s'%(maxError[i],headings0[i]['label'],headings1[i]['label']))
    logging.info(' ********')
    return maxError

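  # The headings arguments of compare_Table are lists of dicts with 'label' and
  # 'shape' entries, one per requested quantity. A hypothetical example (labels invented):
  #
  #   headings = [{'label':'f',    'shape':[3,3]},      # 3x3 tensor stored as columns 1_f ... 9_f
  #               {'label':'temp', 'shape':[1]}]        # scalar column temp
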
  def compare_TablesStatistically(self,
                                  files = [None,None],                  # list of file names
                                  columns = [None],                     # list of list of column labels (per file)
                                  meanTol = 1.0e-4,
                                  stdTol = 1.0e-6,
                                  preFilter = 1.0e-9):
    '''
    Calculate statistics of tables.
    preFilter can be used to ignore small values (a negative number disables this feature).
    '''
    import numpy as np
    from collections import Iterable

    if not (isinstance(files, Iterable) and not isinstance(files, str)):  # check whether list of files is requested
      files = [str(files)]

    tables = [damask.ASCIItable(name = filename,readonly = True) for filename in files]
    for table in tables:
      table.head_read()

    columns += [columns[0]]*(len(files)-len(columns))                   # extend to same length as files
    columns = columns[:len(files)]                                      # truncate to same length as files

    for i,column in enumerate(columns):
      if column is None: columns[i] = tables[i].labels                  # if no column is given, read all

    logging.info('comparing ASCIItables statistically')
    for i in xrange(len(columns)):
      columns[i] = columns[0] if not columns[i] else \
                  ([columns[i]] if not (isinstance(columns[i], Iterable) and not isinstance(columns[i], str)) else \
                    columns[i]
                  )
      logging.info(files[i]+':'+','.join(columns[i]))

    if len(files) < 2: return True                                      # single table is always close to itself...

    data = []
    for table,labels in zip(tables,columns):
      table.data_readArray(labels)
      data.append(table.data)
      table.close()

    for i in xrange(1,len(data)):
      delta = data[i]-data[i-1]
      normBy = (np.abs(data[i]) + np.abs(data[i-1]))*0.5
      normedDelta = np.where(normBy>preFilter,delta/normBy,0.0)
      mean = np.amax(np.abs(np.mean(normedDelta,0)))
      std = np.amax(np.std(normedDelta,0))
      logging.info('mean: %f'%mean)
      logging.info('std: %f'%std)

    return (mean < meanTol) & (std < stdTol)

  def compare_Tables(self,
                     files = [None,None],                               # list of file names
                     columns = [None],                                  # list of list of column labels (per file)
                     rtol = 1e-5,
                     atol = 1e-8,
                     preFilter = -1.0,
                     postFilter = -1.0,
                     debug = False):
    '''
    Compare tables with np.allclose.
    preFilter/postFilter can be used to ignore small values (a negative number disables this feature).
    '''
    import numpy as np
    from collections import Iterable

    if not (isinstance(files, Iterable) and not isinstance(files, str)):  # check whether list of files is requested
      files = [str(files)]

    tables = [damask.ASCIItable(name = filename,readonly = True) for filename in files]
    for table in tables:
      table.head_read()

    columns += [columns[0]]*(len(files)-len(columns))                   # extend to same length as files
    columns = columns[:len(files)]                                      # truncate to same length as files

    for i,column in enumerate(columns):
      if column is None: columns[i] = tables[i].labels                  # if no column is given, read all

    logging.info('comparing ASCIItables')
    for i in xrange(len(columns)):
      columns[i] = columns[0] if not columns[i] else \
                  ([columns[i]] if not (isinstance(columns[i], Iterable) and not isinstance(columns[i], str)) else \
                    columns[i]
                  )
      logging.info(files[i]+':'+','.join(columns[i]))

    if len(files) < 2: return True                                      # single table is always close to itself...

    maximum = np.zeros(len(columns[0]),dtype='f')
    data = []
    for table,labels in zip(tables,columns):
      table.data_readArray(labels)
      data.append(np.where(np.abs(table.data)<preFilter,np.zeros_like(table.data),table.data))
      maximum += np.abs(table.data).max(axis=0)
      table.close()

    maximum /= len(tables)
    maximum = np.where(maximum > 0.0, maximum, 1)                       # do not divide by zero for empty columns
    for i in xrange(len(data)):
      data[i] /= maximum

    mask = np.zeros_like(table.data,dtype='bool')

    for table in data:
      mask |= np.where(np.abs(table)<postFilter,True,False)             # mask out (all) tiny values

    allclose = True                                                     # start optimistic
    for i in xrange(1,len(data)):
      if debug:
        t0 = np.where(mask,0.0,data[i-1])
        t1 = np.where(mask,0.0,data[i  ])
        j = np.argmin(np.abs(t1)*rtol+atol-np.abs(t0-t1))
        logging.info('%f'%np.amax(np.abs(t0-t1)/(np.abs(t1)*rtol+atol)))
        logging.info('%f %f'%((t0*maximum).flatten()[j],(t1*maximum).flatten()[j]))
      allclose &= np.allclose(np.where(mask,0.0,data[i-1]),
                              np.where(mask,0.0,data[i  ]),rtol,atol)   # accumulate "pessimism"

    return allclose

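  # Hypothetical usage sketch of compare_Tables (file and column names invented):
  #
  #   passed = self.compare_Tables(files   = [self.fileInReference('out.txt'),
  #                                           self.fileInCurrent('out.txt')],
  #                                columns = [['temp','pressure']],
  #                                rtol    = 1.0e-4)
  #
  # The single column list is reused for both files; passing columns = [None] (the
  # default) compares all columns.
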
  def compare_TableRefCur(self,headingsRef,ref,headingsCur='',cur='',normHeadings='',normType=None,
                               absoluteTolerance=False,perLine=False,skipLines=[]):

    if cur == '': cur = ref
    if headingsCur == '': headingsCur = headingsRef
    refName = self.fileInReference(ref)
    curName = self.fileInCurrent(cur)
    return self.compare_Table(headingsRef,refName,headingsCur,curName,normHeadings,normType,
                              absoluteTolerance,perLine,skipLines)

  def compare_TableCurCur(self,headingsCur0,Cur0,Cur1,headingsCur1='',normHeadings='',normType=None,
                               absoluteTolerance=False,perLine=False,skipLines=[]):

    if headingsCur1 == '': headingsCur1 = headingsCur0
    cur0Name = self.fileInCurrent(Cur0)
    cur1Name = self.fileInCurrent(Cur1)
    return self.compare_Table(headingsCur0,cur0Name,headingsCur1,cur1Name,normHeadings,normType,
                              absoluteTolerance,perLine,skipLines)

  def report_Success(self,culprit):

    if culprit == 0:
      logging.critical('%s passed.'%({False: 'The test',
                                      True: 'All %i tests'%(len(self.variants))}[len(self.variants) > 1]))
      logging.critical('\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n')
      return 0
    if culprit == -1:
      logging.warning('Warning: Could not start test')
      return 0
    else:
      logging.critical(' ********\n * Test %i failed...\n ********'%(culprit))
      logging.critical('\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n')
      return culprit
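
# A hypothetical driver at the bottom of an individual test script (class name and
# description invented), wiring execute() and report_Success() together:
#
#   if __name__ == '__main__':
#     test = MyTest('my test description')
#     (test.options, args) = test.parser.parse_args()
#     sys.exit(test.report_Success(test.execute()))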