2014-04-02 00:11:14 +05:30
|
|
|
# -*- coding: UTF-8 no BOM -*-
|
|
|
|
|
2011-12-22 16:00:25 +05:30
|
|
|
|
2016-03-04 22:23:55 +05:30
|
|
|
import os,sys,shutil
|
|
|
|
import logging,logging.config
|
2011-12-15 20:23:10 +05:30
|
|
|
import damask
|
2016-03-04 22:23:55 +05:30
|
|
|
import numpy as np
|
|
|
|
from collections import Iterable
|
2012-02-16 02:24:14 +05:30
|
|
|
from optparse import OptionParser
|
2011-12-15 20:23:10 +05:30
|
|
|
|
|
|
|
class Test():
  """
  General class for testing.

  Is sub-classed by the individual tests.
  """

  # Class-level list of test variants; subclasses populate it and
  # execute() performs one prepare/run/compare pass per entry.
  variants = []
|
|
|
|
|
2012-02-16 02:24:14 +05:30
|
|
|
  def __init__(self,test_description):
    """Set up logging (file + console) and the command-line option parser.

    Parameters
    ----------
    test_description : str
      Human-readable description, shown in the banner and option help.

    Side effects: attaches handlers to the ROOT logger (a file handler
    writing 'test.log' in the working directory, and a stdout handler)
    and creates self.parser; self.options is expected to be filled later
    by parsing the command line (done outside this method).
    """
    logger = logging.getLogger()
    logger.setLevel(0)                                                                              # root at lowest level; handlers filter individually
    fh = logging.FileHandler('test.log')                                                            # create file handler which logs even debug messages
    fh.setLevel(logging.DEBUG)
    full = logging.Formatter('%(asctime)s - %(levelname)s: \n%(message)s')
    fh.setFormatter(full)
    ch = logging.StreamHandler(stream=sys.stdout)                                                   # create console handler with a higher log level
    ch.setLevel(logging.INFO)
    # create formatter and add it to the handlers
    plain = logging.Formatter('%(message)s')
    ch.setFormatter(plain)
    # add the handlers to the logger
    logger.addHandler(fh)
    logger.addHandler(ch)

    # banner announcing the test in both log sinks
    logging.info('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n' \
                +'----------------------------------------------------------------\n' \
                +'| '+test_description+'\n' \
                +'----------------------------------------------------------------')
    # directory of the module defining the (sub)class, so reference/current/proof
    # paths resolve relative to the concrete test, not to this base class
    self.dirBase = os.path.dirname(os.path.realpath(sys.modules[self.__class__.__module__].__file__))
    self.parser = OptionParser(
    description = test_description+' (using class: {})'.format(damask.version),
    usage='./test.py [options]')
    self.updateRequested = False                                                                    # subclasses flip this to refresh reference data
    self.parser.add_option("-d", "--debug", action="store_true",\
                           dest="debug",\
                           help="debug run, don't calculate but use existing results")
    self.parser.add_option("-p", "--pass", action="store_true",\
                           dest="accept",\
                           help="calculate results but always consider test as successfull")
    self.parser.set_defaults(debug=False,
                             accept=False)
|
2014-06-11 23:16:26 +05:30
|
|
|
|
2014-06-03 18:39:52 +05:30
|
|
|
  def execute(self):
    """Run all variants and report first failure.

    Returns 0 on success, -1 when the test cannot be started
    (testPossible() returned False), or the 1-based index of the first
    failing variant otherwise. Assumes self.options has been populated
    from the command line before this is called.
    """
    if self.options.debug:                                                                          # debug: skip calculation, only post-process/compare existing results
      for variant in xrange(len(self.variants)):
        try:
          self.postprocess(variant)
          if not self.compare(variant):
            return variant+1                                                                        # return culprit
        except Exception as e :
          logging.critical('\nWARNING:\n {}\n'.format(e))
          return variant+1                                                                          # return culprit
      return 0
    else:
      if not self.testPossible(): return -1
      self.clean()                                                                                  # fresh 'current' directory for this run
      self.prepareAll()
      for variant in xrange(len(self.variants)):
        try:
          self.prepare(variant)
          self.run(variant)
          self.postprocess(variant)
          if self.updateRequested:                                                                  # update requested
            self.update(variant)
          elif not (self.options.accept or self.compare(variant)):                                  # no update, do comparison
            return variant+1                                                                        # return culprit
        except Exception as e :
          logging.critical('\nWARNING:\n {}\n'.format(e))
          return variant+1                                                                          # return culprit
      return 0
|
2013-03-05 01:09:13 +05:30
|
|
|
|
|
|
|
  def testPossible(self):
    """Check if test is possible or not (e.g. no license available).

    Override hook: returning False makes execute() abort with code -1.
    """
    return True
|
|
|
|
|
2012-01-18 15:04:49 +05:30
|
|
|
def clean(self):
|
2016-03-04 22:23:55 +05:30
|
|
|
"""Delete directory tree containing current results."""
|
2012-01-18 15:04:49 +05:30
|
|
|
status = True
|
|
|
|
|
|
|
|
try:
|
|
|
|
shutil.rmtree(self.dirCurrent())
|
|
|
|
except:
|
2016-03-21 19:48:58 +05:30
|
|
|
logging.warning('removal of directory "{}" not possible...'.format(self.dirCurrent()))
|
2012-01-18 15:04:49 +05:30
|
|
|
status = status and False
|
|
|
|
|
|
|
|
try:
|
|
|
|
os.mkdir(self.dirCurrent())
|
|
|
|
except:
|
2016-03-21 19:48:58 +05:30
|
|
|
logging.critical('creation of directory "{}" failed...'.format(self.dirCurrent()))
|
2012-01-18 15:04:49 +05:30
|
|
|
status = status and False
|
|
|
|
|
|
|
|
return status
|
2012-12-12 22:40:04 +05:30
|
|
|
|
|
|
|
  def prepareAll(self):
    """Do all necessary preparations for the whole test.

    Override hook; called once by execute() before any variant runs.
    """
    return True
|
2012-01-18 15:04:49 +05:30
|
|
|
|
|
|
|
  def prepare(self,variant):
    """Do all necessary preparations for the run of each test variant.

    Override hook; called by execute() before run() for each variant index.
    """
    return True
|
|
|
|
|
|
|
|
|
|
|
|
  def run(self,variant):
    """Execute the requested test variant.

    Override hook; the base implementation does nothing and reports success.
    """
    return True
|
|
|
|
|
|
|
|
|
|
|
|
  def postprocess(self,variant):
    """Perform post-processing of generated results for this test variant.

    Override hook; called after run() and before compare().
    """
    return True
|
|
|
|
|
|
|
|
|
|
|
|
  def compare(self,variant):
    """Compare reference to current results.

    Override hook; must return a truthy value when the variant passes —
    execute() treats a falsy return as failure of this variant.
    """
    return True
|
|
|
|
|
|
|
|
|
|
|
|
  def update(self,variant):
    """Update reference with current results.

    Override hook; invoked by execute() when self.updateRequested is set.
    The default implementation only logs and changes nothing.
    """
    logging.debug('Update not necessary')
    return True
|
|
|
|
|
|
|
|
|
|
|
|
def dirReference(self):
|
2016-03-04 22:23:55 +05:30
|
|
|
"""Directory containing reference results of the test."""
|
2012-01-18 15:04:49 +05:30
|
|
|
return os.path.normpath(os.path.join(self.dirBase,'reference/'))
|
|
|
|
|
2014-06-03 18:39:52 +05:30
|
|
|
|
2012-01-18 15:04:49 +05:30
|
|
|
def dirCurrent(self):
|
2016-03-04 22:23:55 +05:30
|
|
|
"""Directory containing current results of the test."""
|
2012-01-18 15:04:49 +05:30
|
|
|
return os.path.normpath(os.path.join(self.dirBase,'current/'))
|
2014-06-03 18:39:52 +05:30
|
|
|
|
2012-12-12 22:40:04 +05:30
|
|
|
|
|
|
|
def dirProof(self):
|
2016-03-04 22:23:55 +05:30
|
|
|
"""Directory containing human readable proof of correctness for the test."""
|
2012-12-12 22:40:04 +05:30
|
|
|
return os.path.normpath(os.path.join(self.dirBase,'proof/'))
|
2014-06-03 18:39:52 +05:30
|
|
|
|
2012-12-12 22:40:04 +05:30
|
|
|
|
2015-12-04 03:22:03 +05:30
|
|
|
  def fileInRoot(self,dir,file):
    """Path to a file in the root directory of DAMASK.

    NOTE: parameters 'dir' and 'file' shadow builtins; kept for
    backward compatibility with existing callers.
    """
    return os.path.join(damask.Environment().rootDir(),dir,file)
|
|
|
|
|
|
|
|
|
2012-01-18 15:04:49 +05:30
|
|
|
def fileInReference(self,file):
|
2016-03-04 22:23:55 +05:30
|
|
|
"""Path to a file in the refrence directory for the test."""
|
2012-01-18 15:04:49 +05:30
|
|
|
return os.path.join(self.dirReference(),file)
|
|
|
|
|
2014-06-03 18:39:52 +05:30
|
|
|
|
2012-01-18 15:04:49 +05:30
|
|
|
def fileInCurrent(self,file):
|
2016-03-04 22:23:55 +05:30
|
|
|
"""Path to a file in the current results directory for the test."""
|
2012-01-18 15:04:49 +05:30
|
|
|
return os.path.join(self.dirCurrent(),file)
|
2014-06-03 18:39:52 +05:30
|
|
|
|
2012-12-12 22:40:04 +05:30
|
|
|
|
|
|
|
def fileInProof(self,file):
|
2016-03-04 22:23:55 +05:30
|
|
|
"""Path to a file in the proof directory for the test."""
|
2012-12-12 22:40:04 +05:30
|
|
|
return os.path.join(self.dirProof(),file)
|
2014-06-03 18:39:52 +05:30
|
|
|
|
2016-03-04 22:23:55 +05:30
|
|
|
|
2015-12-04 03:22:03 +05:30
|
|
|
def copy(self, mapA, mapB,
|
|
|
|
A = [], B = []):
|
2016-03-04 22:23:55 +05:30
|
|
|
"""
|
2015-12-04 03:22:03 +05:30
|
|
|
copy list of files from (mapped) source to target.
|
2016-03-04 22:23:55 +05:30
|
|
|
|
2015-12-04 03:22:03 +05:30
|
|
|
mapA/B is one of self.fileInX.
|
2016-03-04 22:23:55 +05:30
|
|
|
"""
|
2015-12-04 03:22:03 +05:30
|
|
|
if not B or len(B) == 0: B = A
|
|
|
|
|
|
|
|
for source,target in zip(map(mapA,A),map(mapB,B)):
|
|
|
|
try:
|
|
|
|
shutil.copy2(source,target)
|
|
|
|
except:
|
|
|
|
logging.critical('error copying {} to {}'.format(source,target))
|
|
|
|
|
|
|
|
|
2012-07-17 18:34:57 +05:30
|
|
|
def copy_Reference2Current(self,sourcefiles=[],targetfiles=[]):
|
|
|
|
|
|
|
|
if len(targetfiles) == 0: targetfiles = sourcefiles
|
|
|
|
for i,file in enumerate(sourcefiles):
|
2012-02-23 23:14:09 +05:30
|
|
|
try:
|
2012-07-17 18:34:57 +05:30
|
|
|
shutil.copy2(self.fileInReference(file),self.fileInCurrent(targetfiles[i]))
|
2012-02-23 23:14:09 +05:30
|
|
|
except:
|
2016-03-21 19:48:58 +05:30
|
|
|
logging.critical('Reference2Current: Unable to copy file "{}"'.format(file))
|
2014-06-03 18:39:52 +05:30
|
|
|
|
2012-07-17 18:34:57 +05:30
|
|
|
|
2013-03-26 17:36:19 +05:30
|
|
|
def copy_Base2Current(self,sourceDir,sourcefiles=[],targetfiles=[]):
|
|
|
|
|
2016-03-21 19:48:58 +05:30
|
|
|
source=os.path.normpath(os.path.join(self.dirBase,'../../..',sourceDir))
|
2013-03-26 17:36:19 +05:30
|
|
|
if len(targetfiles) == 0: targetfiles = sourcefiles
|
|
|
|
for i,file in enumerate(sourcefiles):
|
|
|
|
try:
|
|
|
|
shutil.copy2(os.path.join(source,file),self.fileInCurrent(targetfiles[i]))
|
|
|
|
except:
|
2014-06-07 14:06:43 +05:30
|
|
|
logging.error(os.path.join(source,file))
|
2016-03-21 19:48:58 +05:30
|
|
|
logging.critical('Base2Current: Unable to copy file "{}"'.format(file))
|
2013-03-26 17:36:19 +05:30
|
|
|
|
2014-06-03 18:39:52 +05:30
|
|
|
|
2012-07-17 18:34:57 +05:30
|
|
|
def copy_Current2Reference(self,sourcefiles=[],targetfiles=[]):
|
|
|
|
|
|
|
|
if len(targetfiles) == 0: targetfiles = sourcefiles
|
|
|
|
for i,file in enumerate(sourcefiles):
|
|
|
|
try:
|
|
|
|
shutil.copy2(self.fileInCurrent(file),self.fileInReference(targetfiles[i]))
|
|
|
|
except:
|
2016-03-21 19:48:58 +05:30
|
|
|
logging.critical('Current2Reference: Unable to copy file "{}"'.format(file))
|
2014-06-03 18:39:52 +05:30
|
|
|
|
2012-12-12 22:40:04 +05:30
|
|
|
|
|
|
|
def copy_Proof2Current(self,sourcefiles=[],targetfiles=[]):
|
|
|
|
|
|
|
|
if len(targetfiles) == 0: targetfiles = sourcefiles
|
|
|
|
for i,file in enumerate(sourcefiles):
|
|
|
|
try:
|
|
|
|
shutil.copy2(self.fileInProof(file),self.fileInCurrent(targetfiles[i]))
|
|
|
|
except:
|
2016-03-21 19:48:58 +05:30
|
|
|
logging.critical('Proof2Current: Unable to copy file "{}"'.format(file))
|
2014-06-03 18:39:52 +05:30
|
|
|
|
2012-12-12 22:40:04 +05:30
|
|
|
|
2012-07-17 18:34:57 +05:30
|
|
|
def copy_Current2Current(self,sourcefiles=[],targetfiles=[]):
|
|
|
|
|
|
|
|
for i,file in enumerate(sourcefiles):
|
2012-02-23 23:14:09 +05:30
|
|
|
try:
|
2012-07-17 18:34:57 +05:30
|
|
|
shutil.copy2(self.fileInReference(file),self.fileInCurrent(targetfiles[i]))
|
2012-02-23 23:14:09 +05:30
|
|
|
except:
|
2016-03-21 19:48:58 +05:30
|
|
|
logging.critical('Current2Current: Unable to copy file "{}"'.format(file))
|
2012-01-18 15:04:49 +05:30
|
|
|
|
2014-06-03 18:39:52 +05:30
|
|
|
|
2014-10-03 02:57:03 +05:30
|
|
|
  def execute_inCurrentDir(self,cmd,streamIn=None):
    """Run a shell command inside the current results directory.

    Delegates to damask.util.execute; stdout goes to the debug log,
    stderr to the info log. Returns the (out, error) pair unchanged.
    """
    logging.info(cmd)
    out,error = damask.util.execute(cmd,streamIn,self.dirCurrent())

    logging.info(error)
    logging.debug(out)

    return out,error
|
|
|
|
|
|
|
|
|
2012-07-18 18:01:07 +05:30
|
|
|
|
2012-07-17 18:34:57 +05:30
|
|
|
  def compare_Array(self,File1,File2):
    """Compare the numeric payload of two ASCII tables element-wise.

    Returns the maximum relative error over entries that are nonzero in
    both arrays; raises Exception when the arrays differ in length.
    Zero entries are excluded from the relative comparison (division).
    """
    import numpy as np
    logging.info('\n '.join(['comparing',File1,File2]))
    table1 = damask.ASCIItable(name=File1,readonly=True)
    table1.head_read()
    len1=len(table1.info)+2                                                                         # +2 presumably header-count line + labels line — TODO confirm against ASCIItable format
    table2 = damask.ASCIItable(name=File2,readonly=True)
    table2.head_read()
    len2=len(table2.info)+2

    # re-read the files as plain float arrays, skipping the header; NaNs become 0
    refArray = np.nan_to_num(np.genfromtxt(File1,missing_values='n/a',skip_header = len1,autostrip=True))
    curArray = np.nan_to_num(np.genfromtxt(File2,missing_values='n/a',skip_header = len2,autostrip=True))

    if len(curArray) == len(refArray):
      refArrayNonZero = refArray[refArray.nonzero()]                                                # restrict to positions nonzero in the reference
      curArray = curArray[refArray.nonzero()]
      max_err=np.max(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.))      # further restrict to positions nonzero in current
      max_loc=np.argmax(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.))
      refArrayNonZero = refArrayNonZero[curArray.nonzero()]
      curArray = curArray[curArray.nonzero()]
      print(' ********\n * maximum relative error {} between {} and {}\n ********'.format(max_err,
                                                                                          refArrayNonZero[max_loc],
                                                                                          curArray[max_loc]))
      return max_err
    else:
      raise Exception('mismatch in array size to compare')
|
|
|
|
|
2014-06-03 18:39:52 +05:30
|
|
|
|
2012-07-17 18:34:57 +05:30
|
|
|
def compare_ArrayRefCur(self,ref,cur=''):
|
|
|
|
|
|
|
|
if cur =='': cur = ref
|
|
|
|
refName = self.fileInReference(ref)
|
|
|
|
curName = self.fileInCurrent(cur)
|
|
|
|
return self.compare_Array(refName,curName)
|
2014-06-03 18:39:52 +05:30
|
|
|
|
2012-11-21 21:07:59 +05:30
|
|
|
|
2012-12-12 22:40:04 +05:30
|
|
|
def compare_ArrayCurCur(self,cur0,cur1):
|
2012-11-22 23:31:51 +05:30
|
|
|
|
2012-12-12 22:40:04 +05:30
|
|
|
cur0Name = self.fileInCurrent(cur0)
|
|
|
|
cur1Name = self.fileInCurrent(cur1)
|
|
|
|
return self.compare_Array(cur0Name,cur1Name)
|
|
|
|
|
2015-12-04 03:22:03 +05:30
|
|
|
  def compare_Table(self,headings0,file0,headings1,file1,normHeadings='',normType=None,
                    absoluteTolerance=False,perLine=False,skipLines=[]):
    """Compare two ASCII tables column-by-column against normed tolerances.

    headings0/headings1/normHeadings: lists of dicts with 'label' and
    'shape' keys describing the columns to compare (and to norm by) —
    assumed schema, confirm against callers.
    normType: 'pInf' for max-abs norm, otherwise passed to np.linalg.norm.
    absoluteTolerance: force absolute instead of relative errors.
    perLine: norm each data line separately instead of using the maximum.
    skipLines: data line indices to ignore in the first table.

    Returns the list of maximum errors, one per compared column.
    Raises Exception on shape, column, or line-count mismatch.
    """
    import numpy as np
    logging.info('\n '.join(['comparing ASCII Tables',file0,file1]))
    if normHeadings == '': normHeadings = headings0

    # check if comparison is possible and determine lenght of columns
    if len(headings0) == len(headings1) == len(normHeadings):
      dataLength = len(headings0)
      length = [1 for i in xrange(dataLength)]                                                      # flattened element count per column
      shape = [[] for i in xrange(dataLength)]
      data = [[] for i in xrange(dataLength)]
      maxError = [0.0 for i in xrange(dataLength)]
      absTol = [absoluteTolerance for i in xrange(dataLength)]
      column = [[1 for i in xrange(dataLength)] for j in xrange(2)]                                 # column index per (table, heading)

      norm = [[] for i in xrange(dataLength)]
      normLength = [1 for i in xrange(dataLength)]
      normShape = [[] for i in xrange(dataLength)]
      normColumn = [1 for i in xrange(dataLength)]

      for i in xrange(dataLength):
        if headings0[i]['shape'] != headings1[i]['shape']:
          raise Exception('shape mismatch between {} and {} '.format(headings0[i]['label'],headings1[i]['label']))
        shape[i] = headings0[i]['shape']
        for j in xrange(np.shape(shape[i])[0]):
          length[i] *= shape[i][j]
        normShape[i] = normHeadings[i]['shape']
        for j in xrange(np.shape(normShape[i])[0]):
          normLength[i] *= normShape[i][j]
    else:
      raise Exception('trying to compare {} with {} normed by {} data sets'.format(len(headings0),
                                                                                   len(headings1),
                                                                                   len(normHeadings)))

    table0 = damask.ASCIItable(name=file0,readonly=True)
    table0.head_read()
    table1 = damask.ASCIItable(name=file1,readonly=True)
    table1.head_read()

    # resolve the column index of every requested label in both tables
    for i in xrange(dataLength):
      key0 = ('1_' if length[i]>1 else '') + headings0[i]['label']                                  # multi-component columns are labelled 1_x, 2_x, ...
      key1 = ('1_' if length[i]>1 else '') + headings1[i]['label']
      normKey = ('1_' if normLength[i]>1 else '') + normHeadings[i]['label']
      if key0 not in table0.labels(raw = True):
        raise Exception('column {} not found in 1. table...\n'.format(key0))
      elif key1 not in table1.labels(raw = True):
        raise Exception('column {} not found in 2. table...\n'.format(key1))
      elif normKey not in table0.labels(raw = True):
        raise Exception('column {} not found in 1. table...\n'.format(normKey))
      else:
        column[0][i] = table0.label_index(key0)
        column[1][i] = table1.label_index(key1)
        normColumn[i] = table0.label_index(normKey)

    # first pass: collect data and norms from table 0
    line0 = 0
    while table0.data_read():                                                                       # read next data line of ASCII table
      if line0 not in skipLines:
        for i in xrange(dataLength):
          myData = np.array(map(float,table0.data[column[0][i]:\
                                                  column[0][i]+length[i]]),'d')
          normData = np.array(map(float,table0.data[normColumn[i]:\
                                                    normColumn[i]+normLength[i]]),'d')
          data[i] = np.append(data[i],np.reshape(myData,shape[i]))
          if normType == 'pInf':
            norm[i] = np.append(norm[i],np.max(np.abs(normData)))
          else:
            norm[i] = np.append(norm[i],np.linalg.norm(np.reshape(normData,normShape[i]),normType))
      line0 += 1

    for i in xrange(dataLength):
      if not perLine: norm[i] = [np.max(norm[i]) for j in xrange(line0-len(skipLines))]             # global norm: one value repeated per line
      data[i] = np.reshape(data[i],[line0-len(skipLines),length[i]])
      # NOTE(review): 'any(norm[i]) == 0.0' compares a bool against 0.0, so this
      # branch triggers when ALL norms are zero/falsy (any() is False) — kept as-is.
      if any(norm[i]) == 0.0 or absTol[i]:
        norm[i] = [1.0 for j in xrange(line0-len(skipLines))]                                       # unit norm -> absolute errors
        absTol[i] = True
        if perLine:
          logging.warning('At least one norm of {} in 1. table is 0.0, using absolute tolerance'.format(headings0[i]['label']))
        else:
          logging.warning('Maximum norm of {} in 1. table is 0.0, using absolute tolerance'.format(headings0[i]['label']))

    # second pass: accumulate normed errors against table 1
    line1 = 0
    while table1.data_read():                                                                       # read next data line of ASCII table
      if line1 not in skipLines:
        for i in xrange(dataLength):
          myData = np.array(map(float,table1.data[column[1][i]:\
                                                  column[1][i]+length[i]]),'d')
          maxError[i] = max(maxError[i],np.linalg.norm(np.reshape(myData-data[i][line1-len(skipLines),:],shape[i]))/
                                        norm[i][line1-len(skipLines)])
      line1 +=1

    if (line0 != line1): raise Exception('found {} lines in 1. table but {} in 2. table'.format(line0,line1))

    logging.info(' ********')
    for i in xrange(dataLength):
      if absTol[i]:
        logging.info(' * maximum absolute error {} between {} and {}'.format(maxError[i],
                                                                             headings0[i]['label'],
                                                                             headings1[i]['label']))
      else:
        logging.info(' * maximum relative error {} between {} and {}'.format(maxError[i],
                                                                             headings0[i]['label'],
                                                                             headings1[i]['label']))
    logging.info(' ********')
    return maxError
|
2014-06-03 18:39:52 +05:30
|
|
|
|
2015-10-22 11:29:11 +05:30
|
|
|
|
2015-12-15 20:00:17 +05:30
|
|
|
def compare_TablesStatistically(self,
|
2016-03-21 19:48:58 +05:30
|
|
|
files = [None,None], # list of file names
|
|
|
|
columns = [None], # list of list of column labels (per file)
|
2015-12-15 20:00:17 +05:30
|
|
|
meanTol = 1.0e-4,
|
|
|
|
stdTol = 1.0e-6,
|
|
|
|
preFilter = 1.0e-9):
|
2016-03-04 22:23:55 +05:30
|
|
|
"""
|
|
|
|
calculate statistics of tables
|
2015-12-15 20:00:17 +05:30
|
|
|
|
2016-03-04 22:23:55 +05:30
|
|
|
threshold can be used to ignore small values (a negative number disables this feature)
|
|
|
|
"""
|
2016-03-21 19:48:58 +05:30
|
|
|
if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested
|
2015-12-15 20:00:17 +05:30
|
|
|
files = [str(files)]
|
|
|
|
|
|
|
|
tables = [damask.ASCIItable(name = filename,readonly = True) for filename in files]
|
|
|
|
for table in tables:
|
|
|
|
table.head_read()
|
|
|
|
|
2016-03-21 19:48:58 +05:30
|
|
|
columns += [columns[0]]*(len(files)-len(columns)) # extend to same length as files
|
|
|
|
columns = columns[:len(files)] # truncate to same length as files
|
2015-12-15 20:00:17 +05:30
|
|
|
|
|
|
|
for i,column in enumerate(columns):
|
2016-05-17 20:22:21 +05:30
|
|
|
if column is None: columns[i] = tables[i].labels(raw = True) # if no column is given, read all
|
2015-12-15 20:00:17 +05:30
|
|
|
|
|
|
|
logging.info('comparing ASCIItables statistically')
|
|
|
|
for i in xrange(len(columns)):
|
|
|
|
columns[i] = columns[0] if not columns[i] else \
|
|
|
|
([columns[i]] if not (isinstance(columns[i], Iterable) and not isinstance(columns[i], str)) else \
|
|
|
|
columns[i]
|
|
|
|
)
|
|
|
|
logging.info(files[i]+':'+','.join(columns[i]))
|
|
|
|
|
2016-03-21 19:48:58 +05:30
|
|
|
if len(files) < 2: return True # single table is always close to itself...
|
2015-12-15 20:00:17 +05:30
|
|
|
|
|
|
|
data = []
|
|
|
|
for table,labels in zip(tables,columns):
|
|
|
|
table.data_readArray(labels)
|
|
|
|
data.append(table.data)
|
|
|
|
table.close()
|
|
|
|
|
|
|
|
|
|
|
|
for i in xrange(1,len(data)):
|
|
|
|
delta = data[i]-data[i-1]
|
|
|
|
normBy = (np.abs(data[i]) + np.abs(data[i-1]))*0.5
|
|
|
|
normedDelta = np.where(normBy>preFilter,delta/normBy,0.0)
|
|
|
|
mean = np.amax(np.abs(np.mean(normedDelta,0)))
|
|
|
|
std = np.amax(np.std(normedDelta,0))
|
2016-03-21 19:48:58 +05:30
|
|
|
logging.info('mean: {:f}'.format(mean))
|
|
|
|
logging.info('std: {:f}'.format(std))
|
2015-12-15 20:00:17 +05:30
|
|
|
|
|
|
|
return (mean<meanTol) & (std < stdTol)
|
|
|
|
|
|
|
|
|
|
|
|
|
2015-12-04 03:22:03 +05:30
|
|
|
def compare_Tables(self,
|
2016-03-21 19:48:58 +05:30
|
|
|
files = [None,None], # list of file names
|
|
|
|
columns = [None], # list of list of column labels (per file)
|
2015-12-04 03:22:03 +05:30
|
|
|
rtol = 1e-5,
|
|
|
|
atol = 1e-8,
|
2015-12-11 22:53:36 +05:30
|
|
|
preFilter = -1.0,
|
|
|
|
postFilter = -1.0,
|
2015-12-04 03:22:03 +05:30
|
|
|
debug = False):
|
2016-03-04 22:23:55 +05:30
|
|
|
"""
|
|
|
|
compare tables with np.allclose
|
2015-10-22 11:29:11 +05:30
|
|
|
|
2016-03-04 22:23:55 +05:30
|
|
|
threshold can be used to ignore small values (a negative number disables this feature)
|
|
|
|
"""
|
2016-03-21 19:48:58 +05:30
|
|
|
if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested
|
2015-12-04 03:22:03 +05:30
|
|
|
files = [str(files)]
|
2015-10-22 11:29:11 +05:30
|
|
|
|
2015-12-05 04:10:39 +05:30
|
|
|
tables = [damask.ASCIItable(name = filename,readonly = True) for filename in files]
|
|
|
|
for table in tables:
|
|
|
|
table.head_read()
|
2015-10-30 03:21:57 +05:30
|
|
|
|
2016-03-21 19:48:58 +05:30
|
|
|
columns += [columns[0]]*(len(files)-len(columns)) # extend to same length as files
|
|
|
|
columns = columns[:len(files)] # truncate to same length as files
|
2015-12-04 03:22:03 +05:30
|
|
|
|
2015-12-05 04:10:39 +05:30
|
|
|
for i,column in enumerate(columns):
|
2016-05-17 20:22:21 +05:30
|
|
|
if column is None: columns[i] = tables[i].labels(raw = True) # if no column is given, read all
|
2015-12-05 04:10:39 +05:30
|
|
|
|
2015-12-04 03:22:03 +05:30
|
|
|
logging.info('comparing ASCIItables')
|
|
|
|
for i in xrange(len(columns)):
|
|
|
|
columns[i] = columns[0] if not columns[i] else \
|
|
|
|
([columns[i]] if not (isinstance(columns[i], Iterable) and not isinstance(columns[i], str)) else \
|
|
|
|
columns[i]
|
|
|
|
)
|
|
|
|
logging.info(files[i]+':'+','.join(columns[i]))
|
|
|
|
|
2016-03-21 19:48:58 +05:30
|
|
|
if len(files) < 2: return True # single table is always close to itself...
|
2015-12-04 03:22:03 +05:30
|
|
|
|
|
|
|
maximum = np.zeros(len(columns[0]),dtype='f')
|
|
|
|
data = []
|
|
|
|
for table,labels in zip(tables,columns):
|
|
|
|
table.data_readArray(labels)
|
2015-12-11 22:53:36 +05:30
|
|
|
data.append(np.where(np.abs(table.data)<preFilter,np.zeros_like(table.data),table.data))
|
2015-12-04 03:22:03 +05:30
|
|
|
maximum += np.abs(table.data).max(axis=0)
|
|
|
|
table.close()
|
2015-12-05 04:10:39 +05:30
|
|
|
|
2015-12-04 03:22:03 +05:30
|
|
|
maximum /= len(tables)
|
2016-03-21 19:48:58 +05:30
|
|
|
maximum = np.where(maximum >0.0, maximum, 1) # avoid div by zero for empty columns
|
2015-12-04 03:22:03 +05:30
|
|
|
for i in xrange(len(data)):
|
|
|
|
data[i] /= maximum
|
2015-10-30 03:21:57 +05:30
|
|
|
|
2015-12-04 03:22:03 +05:30
|
|
|
mask = np.zeros_like(table.data,dtype='bool')
|
2015-12-05 04:10:39 +05:30
|
|
|
|
2015-12-04 03:22:03 +05:30
|
|
|
for table in data:
|
2016-03-21 19:48:58 +05:30
|
|
|
mask |= np.where(np.abs(table)<postFilter,True,False) # mask out (all) tiny values
|
2015-12-11 22:53:36 +05:30
|
|
|
|
|
|
|
|
2016-03-21 19:48:58 +05:30
|
|
|
allclose = True # start optimistic
|
2015-12-04 03:22:03 +05:30
|
|
|
for i in xrange(1,len(data)):
|
2015-12-11 22:53:36 +05:30
|
|
|
if debug:
|
|
|
|
t0 = np.where(mask,0.0,data[i-1])
|
|
|
|
t1 = np.where(mask,0.0,data[i ])
|
|
|
|
j = np.argmin(np.abs(t1)*rtol+atol-np.abs(t0-t1))
|
2016-03-21 19:48:58 +05:30
|
|
|
logging.info('{:f}'.format(np.amax(np.abs(t0-t1)/(np.abs(t1)*rtol+atol))))
|
|
|
|
logging.info('{:f} {:f}'.format((t0*maximum).flatten()[j],(t1*maximum).flatten()[j]))
|
2015-12-04 03:22:03 +05:30
|
|
|
allclose &= np.allclose(np.where(mask,0.0,data[i-1]),
|
2016-03-21 19:48:58 +05:30
|
|
|
np.where(mask,0.0,data[i ]),rtol,atol) # accumulate "pessimism"
|
2015-10-22 11:29:11 +05:30
|
|
|
|
2015-12-04 03:22:03 +05:30
|
|
|
return allclose
|
2015-10-22 11:29:11 +05:30
|
|
|
|
2012-12-12 22:40:04 +05:30
|
|
|
|
|
|
|
def compare_TableRefCur(self,headingsRef,ref,headingsCur='',cur='',normHeadings='',normType=None,\
|
|
|
|
absoluteTolerance=False,perLine=False,skipLines=[]):
|
|
|
|
|
|
|
|
if cur == '': cur = ref
|
|
|
|
if headingsCur == '': headingsCur = headingsRef
|
|
|
|
refName = self.fileInReference(ref)
|
|
|
|
curName = self.fileInCurrent(cur)
|
2013-03-19 21:16:07 +05:30
|
|
|
return self.compare_Table(headingsRef,refName,headingsCur,curName,normHeadings,normType,
|
|
|
|
absoluteTolerance,perLine,skipLines)
|
2014-06-03 18:39:52 +05:30
|
|
|
|
2012-12-12 22:40:04 +05:30
|
|
|
|
2013-03-19 21:16:07 +05:30
|
|
|
def compare_TableCurCur(self,headingsCur0,Cur0,Cur1,headingsCur1='',normHeadings='',normType=None,\
|
|
|
|
absoluteTolerance=False,perLine=False,skipLines=[]):
|
|
|
|
|
|
|
|
if headingsCur1 == '': headingsCur1 = headingsCur0
|
|
|
|
cur0Name = self.fileInCurrent(Cur0)
|
|
|
|
cur1Name = self.fileInCurrent(Cur1)
|
|
|
|
return self.compare_Table(headingsCur0,cur0Name,headingsCur1,cur1Name,normHeadings,normType,
|
|
|
|
absoluteTolerance,perLine,skipLines)
|
|
|
|
|
2014-06-03 18:39:52 +05:30
|
|
|
|
2012-01-31 18:40:14 +05:30
|
|
|
def report_Success(self,culprit):
|
2012-07-17 18:34:57 +05:30
|
|
|
|
2013-03-05 01:09:13 +05:30
|
|
|
if culprit == 0:
|
2016-03-21 19:48:58 +05:30
|
|
|
logging.critical(('The test' if len(self.variants) == 1 else 'All {} tests'.format(len(self.variants))) + ' passed')
|
2014-06-04 21:04:35 +05:30
|
|
|
logging.critical('\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n')
|
2013-01-24 00:03:46 +05:30
|
|
|
return 0
|
2013-03-05 01:09:13 +05:30
|
|
|
if culprit == -1:
|
2014-06-07 14:06:43 +05:30
|
|
|
logging.warning('Warning: Could not start test')
|
2013-03-05 01:09:13 +05:30
|
|
|
return 0
|
2012-01-27 15:25:19 +05:30
|
|
|
else:
|
2016-03-21 19:48:58 +05:30
|
|
|
logging.critical(' ********\n * Test {} failed...\n ********'.format(culprit))
|
2014-06-04 21:04:35 +05:30
|
|
|
logging.critical('\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n')
|
2013-03-05 01:09:13 +05:30
|
|
|
return culprit
|