Merge branch 'test-new-dir' into development

Martin Diehl 2020-11-01 19:29:52 +01:00
commit 086b215d94
5 changed files with 62 additions and 191 deletions


@@ -20,12 +20,12 @@ stages:
###################################################################################################
before_script:
- if [ $(awk "/$CI_PIPELINE_ID/{print NR}" $TESTROOT/GitLabCI.queue)x == 'x' ];
then echo $CI_PIPELINE_ID >> $TESTROOT/GitLabCI.queue;
- if [ $(awk "/$CI_PIPELINE_ID/{print NR}" $LOCAL_HOME/GitLabCI.queue)x == 'x' ];
then echo $CI_PIPELINE_ID >> $LOCAL_HOME/GitLabCI.queue;
fi
- while [ $(awk "/$CI_PIPELINE_ID/{print NR}" $TESTROOT/GitLabCI.queue) != 1 ];
- while [ $(awk "/$CI_PIPELINE_ID/{print NR}" $LOCAL_HOME/GitLabCI.queue) != 1 ];
do sleep 5m;
echo -e "Currently queued pipelines:\n$(cat $TESTROOT/GitLabCI.queue)\n";
echo -e "Currently queued pipelines:\n$(cat $LOCAL_HOME/GitLabCI.queue)\n";
done
- source $DAMASKROOT/env/DAMASK.sh
- cd $DAMASKROOT/PRIVATE/testing
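
For context, the before_script above serializes pipelines through a shared queue file: each pipeline appends its ID once and then polls until its ID is the first line, so only one pipeline uses the test machine at a time; this commit only moves the queue file from $TESTROOT to $LOCAL_HOME. A minimal Python sketch of the same idea (queue path and poll interval are made up, not part of the CI setup):

import time
from pathlib import Path

QUEUE = Path('/tmp/GitLabCI.queue')      # stand-in for $LOCAL_HOME/GitLabCI.queue

def enqueue_and_wait(pipeline_id, poll_seconds=300):
    """Append pipeline_id once, then block until it is the first entry in the queue."""
    entries = QUEUE.read_text().splitlines() if QUEUE.exists() else []
    if str(pipeline_id) not in entries:                      # mirrors the awk existence check
        with QUEUE.open('a') as f:
            f.write(f'{pipeline_id}\n')
    while QUEUE.read_text().splitlines()[0] != str(pipeline_id):
        print('Currently queued pipelines:\n' + QUEUE.read_text())
        time.sleep(poll_seconds)                             # the CI job sleeps 5 minutes per poll
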
@@ -45,7 +45,8 @@ variables:
# ===============================================================================================
# Shortcut names
# ===============================================================================================
DAMASKROOT: "$TESTROOT/GitLabCI_Pipeline_$CI_PIPELINE_ID/DAMASK"
DAMASKROOT: "$LOCAL_HOME/GitLabCI_Pipeline_$CI_PIPELINE_ID/DAMASK"
TESTROOT: "$LOCAL_HOME/GitLabCI_Pipeline_$CI_PIPELINE_ID/tests"
# ===============================================================================================
# Names of module files to load
@@ -75,31 +76,25 @@ variables:
MSC: "$MSC2019_1"
IntelMarc: "$IntelCompiler17_8"
HDF5Marc: "HDF5/1.10.5/Intel-17.8"
# ++++++++++++ Documentation ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Doxygen1_8_17: "Documentation/Doxygen/1.8.17"
# ------------ Defaults ----------------------------------------------
Doxygen: "$Doxygen1_8_17"
###################################################################################################
checkout:
stage: prepareAll
before_script:
- echo $CI_PIPELINE_ID >> $TESTROOT/GitLabCI.queue
- while [ $(awk "/$CI_PIPELINE_ID/{print NR}" $TESTROOT/GitLabCI.queue) != 1 ];
- echo $CI_PIPELINE_ID >> $LOCAL_HOME/GitLabCI.queue
- while [ $(awk "/$CI_PIPELINE_ID/{print NR}" $LOCAL_HOME/GitLabCI.queue) != 1 ];
do sleep 5m;
echo -e "Currently queued pipelines:\n$(cat $TESTROOT/GitLabCI.queue)\n";
echo -e "Currently queued pipelines:\n$(cat $LOCAL_HOME/GitLabCI.queue)\n";
done
script:
- mkdir -p $DAMASKROOT
- mkdir -p $TESTROOT
- cd $DAMASKROOT
- if [ -d DAMASK ]; then rm -rf DAMASK; fi # there might be some leftovers from a failed clone
- git clone -q git@magit1.mpie.de:damask/DAMASK.git .
- git checkout $CI_COMMIT_SHA
- git submodule update --init
- source env/DAMASK.sh
- make processing
- mkdir /tmp/$CI_PIPELINE_ID
except:
- master
- release
@@ -109,7 +104,7 @@ Pytest_python:
stage: python
script:
- cd $DAMASKROOT/python
- pytest --basetemp=/tmp/${CI_PIPELINE_ID}/python
- pytest --basetemp=${TESTROOT}/python -v
except:
- master
- release
@@ -195,7 +190,7 @@ grid_mech_compile_Intel:
- cp -r grid_mech_compile grid_mech_compile_Intel
- grid_mech_compile_Intel/test.py
- cd pytest
- pytest -k 'compile and grid' --basetemp=/tmp/${CI_PIPELINE_ID}/compile_grid_Intel
- pytest -k 'compile and grid' --basetemp=${TESTROOT}/compile_grid_Intel
except:
- master
- release
@@ -207,7 +202,7 @@ Compile_FEM_Intel:
- cp -r FEM_compile FEM_compile_Intel
- FEM_compile_Intel/test.py
- cd pytest
- pytest -k 'compile and mesh' --basetemp=/tmp/${CI_PIPELINE_ID}/compile_mesh_Intel
- pytest -k 'compile and mesh' --basetemp=${TESTROOT}/compile_mesh_Intel
except:
- master
- release
@@ -219,7 +214,7 @@ grid_mech_compile_GNU:
- cp -r grid_mech_compile grid_mech_compile_GNU
- grid_mech_compile_GNU/test.py
- cd pytest
- pytest -k 'compile and grid' --basetemp=/tmp/${CI_PIPELINE_ID}/compile_grid_GNU
- pytest -k 'compile and grid' --basetemp=${TESTROOT}/compile_grid_GNU
except:
- master
- release
@@ -231,7 +226,7 @@ Compile_FEM_GNU:
- cp -r FEM_compile FEM_compile_GNU
- FEM_compile_GNU/test.py
- cd pytest
- pytest -k 'compile and mesh' --basetemp=/tmp/${CI_PIPELINE_ID}/compile_mesh_GNU
- pytest -k 'compile and mesh' --basetemp=${TESTROOT}/compile_mesh_GNU
except:
- master
- release
@@ -253,7 +248,7 @@ Pytest_grid:
script:
- module load $IntelCompiler $MPICH_Intel $PETSc_MPICH_Intel
- cd pytest
- pytest -m 'not compile' --basetemp=/tmp/${CI_PIPELINE_ID}/fortran
- pytest -k 'not compile' --basetemp=${TESTROOT}/fortran -v
except:
- master
- release
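
Note that the Pytest_grid job also changes how tests are selected: -m 'not compile' filters by pytest markers, while the new -k 'not compile' filters by test and file names. A tiny illustration with invented test names (not part of the DAMASK test suite):

import pytest

@pytest.mark.compile                 # matched by -m compile (marker-based selection)
def test_compile_grid_solver():      # also matched by -k compile (name-based selection)
    assert True

def test_run_grid_solver():          # collected by both -m 'not compile' and -k 'not compile'
    assert True

With -k, no marker has to be registered or applied; the keyword expression is matched against the collected test identifiers.
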
@@ -314,9 +309,8 @@ Marc_compileIfort:
stage: compileMarc
script:
- module load $IntelMarc $HDF5Marc $MSC
- Marc_compileIfort/test.py
- cd pytest
- pytest -k 'compile and Marc'
- pytest -k 'compile and Marc' --basetemp=${TESTROOT}/compile_Marc
except:
- master
- release
@@ -382,10 +376,10 @@ SpectralRuntime:
- module load $IntelCompiler $MPICH_Intel $PETSc_MPICH_Intel
- cd $DAMASKROOT
- make clean grid processing OPTIMIZATION=AGGRESSIVE
- cd $TESTROOT/performance # location of old results
- cd $LOCAL_HOME/performance # location of old results
- git checkout . # undo any changes (i.e. run time data from non-development branch)
- cd $DAMASKROOT/PRIVATE/testing
- SpectralAll_runtime/test.py -d $TESTROOT/performance
- SpectralAll_runtime/test.py -d $LOCAL_HOME/performance
except:
- master
- release
@@ -401,20 +395,10 @@ createTar:
- release
###################################################################################################
Marc:
Python:
stage: createDocumentation
script:
- module load $IntelCompiler $MPICH_Intel $PETSc_MPICH_Intel $Doxygen
- $DAMASKROOT/PRIVATE/documenting/runDoxygen.sh $DAMASKROOT marc
except:
- master
- release
GridSolver:
stage: createDocumentation
script:
- module load $IntelCompiler $MPICH_Intel $PETSc_MPICH_Intel $Doxygen
- $DAMASKROOT/PRIVATE/documenting/runDoxygen.sh $DAMASKROOT grid
- echo 'tbd one matesting1'
except:
- master
- release
@@ -423,11 +407,11 @@ GridSolver:
backupData:
stage: saveDocumentation
script:
- cd $TESTROOT/performance # location of new runtime results
- cd $LOCAL_HOME/performance # location of new runtime results
- git commit -am"${CI_PIPELINE_ID}_${CI_COMMIT_SHA}"
- mkdir $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}
- mv $TESTROOT/performance/time.png $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
- mv $TESTROOT/performance/memory.png $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
- mv $LOCAL_HOME/performance/time.png $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
- mv $LOCAL_HOME/performance/memory.png $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
- mv $DAMASKROOT/PRIVATE/documenting/DAMASK_* $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
only:
- development
@@ -457,8 +441,8 @@ removeData:
before_script:
- echo "Removing data and lock of pipeline $CI_PIPELINE_ID"
script:
- rm -rf $TESTROOT/GitLabCI_Pipeline_$CI_PIPELINE_ID
- sed -i "/$CI_PIPELINE_ID/d" $TESTROOT/GitLabCI.queue # in case pipeline was manually (web GUI) restarted and releaseLock was performed already
- rm -rf $LOCAL_HOME/GitLabCI_Pipeline_$CI_PIPELINE_ID
- sed -i "/$CI_PIPELINE_ID/d" $LOCAL_HOME/GitLabCI.queue # in case pipeline was manually (web GUI) restarted and releaseLock was performed already
except:
- master
- release
@@ -469,7 +453,7 @@ removeLock:
before_script:
- echo "Removing lock of pipeline $CI_PIPELINE_ID"
when: always
script: sed -i "/$CI_PIPELINE_ID/d" $TESTROOT/GitLabCI.queue
script: sed -i "/$CI_PIPELINE_ID/d" $LOCAL_HOME/GitLabCI.queue
except:
- master
- release

@@ -1 +1 @@
Subproject commit fb8647f77aeb52098605f0fd7fe95764e6d26027
Subproject commit 768bae34b26cc09a9ffa47b8f5a1b20b43dd115d


@@ -1,61 +0,0 @@
#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Permute all values in given column(s).
""", version = scriptID)
parser.add_option('-l','--label',
                  dest = 'label',
                  action = 'extend', metavar = '<string LIST>',
                  help ='column(s) to permute')
parser.add_option('-u', '--unique',
                  dest = 'unique',
                  action = 'store_true',
                  help = 'shuffle unique values as group')
parser.add_option('-r', '--rnd',
                  dest = 'randomSeed',
                  type = 'int', metavar = 'int',
                  help = 'seed of random number generator [%default]')

parser.set_defaults(label = [],
                    unique = False,
                    randomSeed = None,
                   )

(options,filenames) = parser.parse_args()
if filenames == []: filenames = [None]

for name in filenames:
    damask.util.report(scriptName,name)

    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
    randomSeed = int(os.urandom(4).hex(), 16) if options.randomSeed is None else options.randomSeed # random seed per file
    rng = np.random.default_rng(randomSeed)

    for label in options.label:
        data = table.get(label)
        uniques,inverse = np.unique(data,return_inverse=True,axis=0) if options.unique else (data,np.arange(len(data)))
        rng.shuffle(uniques)
        table = table.set(label,uniques[inverse], scriptID+' '+' '.join(sys.argv[1:]))

    table.save((sys.stdout if name is None else name), legacy=True)
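
The deleted script's --unique mode shuffles identical values as a group by permuting only the unique entries and re-expanding them through the inverse index returned by np.unique. A standalone sketch of that trick (array and seed are made up for illustration; the script additionally passes axis=0 to treat multi-column rows as units):

import numpy as np

rng = np.random.default_rng(20201101)            # arbitrary seed for a reproducible example
data = np.array([1, 1, 2, 2, 3])

uniques, inverse = np.unique(data, return_inverse=True)
rng.shuffle(uniques)                             # permute the unique values only
print(uniques[inverse])                          # duplicates stay grouped, e.g. [3 3 1 1 2]
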


@@ -1,57 +0,0 @@
#!/usr/bin/env python3
import os
import sys
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Converts ASCII table. Input can be microstructure or orientation (as quaternion). For the latter,
phase information can be given additionally.
""", version = scriptID)
parser.add_option('--coordinates',
                  dest = 'pos',
                  type = 'string', metavar = 'string',
                  help = 'coordinates label (%default)')
parser.add_option('--phase',
                  dest = 'phase',
                  type = 'string', metavar = 'string',
                  help = 'phase label')
parser.add_option('--microstructure',
                  dest = 'microstructure',
                  type = 'string', metavar = 'string',
                  help = 'microstructure label')
parser.add_option('-q', '--quaternion',
                  dest = 'quaternion',
                  type = 'string', metavar='string',
                  help = 'quaternion label')

parser.set_defaults(pos= 'pos')

(options,filenames) = parser.parse_args()
if filenames == []: filenames = [None]

for name in filenames:
    damask.util.report(scriptName,name)

    labels = []
    for l in [options.quaternion,options.phase,options.microstructure]:
        if l is not None: labels.append(l)

    t = damask.Table.load(name)
    geom = damask.Geom.from_table(t,options.pos,labels)
    damask.util.croak(geom)

    geom.save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom')
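
The conversion this second deleted script wrapped lives in the library calls it imports (Table.load, Geom.from_table, Geom.save_ASCII). A hedged usage sketch with placeholder file and column names:

import damask

# 'geometry.txt' and the column names are placeholders for illustration
table = damask.Table.load('geometry.txt')
geom = damask.Geom.from_table(table, 'pos', ['microstructure'])
print(geom)                                  # the script used damask.util.croak(geom)
geom.save_ASCII('geometry.geom')
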


@@ -1,6 +1,7 @@
import subprocess
import shlex
import string
import re
import io
from pathlib import Path
from .. import environment
@@ -27,7 +28,10 @@ class Marc:
        path_MSC = environment.options['MSC_ROOT']
        path_lib = Path(f'{path_MSC}/mentat{self.version}/shlib/linux64')
        return path_lib if path_lib.is_dir() else None
        if not path_lib.is_dir():
            raise FileNotFoundError(f'library path "{path_lib}" not found')
        return path_lib
@property
@@ -36,10 +40,12 @@ class Marc:
        path_MSC = environment.options['MSC_ROOT']
        path_tools = Path(f'{path_MSC}/marc{self.version}/tools')
        return path_tools if path_tools.is_dir() else None
        if not path_tools.is_dir():
            raise FileNotFoundError(f'tools path "{path_tools}" not found')
        return path_tools
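
Both path properties above now fail fast with FileNotFoundError instead of silently returning None, so callers no longer have to None-check the result. A minimal sketch of the same pattern outside DAMASK (class and directory names are illustrative, not the DAMASK API):

from pathlib import Path

class Toolchain:
    """Illustrative stand-in, not the DAMASK Marc wrapper."""
    def __init__(self, root):
        self.root = Path(root)

    @property
    def tools_path(self):
        path_tools = self.root/'tools'
        if not path_tools.is_dir():              # fail fast instead of returning None
            raise FileNotFoundError(f'tools path "{path_tools}" not found')
        return path_tools
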
#--------------------------
def submit_job(self,
model,
job = 'job1',
@@ -48,38 +54,37 @@
                   optimization = '',
                  ):

        usersub = environment.root_dir/'src/DAMASK_marc'
        usersub = usersub.parent/(usersub.name + ('.f90' if compile else '.marc'))
        if not usersub.is_file():
            raise FileNotFoundError("DAMASK4Marc ({}) '{}' not found".format(('source' if compile else 'binary'),usersub))
            raise FileNotFoundError(f'subroutine ({"source" if compile else "binary"}) "{usersub}" not found')

        # Define options [see Marc Installation and Operation Guide, pp 23]
        script = f'run_damask_{optimization}mp'

        cmd = str(self.tools_path/Path(script)) + \
              ' -jid ' + model + '_' + job + \
              ' -nprocd 1 -autorst 0 -ci n -cr n -dcoup 0 -b no -v no'
        if compile: cmd += ' -u ' + str(usersub) + ' -save y'
        else:       cmd += ' -prog ' + str(usersub.with_suffix(''))
        print('job submission {} compilation: {}'.format(('with' if compile else 'without'),usersub))
        if logfile: log = open(logfile, 'w')
        cmd = str(self.tools_path/script) + \
              ' -jid ' + model+'_'+job + \
              ' -nprocd 1 -autorst 0 -ci n -cr n -dcoup 0 -b no -v no'
        cmd += ' -u ' + str(usersub) + ' -save y' if compile else \
               ' -prog ' + str(usersub.with_suffix(''))
        print(cmd)

        process = subprocess.Popen(shlex.split(cmd),stdout = log,stderr = subprocess.STDOUT)
        log.close()
        process.wait()

#--------------------------
    def exit_number_from_outFile(self,outFile=None):
        exitnumber = -1
        with open(outFile,'r') as fid_out:
            for line in fid_out:
                if (string.find(line,'tress iteration') != -1):
                    print(line)
                elif (string.find(line,'Exit number') != -1):
                    substr = line[string.find(line,'Exit number'):len(line)]
                    exitnumber = int(substr[12:16])
        return exitnumber

        if logfile is not None:
            try:
                f = open(logfile,'w+')
            except TypeError:
                f = logfile
        else:
            f = io.StringIO()

        proc = subprocess.Popen(shlex.split(cmd),stdout=f,stderr=subprocess.STDOUT)
        proc.wait()
        f.seek(0)

        try:
            v = int(re.search('Exit number ([0-9]+)',''.join(f.readlines())).group(1))
        except (AttributeError,ValueError):
            raise RuntimeError('Marc simulation failed (unknown return value)')
        if v != 3004:
            raise RuntimeError(f'Marc simulation failed ({v})')
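
With this rewrite, submit_job no longer relies on the removed exit_number_from_outFile helper: it directs Marc's combined output to a log file, a caller-supplied file object, or an in-memory buffer, then searches the text for the 'Exit number' marker and accepts only 3004. A simplified, self-contained sketch of that capture-and-check pattern (the helper name and the buffering via subprocess.run are assumptions of this sketch, not the DAMASK implementation):

import re
import shlex
import subprocess

def run_and_check(cmd, logfile=None, expected=3004):
    """Run cmd, capture combined stdout/stderr, and verify the 'Exit number' marker."""
    proc = subprocess.run(shlex.split(cmd), stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT, text=True)
    output = proc.stdout

    if logfile is not None:                      # optionally persist the log
        with open(logfile, 'w') as f:
            f.write(output)

    try:
        v = int(re.search('Exit number ([0-9]+)', output).group(1))
    except (AttributeError, ValueError):         # marker missing or not a number
        raise RuntimeError('simulation failed (unknown return value)')
    if v != expected:
        raise RuntimeError(f'simulation failed ({v})')
    return output

# run_and_check('echo "Exit number 3004"')       # placeholder command for a quick check
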