Merge branch 'development' into geom-sort
commit be47c744d8
@@ -20,12 +20,12 @@ stages:
 
 ###################################################################################################
 before_script:
-  - if [ $(awk "/$CI_PIPELINE_ID/{print NR}" $TESTROOT/GitLabCI.queue)x == 'x' ];
-    then echo $CI_PIPELINE_ID >> $TESTROOT/GitLabCI.queue;
+  - if [ $(awk "/$CI_PIPELINE_ID/{print NR}" $LOCAL_HOME/GitLabCI.queue)x == 'x' ];
+    then echo $CI_PIPELINE_ID >> $LOCAL_HOME/GitLabCI.queue;
     fi
-  - while [ $(awk "/$CI_PIPELINE_ID/{print NR}" $TESTROOT/GitLabCI.queue) != 1 ];
+  - while [ $(awk "/$CI_PIPELINE_ID/{print NR}" $LOCAL_HOME/GitLabCI.queue) != 1 ];
     do sleep 5m;
-    echo -e "Currently queued pipelines:\n$(cat $TESTROOT/GitLabCI.queue)\n";
+    echo -e "Currently queued pipelines:\n$(cat $LOCAL_HOME/GitLabCI.queue)\n";
    done
  - source $DAMASKROOT/env/DAMASK.sh
  - cd $DAMASKROOT/PRIVATE/testing
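Note: the awk-based queue above is a plain-file FIFO lock. Each pipeline appends its ID once (`$(awk ...)x == 'x'` tests for empty awk output, i.e. "not yet enqueued"), then polls until `awk "/$CI_PIPELINE_ID/{print NR}"` reports line 1, meaning the pipeline is at the head of the queue. A minimal Python sketch of the same semantics, with a hypothetical queue path standing in for `$LOCAL_HOME/GitLabCI.queue`:

```python
import time
from pathlib import Path

queue = Path('/scratch/GitLabCI.queue')  # hypothetical stand-in for $LOCAL_HOME/GitLabCI.queue
pipeline_id = '12345'                    # stand-in for $CI_PIPELINE_ID

def position(pid):
    """1-based line number of pid in the queue file, like awk "/pid/{print NR}"."""
    for nr, line in enumerate(queue.read_text().splitlines(), start=1):
        if pid in line:
            return nr
    return None                          # empty awk output corresponds to None

if position(pipeline_id) is None:        # enqueue exactly once
    with queue.open('a') as f:
        f.write(pipeline_id + '\n')

while position(pipeline_id) != 1:        # busy-wait until first in line
    time.sleep(300)                      # the CI script sleeps 5m between polls
```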
@@ -45,7 +45,8 @@ variables:
   # ===============================================================================================
   # Shortcut names
   # ===============================================================================================
-  DAMASKROOT: "$TESTROOT/GitLabCI_Pipeline_$CI_PIPELINE_ID/DAMASK"
+  DAMASKROOT: "$LOCAL_HOME/GitLabCI_Pipeline_$CI_PIPELINE_ID/DAMASK"
+  TESTROOT: "$LOCAL_HOME/GitLabCI_Pipeline_$CI_PIPELINE_ID/tests"
 
   # ===============================================================================================
   # Names of module files to load
@@ -75,31 +76,25 @@ variables:
   MSC: "$MSC2019_1"
   IntelMarc: "$IntelCompiler17_8"
   HDF5Marc: "HDF5/1.10.5/Intel-17.8"
-  # ++++++++++++ Documentation ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
-  Doxygen1_8_17: "Documentation/Doxygen/1.8.17"
-  # ------------ Defaults ----------------------------------------------
-  Doxygen: "$Doxygen1_8_17"
-
 
 ###################################################################################################
 checkout:
   stage: prepareAll
   before_script:
-    - echo $CI_PIPELINE_ID >> $TESTROOT/GitLabCI.queue
-    - while [ $(awk "/$CI_PIPELINE_ID/{print NR}" $TESTROOT/GitLabCI.queue) != 1 ];
+    - echo $CI_PIPELINE_ID >> $LOCAL_HOME/GitLabCI.queue
+    - while [ $(awk "/$CI_PIPELINE_ID/{print NR}" $LOCAL_HOME/GitLabCI.queue) != 1 ];
       do sleep 5m;
-      echo -e "Currently queued pipelines:\n$(cat $TESTROOT/GitLabCI.queue)\n";
+      echo -e "Currently queued pipelines:\n$(cat $LOCAL_HOME/GitLabCI.queue)\n";
       done
   script:
     - mkdir -p $DAMASKROOT
+    - mkdir -p $TESTROOT
     - cd $DAMASKROOT
-    - if [ -d DAMASK ]; then rm -rf DAMASK; fi # there might be some leftovers from a failed clone
     - git clone -q git@magit1.mpie.de:damask/DAMASK.git .
     - git checkout $CI_COMMIT_SHA
    - git submodule update --init
    - source env/DAMASK.sh
    - make processing
-    - mkdir /tmp/$CI_PIPELINE_ID
  except:
    - master
    - release
@@ -109,7 +104,7 @@ Pytest_python:
   stage: python
   script:
     - cd $DAMASKROOT/python
-    - pytest --basetemp=/tmp/${CI_PIPELINE_ID}/python
+    - pytest --basetemp=${TESTROOT}/python -v
   except:
     - master
     - release
@@ -195,7 +190,7 @@ grid_mech_compile_Intel:
     - cp -r grid_mech_compile grid_mech_compile_Intel
     - grid_mech_compile_Intel/test.py
     - cd pytest
-    - pytest -k 'compile and grid' --basetemp=/tmp/${CI_PIPELINE_ID}/compile_grid_Intel
+    - pytest -k 'compile and grid' --basetemp=${TESTROOT}/compile_grid_Intel
   except:
     - master
     - release
@@ -207,7 +202,7 @@ Compile_FEM_Intel:
     - cp -r FEM_compile FEM_compile_Intel
     - FEM_compile_Intel/test.py
     - cd pytest
-    - pytest -k 'compile and mesh' --basetemp=/tmp/${CI_PIPELINE_ID}/compile_mesh_Intel
+    - pytest -k 'compile and mesh' --basetemp=${TESTROOT}/compile_mesh_Intel
   except:
     - master
     - release
@@ -219,7 +214,7 @@ grid_mech_compile_GNU:
     - cp -r grid_mech_compile grid_mech_compile_GNU
     - grid_mech_compile_GNU/test.py
     - cd pytest
-    - pytest -k 'compile and grid' --basetemp=/tmp/${CI_PIPELINE_ID}/compile_grid_GNU
+    - pytest -k 'compile and grid' --basetemp=${TESTROOT}/compile_grid_GNU
   except:
     - master
     - release
@@ -231,7 +226,7 @@ Compile_FEM_GNU:
     - cp -r FEM_compile FEM_compile_GNU
     - FEM_compile_GNU/test.py
     - cd pytest
-    - pytest -k 'compile and mesh' --basetemp=/tmp/${CI_PIPELINE_ID}/compile_mesh_GNU
+    - pytest -k 'compile and mesh' --basetemp=${TESTROOT}/compile_mesh_GNU
   except:
     - master
     - release
@@ -253,7 +248,7 @@ Pytest_grid:
   script:
     - module load $IntelCompiler $MPICH_Intel $PETSc_MPICH_Intel
     - cd pytest
-    - pytest -m 'not compile' --basetemp=/tmp/${CI_PIPELINE_ID}/fortran
+    - pytest -k 'not compile' --basetemp=${TESTROOT}/fortran -v
   except:
     - master
     - release
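Note the selector change in this job: `-m 'not compile'` filters on registered pytest markers, whereas the new `-k 'not compile'` filters on test names and keywords. The two agree only when every compile test both carries the marker and has "compile" in its name. A toy illustration (hypothetical tests, not from the DAMASK suite):

```python
import pytest

@pytest.mark.compile        # selected by `pytest -m compile`,
def test_build_solver():    # but excluded by `pytest -k compile` (name lacks the word)
    assert True

def test_compile_flags():   # selected by `pytest -k compile` (name matches),
    assert True             # but excluded by `pytest -m compile` (no marker)
```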
@@ -314,9 +309,8 @@ Marc_compileIfort:
   stage: compileMarc
   script:
     - module load $IntelMarc $HDF5Marc $MSC
-    - Marc_compileIfort/test.py
     - cd pytest
-    - pytest -k 'compile and Marc'
+    - pytest -k 'compile and Marc' --basetemp=${TESTROOT}/compile_Marc
   except:
     - master
     - release
@@ -382,10 +376,10 @@ SpectralRuntime:
     - module load $IntelCompiler $MPICH_Intel $PETSc_MPICH_Intel
     - cd $DAMASKROOT
     - make clean grid processing OPTIMIZATION=AGGRESSIVE
-    - cd $TESTROOT/performance # location of old results
+    - cd $LOCAL_HOME/performance # location of old results
     - git checkout . # undo any changes (i.e. run time data from non-development branch)
     - cd $DAMASKROOT/PRIVATE/testing
-    - SpectralAll_runtime/test.py -d $TESTROOT/performance
+    - SpectralAll_runtime/test.py -d $LOCAL_HOME/performance
   except:
     - master
     - release
@@ -401,20 +395,10 @@ createTar:
     - release
 
 ###################################################################################################
-Marc:
+Python:
   stage: createDocumentation
   script:
-    - module load $IntelCompiler $MPICH_Intel $PETSc_MPICH_Intel $Doxygen
-    - $DAMASKROOT/PRIVATE/documenting/runDoxygen.sh $DAMASKROOT marc
-  except:
-    - master
-    - release
-
-GridSolver:
-  stage: createDocumentation
-  script:
-    - module load $IntelCompiler $MPICH_Intel $PETSc_MPICH_Intel $Doxygen
-    - $DAMASKROOT/PRIVATE/documenting/runDoxygen.sh $DAMASKROOT grid
+    - echo 'tbd one matesting1'
   except:
     - master
     - release
@@ -423,11 +407,11 @@ GridSolver:
 backupData:
   stage: saveDocumentation
   script:
-    - cd $TESTROOT/performance # location of new runtime results
+    - cd $LOCAL_HOME/performance # location of new runtime results
     - git commit -am"${CI_PIPELINE_ID}_${CI_COMMIT_SHA}"
     - mkdir $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}
-    - mv $TESTROOT/performance/time.png $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
-    - mv $TESTROOT/performance/memory.png $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
+    - mv $LOCAL_HOME/performance/time.png $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
+    - mv $LOCAL_HOME/performance/memory.png $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
     - mv $DAMASKROOT/PRIVATE/documenting/DAMASK_* $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
   only:
     - development
@@ -457,8 +441,8 @@ removeData:
   before_script:
     - echo "Removing data and lock of pipeline $CI_PIPELINE_ID"
   script:
-    - rm -rf $TESTROOT/GitLabCI_Pipeline_$CI_PIPELINE_ID
-    - sed -i "/$CI_PIPELINE_ID/d" $TESTROOT/GitLabCI.queue # in case pipeline was manually (web GUI) restarted and releaseLock was performed already
+    - rm -rf $LOCAL_HOME/GitLabCI_Pipeline_$CI_PIPELINE_ID
+    - sed -i "/$CI_PIPELINE_ID/d" $LOCAL_HOME/GitLabCI.queue # in case pipeline was manually (web GUI) restarted and releaseLock was performed already
   except:
     - master
     - release
@@ -469,7 +453,7 @@ removeLock:
   before_script:
     - echo "Removing lock of pipeline $CI_PIPELINE_ID"
   when: always
-  script: sed -i "/$CI_PIPELINE_ID/d" $TESTROOT/GitLabCI.queue
+  script: sed -i "/$CI_PIPELINE_ID/d" $LOCAL_HOME/GitLabCI.queue
   except:
     - master
     - release
PRIVATE (submodule, 2 changes)
@@ -1 +1 @@
-Subproject commit fb8647f77aeb52098605f0fd7fe95764e6d26027
+Subproject commit e2301f7d12ff0ae12218d9b58e33a814eb5431c9
@@ -51,4 +51,4 @@ else
   setenv PYTHONPATH $DAMASK_ROOT/python:$PYTHONPATH
 endif
 setenv MSC_ROOT
-setenv MARC_VERSION
+setenv MSC_VERSION
@@ -110,7 +110,7 @@ phase:
   Aluminum:
     elasticity: {C_11: 106.75e9, C_12: 60.41e9, C_44: 28.34e9, type: hooke}
     generic:
-      output: [F, P, Fe, Fp, Lp, O]
+      output: [F, P, F_e, F_p, L_p, O]
    lattice: fcc
    plasticity:
      N_sl: [12]
@@ -7,15 +7,15 @@ from pathlib import Path
 
 import damask
 
-marc_version = float(damask.environment.options['MSC_VERSION'])
-if int(marc_version) == marc_version:
-    marc_version = int(marc_version)
+msc_version = float(damask.environment.options['MSC_VERSION'])
+if int(msc_version) == msc_version:
+    msc_version = int(msc_version)
 msc_root = Path(damask.environment.options['MSC_ROOT'])
 damask_root = damask.environment.root_dir
 
 parser = argparse.ArgumentParser(
                  description='Apply DAMASK modification to MSC.Marc/Mentat',
-                 epilog = f'MSC_ROOT={msc_root} and MSC_VERSION={marc_version} (from {damask_root}/env/CONFIG)')
+                 epilog = f'MSC_ROOT={msc_root} and MSC_VERSION={msc_version} (from {damask_root}/env/CONFIG)')
 parser.add_argument('--editor', dest='editor', metavar='string', default='vi',
                     help='Name of the editor for MSC.Mentat (executable)')
 
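The rename keeps the existing version normalization: the value is parsed as float and demoted to int when it has no fractional part, so later f-strings render directory names like `marc2020` rather than `marc2020.0`. A worked check of that behavior (standalone sketch, not part of the script):

```python
def normalize(version_string):
    """'2019.1' stays float 2019.1; '2020' becomes int 2020."""
    v = float(version_string)
    return int(v) if int(v) == v else v

assert normalize('2019.1') == 2019.1
assert f"marc{normalize('2019.1')}" == 'marc2019.1'
assert f"marc{normalize('2020')}" == 'marc2020'      # no trailing '.0'
```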
@@ -24,7 +24,7 @@ def copy_and_replace(in_file,dst):
     with open(in_file) as f:
         content = f.read()
     content = content.replace('%INSTALLDIR%',str(msc_root))
-    content = content.replace('%VERSION%',str(marc_version))
+    content = content.replace('%VERSION%',str(msc_version))
     content = content.replace('%EDITOR%', parser.parse_args().editor)
     with open(dst/Path(in_file).name,'w') as f:
         f.write(content)
@@ -32,36 +32,36 @@ def copy_and_replace(in_file,dst):
 
 print('adapting Marc tools...\n')
 
-src = damask_root/f'installation/mods_MarcMentat/{marc_version}/Marc_tools'
-dst = msc_root/f'marc{marc_version}/tools'
+src = damask_root/f'installation/mods_MarcMentat/{msc_version}/Marc_tools'
+dst = msc_root/f'marc{msc_version}/tools'
 for in_file in glob.glob(str(src/'*damask*')) + [str(src/'include_linux64')]:
     copy_and_replace(in_file,dst)
 
 
 print('adapting Mentat scripts and menus...\n')
 
-src = damask_root/f'installation/mods_MarcMentat/{marc_version}/Mentat_bin'
-dst = msc_root/f'mentat{marc_version}/bin'
+src = damask_root/f'installation/mods_MarcMentat/{msc_version}/Mentat_bin'
+dst = msc_root/f'mentat{msc_version}/bin'
 for in_file in glob.glob(str(src/'*[!.original]')):
     copy_and_replace(in_file,dst)
 
-src = damask_root/f'installation/mods_MarcMentat/{marc_version}/Mentat_menus'
-dst = msc_root/f'mentat{marc_version}/menus'
+src = damask_root/f'installation/mods_MarcMentat/{msc_version}/Mentat_menus'
+dst = msc_root/f'mentat{msc_version}/menus'
 for in_file in glob.glob(str(src/'job_run.ms')):
     copy_and_replace(in_file,dst)
 
 
 print('compiling Mentat menu binaries...')
 
-executable = str(msc_root/f'mentat{marc_version}/bin/mentat')
-menu_file = str(msc_root/f'mentat{marc_version}/menus/linux64/main.msb')
+executable = str(msc_root/f'mentat{msc_version}/bin/mentat')
+menu_file = str(msc_root/f'mentat{msc_version}/menus/linux64/main.msb')
 os.system(f'xvfb-run {executable} -compile {menu_file}')
 
 
 print('setting file access rights...\n')
 
-for pattern in [msc_root/f'marc{marc_version}/tools/*damask*',
-                msc_root/f'mentat{marc_version}/bin/submit?',
-                msc_root/f'mentat{marc_version}/bin/kill?']:
+for pattern in [msc_root/f'marc{msc_version}/tools/*damask*',
+                msc_root/f'mentat{msc_version}/bin/submit?',
+                msc_root/f'mentat{msc_version}/bin/kill?']:
     for f in glob.glob(str(pattern)):
         os.chmod(f,0o755)
@@ -1,61 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-from io import StringIO
-from optparse import OptionParser
-
-import numpy as np
-
-import damask
-
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID = ' '.join([scriptName,damask.version])
-
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
-Permute all values in given column(s).
-
-""", version = scriptID)
-
-parser.add_option('-l','--label',
-                  dest = 'label',
-                  action = 'extend', metavar = '<string LIST>',
-                  help ='column(s) to permute')
-parser.add_option('-u', '--unique',
-                  dest = 'unique',
-                  action = 'store_true',
-                  help = 'shuffle unique values as group')
-parser.add_option('-r', '--rnd',
-                  dest = 'randomSeed',
-                  type = 'int', metavar = 'int',
-                  help = 'seed of random number generator [%default]')
-
-parser.set_defaults(label = [],
-                    unique = False,
-                    randomSeed = None,
-                    )
-
-(options,filenames) = parser.parse_args()
-if filenames == []: filenames = [None]
-
-for name in filenames:
-    damask.util.report(scriptName,name)
-
-    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
-
-    randomSeed = int(os.urandom(4).hex(), 16) if options.randomSeed is None else options.randomSeed # random seed per file
-    rng = np.random.default_rng(randomSeed)
-
-    for label in options.label:
-        data = table.get(label)
-        uniques,inverse = np.unique(data,return_inverse=True,axis=0) if options.unique else (data,np.arange(len(data)))
-        rng.shuffle(uniques)
-        table = table.set(label,uniques[inverse], scriptID+' '+' '.join(sys.argv[1:]))
-
-    table.save((sys.stdout if name is None else name), legacy=True)
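The core trick of the deleted script is worth recording: with `np.unique(...,return_inverse=True)`, shuffling the array of unique values and re-expanding through the inverse index permutes equal entries as a group, so rows that were identical before the shuffle remain identical after it. A standalone demonstration (not the deleted script itself):

```python
import numpy as np

data = np.array([3, 1, 3, 2, 1, 3])
uniques, inverse = np.unique(data, return_inverse=True)  # uniques = [1, 2, 3]

rng = np.random.default_rng(12345)                       # arbitrary seed
rng.shuffle(uniques)                                     # permute the unique values

shuffled = uniques[inverse]                              # re-expand via inverse index
assert len(np.unique(shuffled[data == 3])) == 1          # all former 3s still agree
```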
@@ -1,57 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-from optparse import OptionParser
-
-import damask
-
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID = ' '.join([scriptName,damask.version])
-
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
-Converts ASCII table. Input can be microstructure or orientation (as quaternion). For the latter,
-phase information can be given additionally.
-
-""", version = scriptID)
-
-parser.add_option('--coordinates',
-                  dest = 'pos',
-                  type = 'string', metavar = 'string',
-                  help = 'coordinates label (%default)')
-parser.add_option('--phase',
-                  dest = 'phase',
-                  type = 'string', metavar = 'string',
-                  help = 'phase label')
-parser.add_option('--microstructure',
-                  dest = 'microstructure',
-                  type = 'string', metavar = 'string',
-                  help = 'microstructure label')
-parser.add_option('-q', '--quaternion',
-                  dest = 'quaternion',
-                  type = 'string', metavar='string',
-                  help = 'quaternion label')
-
-parser.set_defaults(pos= 'pos')
-
-(options,filenames) = parser.parse_args()
-if filenames == []: filenames = [None]
-
-for name in filenames:
-    damask.util.report(scriptName,name)
-
-    labels = []
-    for l in [options.quaternion,options.phase,options.microstructure]:
-        if l is not None: labels.append(l)
-
-    t = damask.Table.load(name)
-    geom = damask.Geom.from_table(t,options.pos,labels)
-    damask.util.croak(geom)
-
-    geom.save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom')
@@ -266,13 +266,9 @@ class Geom:
 
         labels_ = [labels] if isinstance(labels,str) else labels
         unique,unique_inverse = np.unique(np.hstack([table.get(l) for l in labels_]),return_inverse=True,axis=0)
-        if len(unique) == grid.prod():
-            ma = np.arange(grid.prod())
-        else:
-            from_ma = pd.unique(unique_inverse)
-            sort_idx = np.argsort(from_ma)
-            idx = np.searchsorted(from_ma,unique_inverse,sorter = sort_idx)
-            ma = np.arange(from_ma.size)[sort_idx][idx]
+        ma = np.arange(grid.prod()) if len(unique) == grid.prod() else \
+             np.arange(unique.size)[np.argsort(pd.unique(unique_inverse))][unique_inverse]
 
         return Geom(ma.reshape(grid,order='F'),size,origin,util.execution_stamp('Geom','from_table'))
 
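The four-line `searchsorted` construction and its one-line replacement are equivalent: both renumber material labels to consecutive IDs ordered by first appearance along the flattened grid. `pd.unique`, unlike `np.unique`, preserves encounter order, so `np.argsort(pd.unique(unique_inverse))` is exactly the permutation from "sorted-unique" numbering to "first-appearance" numbering (and `np.arange(n)[p]` is just `p`). A small equivalence check, assuming numpy and pandas:

```python
import numpy as np
import pandas as pd

unique_inverse = np.array([2, 0, 2, 1, 0])  # labels from np.unique(..., return_inverse=True)

# old construction: searchsorted against first-appearance order
from_ma = pd.unique(unique_inverse)                       # [2, 0, 1]
sort_idx = np.argsort(from_ma)
idx = np.searchsorted(from_ma, unique_inverse, sorter=sort_idx)
ma_old = np.arange(from_ma.size)[sort_idx][idx]

# new one-liner from this commit
ma_new = np.argsort(pd.unique(unique_inverse))[unique_inverse]

assert (ma_old == ma_new).all()                           # both give [0, 1, 0, 2, 1]
```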
@@ -1,6 +1,7 @@
 import subprocess
 import shlex
-import string
+import re
+import io
 from pathlib import Path
 
 from .. import environment
@@ -27,7 +28,10 @@ class Marc:
         path_MSC = environment.options['MSC_ROOT']
         path_lib = Path(f'{path_MSC}/mentat{self.version}/shlib/linux64')
 
-        return path_lib if path_lib.is_dir() else None
+        if not path_lib.is_dir():
+            raise FileNotFoundError(f'library path "{path_lib}" not found')
+
+        return path_lib
 
 
     @property
@@ -36,10 +40,12 @@ class Marc:
         path_MSC = environment.options['MSC_ROOT']
         path_tools = Path(f'{path_MSC}/marc{self.version}/tools')
 
-        return path_tools if path_tools.is_dir() else None
+        if not path_tools.is_dir():
+            raise FileNotFoundError(f'tools path "{path_tools}" not found')
+
+        return path_tools
 
 
-#--------------------------
     def submit_job(self,
                    model,
                    job = 'job1',
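Both path properties switch from returning None to raising FileNotFoundError, so a missing Marc/Mentat installation now fails at the point of access with the offending path in the message, and callers such as submit_job no longer need a None check. A minimal sketch of the fail-fast property pattern (hypothetical class, not the DAMASK API):

```python
from pathlib import Path

class Tool:
    def __init__(self, root):
        self.root = Path(root)

    @property
    def tools_path(self):
        """Raise a descriptive error instead of silently returning None."""
        p = self.root/'tools'
        if not p.is_dir():
            raise FileNotFoundError(f'tools path "{p}" not found')
        return p

# Tool('/opt/msc').tools_path  ->  FileNotFoundError: tools path "/opt/msc/tools" not found
```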
@@ -48,38 +54,37 @@ class Marc:
                    optimization = '',
                   ):
 
 
         usersub = environment.root_dir/'src/DAMASK_marc'
         usersub = usersub.parent/(usersub.name + ('.f90' if compile else '.marc'))
         if not usersub.is_file():
-            raise FileNotFoundError("DAMASK4Marc ({}) '{}' not found".format(('source' if compile else 'binary'),usersub))
+            raise FileNotFoundError(f'subroutine ({"source" if compile else "binary"}) "{usersub}" not found')
 
         # Define options [see Marc Installation and Operation Guide, pp 23]
         script = f'run_damask_{optimization}mp'
 
-        cmd = str(self.tools_path/Path(script)) + \
-              ' -jid ' + model + '_' + job + \
+        cmd = str(self.tools_path/script) + \
+              ' -jid ' + model+'_'+job + \
               ' -nprocd 1 -autorst 0 -ci n -cr n -dcoup 0 -b no -v no'
-        if compile: cmd += ' -u ' + str(usersub) + ' -save y'
-        else:       cmd += ' -prog ' + str(usersub.with_suffix(''))
+        cmd += ' -u ' + str(usersub) + ' -save y' if compile else \
+               ' -prog ' + str(usersub.with_suffix(''))
 
-        print('job submission {} compilation: {}'.format(('with' if compile else 'without'),usersub))
-        if logfile: log = open(logfile, 'w')
         print(cmd)
-        process = subprocess.Popen(shlex.split(cmd),stdout = log,stderr = subprocess.STDOUT)
-        log.close()
-        process.wait()
 
-#--------------------------
-    def exit_number_from_outFile(self,outFile=None):
-        exitnumber = -1
-        with open(outFile,'r') as fid_out:
-            for line in fid_out:
-                if (string.find(line,'tress iteration') != -1):
-                    print(line)
-                elif (string.find(line,'Exit number') != -1):
-                    substr = line[string.find(line,'Exit number'):len(line)]
-                    exitnumber = int(substr[12:16])
-
-        return exitnumber
+        if logfile is not None:
+            try:
+                f = open(logfile,'w+')
+            except TypeError:
+                f = logfile
+        else:
+            f = io.StringIO()
+
+        proc = subprocess.Popen(shlex.split(cmd),stdout=f,stderr=subprocess.STDOUT)
+        proc.wait()
+        f.seek(0)
+
+        try:
+            v = int(re.search('Exit number ([0-9]+)',''.join(f.readlines())).group(1))
+        except (AttributeError,ValueError):
+            raise RuntimeError('Marc simulation failed (unknown return value)')
+
+        if v != 3004:
+            raise RuntimeError(f'Marc simulation failed ({v})')
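The rewritten tail of submit_job folds three kinds of `logfile` argument into one seekable stream before launching Marc: a path is opened fresh, an already-open file object falls through via the TypeError that `open()` raises for non-path arguments, and None becomes an in-memory `io.StringIO`. The stream is then rewound and scanned for Marc's "Exit number", where 3004 signals a clean run. A condensed sketch of that dispatch (helper names are illustrative, not the class API):

```python
import io
import re

def as_log_stream(logfile):
    """Return a seekable, writable stream for a path, an open file, or None."""
    if logfile is not None:
        try:
            return open(logfile, 'w+')   # path-like: open a fresh file
        except TypeError:
            return logfile               # already a file object
    return io.StringIO()                 # None: keep the output in memory

def exit_number(log_text):
    """Extract Marc's exit number; 3004 means success."""
    m = re.search('Exit number ([0-9]+)', log_text)
    if m is None:
        raise RuntimeError('Marc simulation failed (unknown return value)')
    return int(m.group(1))

assert exit_number('... Exit number 3004 ...') == 3004
```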
@@ -169,6 +169,7 @@ def scale_to_coprime(v):
 
     def lcm(a, b):
         """Least common multiple."""
+        # Python 3.9 provides math.lcm, see https://stackoverflow.com/questions/51716916.
         return a * b // np.gcd(a, b)
 
     m = (np.array(v) * reduce(lcm, map(lambda x: int(get_square_denominator(x)),v)) ** 0.5).astype(np.int)
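The added comment points to `math.lcm`, available from Python 3.9, as a future replacement; `scale_to_coprime` folds this two-argument `lcm` over all denominators with `functools.reduce` to get the n-ary least common multiple. A worked check of the fold:

```python
from functools import reduce
import numpy as np

def lcm(a, b):
    """Least common multiple (math.lcm covers this from Python 3.9 on)."""
    return a * b // np.gcd(a, b)

assert lcm(4, 6) == 12
assert reduce(lcm, [4, 6, 10]) == 60   # reduce folds lcm pairwise over the sequence
```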
@@ -749,23 +749,23 @@ subroutine crystallite_results
           selected_tensors = select_tensors(crystallite_partitionedF,p)
           call results_writeDataset(group,selected_tensors,output_constituent(p)%label(o),&
                                     'deformation gradient','1')
-        case('Fe')
+        case('F_e')
           selected_tensors = select_tensors(crystallite_Fe,p)
           call results_writeDataset(group,selected_tensors,output_constituent(p)%label(o),&
                                     'elastic deformation gradient','1')
-        case('Fp')
+        case('F_p')
           selected_tensors = select_tensors(crystallite_Fp,p)
           call results_writeDataset(group,selected_tensors,output_constituent(p)%label(o),&
                                     'plastic deformation gradient','1')
-        case('Fi')
+        case('F_i')
           selected_tensors = select_tensors(crystallite_Fi,p)
           call results_writeDataset(group,selected_tensors,output_constituent(p)%label(o),&
                                     'inelastic deformation gradient','1')
-        case('Lp')
+        case('L_p')
           selected_tensors = select_tensors(crystallite_Lp,p)
           call results_writeDataset(group,selected_tensors,output_constituent(p)%label(o),&
                                     'plastic velocity gradient','1/s')
-        case('Li')
+        case('L_i')
           selected_tensors = select_tensors(crystallite_Li,p)
           call results_writeDataset(group,selected_tensors,output_constituent(p)%label(o),&
                                     'inelastic velocity gradient','1/s')
@@ -1547,11 +1547,11 @@ subroutine crystallite_restartWrite
   fileHandle = HDF5_openFile(fileName,'a')
 
   call HDF5_write(fileHandle,crystallite_partitionedF,'F')
-  call HDF5_write(fileHandle,crystallite_Fp, 'Fp')
-  call HDF5_write(fileHandle,crystallite_Fi, 'Fi')
-  call HDF5_write(fileHandle,crystallite_Lp, 'Lp')
-  call HDF5_write(fileHandle,crystallite_Li, 'Li')
+  call HDF5_write(fileHandle,crystallite_Fp, 'F_p')
+  call HDF5_write(fileHandle,crystallite_Fi, 'F_i')
+  call HDF5_write(fileHandle,crystallite_Lp, 'L_p')
+  call HDF5_write(fileHandle,crystallite_Li, 'L_i')
   call HDF5_write(fileHandle,crystallite_S, 'S')
 
   groupHandle = HDF5_addGroup(fileHandle,'constituent')
   do i = 1,size(material_name_phase)
@@ -1588,10 +1588,10 @@ subroutine crystallite_restartRead
   fileHandle = HDF5_openFile(fileName)
 
   call HDF5_read(fileHandle,crystallite_F0, 'F')
-  call HDF5_read(fileHandle,crystallite_Fp0,'Fp')
-  call HDF5_read(fileHandle,crystallite_Fi0,'Fi')
-  call HDF5_read(fileHandle,crystallite_Lp0,'Lp')
-  call HDF5_read(fileHandle,crystallite_Li0,'Li')
+  call HDF5_read(fileHandle,crystallite_Fp0,'F_p')
+  call HDF5_read(fileHandle,crystallite_Fi0,'F_i')
+  call HDF5_read(fileHandle,crystallite_Lp0,'L_p')
+  call HDF5_read(fileHandle,crystallite_Li0,'L_i')
   call HDF5_read(fileHandle,crystallite_S0, 'S')
 
   groupHandle = HDF5_openGroup(fileHandle,'constituent')
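Since the dataset renaming is applied symmetrically in crystallite_restartWrite and crystallite_restartRead, restart files written before this commit would presumably need their datasets renamed before the new code can read them. A hedged one-off migration sketch using h5py (dataset names taken from this diff, file path hypothetical):

```python
import h5py

# hypothetical path to a restart file written before the rename
with h5py.File('old_restart.hdf5', 'r+') as f:
    for old, new in [('Fp', 'F_p'), ('Fi', 'F_i'), ('Lp', 'L_p'), ('Li', 'L_i')]:
        if old in f:
            f.move(old, new)   # rename the dataset in place
```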