Merge branch 'development' into 26_autodoc-2
commit 0cc4e582e4
@@ -7,9 +7,9 @@ stages:
   - compilePETScGNU
   - prepareSpectral
   - spectral
-  - compileMarc2018_1
+  - compileMarc
   - marc
-  - compileAbaqus2017
+  - compileAbaqus
   - example
   - performance
   - createPackage
@@ -51,39 +51,37 @@ variables:
   # Names of module files to load
   # ===============================================================================================
   # ++++++++++++ Compiler ++++++++++++++++++++++++++++++++++++++++++++++
-  IntelCompiler16_0: "Compiler/Intel/16.0 Libraries/IMKL/2016"
-  IntelCompiler16_4: "Compiler/Intel/16.4 Libraries/IMKL/2016-4"
-  IntelCompiler17_0: "Compiler/Intel/17.0 Libraries/IMKL/2017"
-  IntelCompiler18_1: "Compiler/Intel/18.1 Libraries/IMKL/2018"
-  GNUCompiler7_3: "Compiler/GNU/7.3"
+  IntelCompiler16_4: "Compiler/Intel/16.4 Libraries/IMKL/2016"
+  IntelCompiler17_8: "Compiler/Intel/17.8 Libraries/IMKL/2017"
+  IntelCompiler18_4: "Compiler/Intel/18.4 Libraries/IMKL/2018"
+  GNUCompiler8_2: "Compiler/GNU/8.2"
   # ------------ Defaults ----------------------------------------------
-  IntelCompiler: "$IntelCompiler18_1"
-  GNUCompiler: "$GNUCompiler7_3"
+  IntelCompiler: "$IntelCompiler18_4"
+  GNUCompiler: "$GNUCompiler8_2"
   # ++++++++++++ MPI +++++++++++++++++++++++++++++++++++++++++++++++++++
-  MPICH3_2Intel18_1: "MPI/Intel/18.1/MPICH/3.2.1"
-  MPICH3_2GNU7_3: "MPI/GNU/7.3/MPICH/3.2.1"
+  IMPI2018Intel18_4: "MPI/Intel/18.4/IntelMPI/2018"
+  MPICH3_3GNU8_2: "MPI/GNU/8.2/MPICH/3.3"
   # ------------ Defaults ----------------------------------------------
-  MPICH_Intel: "$MPICH3_2Intel18_1"
-  MPICH_GNU: "$MPICH3_2GNU7_3"
+  MPICH_Intel: "$IMPI2018Intel18_4"
+  MPICH_GNU: "$MPICH3_3GNU8_2"
   # ++++++++++++ PETSc +++++++++++++++++++++++++++++++++++++++++++++++++
-  PETSc3_10_0MPICH3_2Intel18_1: "Libraries/PETSc/3.10.0/Intel-18.1-MPICH-3.2.1"
-  PETSc3_10_0MPICH3_2GNU7_3: "Libraries/PETSc/3.10.0/GNU-7.3-MPICH-3.2.1"
+  PETSc3_10_3IMPI2018Intel18_4: "Libraries/PETSc/3.10.3/Intel-18.4-IntelMPI-2018"
+  PETSc3_10_3MPICH3_3GNU8_2: "Libraries/PETSc/3.10.3/GNU-8.2-MPICH-3.3"
   # ------------ Defaults ----------------------------------------------
-  PETSc_MPICH_Intel: "$PETSc3_10_0MPICH3_2Intel18_1"
-  PETSc_MPICH_GNU: "$PETSc3_10_0MPICH3_2GNU7_3"
+  PETSc_MPICH_Intel: "$PETSc3_10_3IMPI2018Intel18_4"
+  PETSc_MPICH_GNU: "$PETSc3_10_3MPICH3_3GNU8_2"
   # ++++++++++++ FEM +++++++++++++++++++++++++++++++++++++++++++++++++++
-  Abaqus2017: "FEM/Abaqus/2017"
+  Abaqus2019: "FEM/Abaqus/2019"
   MSC2018_1: "FEM/MSC/2018.1"
-  MSC2017: "FEM/MSC/2017"
   # ------------ Defaults ----------------------------------------------
-  Abaqus: "$Abaqus2017"
+  Abaqus: "$Abaqus2019"
   MSC: "$MSC2018_1"
-  IntelMarc: "$IntelCompiler17_0"
+  IntelMarc: "$IntelCompiler17_8"
   IntelAbaqus: "$IntelCompiler16_4"
   # ++++++++++++ Documentation +++++++++++++++++++++++++++++++++++++++++
-  Doxygen1_8_13: "Documentation/Doxygen/1.8.13"
+  Doxygen1_8_15: "Documentation/Doxygen/1.8.15"
   # ------------ Defaults ----------------------------------------------
-  Doxygen: "$Doxygen1_8_13"
+  Doxygen: "$Doxygen1_8_15"
 
 
 ###################################################################################################
@@ -158,6 +156,13 @@ Post_AverageDown:
     - master
     - release
 
+Post_ASCIItable:
+  stage: postprocessing
+  script: ASCIItable/test.py
+  except:
+    - master
+    - release
+
 Post_General:
   stage: postprocessing
   script: PostProcessing/test.py
@@ -383,9 +388,9 @@ TextureComponents:
 
 ###################################################################################################
 Marc_compileIfort2018_1:
-  stage: compileMarc2018_1
+  stage: compileMarc
   script:
-    - module load $IntelCompiler17_0 $MSC2018_1
+    - module load $IntelMarc $MSC
     - Marc_compileIfort/test.py -m 2018.1
   except:
     - master
@@ -430,11 +435,11 @@ J2_plasticBehavior:
     - release
 
 ###################################################################################################
-Abaqus_compile2017:
-  stage: compileAbaqus2017
+Abaqus_compile:
+  stage: compileAbaqus
   script:
-    - module load $IntelCompiler16_4 $Abaqus2017
-    - Abaqus_compileIfort/test.py -a 2017
+    - module load $IntelAbaqus $Abaqus
+    - Abaqus_compileIfort/test.py
   except:
    - master
    - release
CONFIG
@@ -8,6 +8,6 @@ set DAMASK_NUM_THREADS = 4
 set MSC_ROOT = /opt/msc
 set MARC_VERSION = 2018.1
 
-set ABAQUS_VERSION = 2017
+set ABAQUS_VERSION = 2019
 
 set DAMASK_HDF5 = OFF
PRIVATE
@@ -1 +1 @@
-Subproject commit 30434a528f69d77eef1be91e8a2f2fc5e0f85054
+Subproject commit 18ba1ba6a5e9ba446dc9311acf2acf2781614db1
README
@@ -10,3 +10,4 @@ Germany
 
 Email: DAMASK@mpie.de
 https://damask.mpie.de
+https://magit1.mpie.de
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
 # -*- coding: UTF-8 no BOM -*-
 
 import os,sys
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
 # -*- coding: UTF-8 no BOM -*-
 
 import os,sys
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
 # -*- coding: UTF-8 no BOM -*-
 
 import os,sys
@@ -21,7 +21,7 @@ Add data of selected column(s) from (first) row of linked ASCIItable that shares
 parser.add_option('--link',
                   dest = 'link', nargs = 2,
                   type = 'string', metavar = 'string string',
-                  help = 'column labels containing linked values')
+                  help = 'column labels of table and linked table containing linking values')
 parser.add_option('-l','--label',
                   dest = 'label',
                   action = 'extend', metavar = '<string LIST>',
@@ -105,7 +105,8 @@ for name in filenames:
   outputAlive = True
   while outputAlive and table.data_read(): # read next data line of ASCII table
     try:
-      table.data_append(data[np.argwhere(np.all((map(float,table.data[myLink:myLink+myLinkDim]) - index)==0,axis=1))[0]]) # add data of first matching line
+      table.data_append(data[np.argwhere(np.all((list(map(float,table.data[myLink:myLink+myLinkDim])) - index)==0,
+                                                axis=1))[0]]) # add data of first matching line
     except IndexError:
       table.data_append(np.nan*np.ones_like(data[0])) # or add NaNs
     outputAlive = table.data_write() # output processed line
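Note on the map()/list(map()) change above: in Python 3, map() returns a lazy iterator rather than a list, so NumPy can no longer build an array from it directly; wrapping it in list() restores the Python 2 behaviour. A minimal sketch of the difference (the row values and index array below are made up for illustration):

import numpy as np

row   = ['1.0', '2.0', '3.0']            # string fields as read from an ASCII table
index = np.array([1.0, 2.0, 4.0])

# Python 2: map() already returned a list, so "map(float,row) - index" broadcast fine.
# Python 3: map() is an iterator; convert it to a list before NumPy arithmetic.
diff = list(map(float, row)) - index     # NumPy converts the list and broadcasts
print(np.all(diff == 0))                 # False, because the last entries differ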
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
 # -*- coding: UTF-8 no BOM -*-
 
 import os,sys
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
 # -*- coding: UTF-8 no BOM -*-
 
 import os,sys
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
 # -*- coding: UTF-8 no BOM -*-
 
 import os,vtk
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
 # -*- coding: UTF-8 no BOM -*-
 
 import os,vtk
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
 # -*- coding: UTF-8 no BOM -*-
 
 import os,vtk
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
 # -*- coding: UTF-8 no BOM -*-
 
 import os,sys,vtk
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
 # -*- coding: UTF-8 no BOM -*-
 
 import os,sys,vtk
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
 # -*- coding: UTF-8 no BOM -*-
 
 import os,sys,math
@@ -49,7 +49,7 @@ parser.set_defaults(d = 1,
 
 (options, filenames) = parser.parse_args()
 
-options.immutable = map(int,options.immutable)
+options.immutable = list(map(int,options.immutable))
 
 getInterfaceEnergy = lambda A,B: np.float32((A*B != 0)*(A != B)*1.0) # 1.0 if A & B are distinct & nonzero, 0.0 otherwise
 struc = ndimage.generate_binary_structure(3,1) # 3D von Neumann neighborhood
@@ -70,9 +70,9 @@ for name in filenames:
   table.head_read()
   info,extra_header = table.head_getGeom()
 
-  damask.util.croak(['grid a b c: {}'.format(' x '.join(map(str,info['grid']))),
-                     'size x y z: {}'.format(' x '.join(map(str,info['size']))),
-                     'origin x y z: {}'.format(' : '.join(map(str,info['origin']))),
+  damask.util.croak(['grid a b c: {}'.format(' x '.join(list(map(str,info['grid'])))),
+                     'size x y z: {}'.format(' x '.join(list(map(str,info['size'])))),
+                     'origin x y z: {}'.format(' : '.join(list(map(str,info['origin'])))),
                      'homogenization: {}'.format(info['homogenization']),
                      'microstructures: {}'.format(info['microstructures']),
                      ])
@@ -102,9 +102,9 @@ for name in filenames:
   gauss = np.exp(-(X*X + Y*Y + Z*Z)/(2.0*options.d*options.d),dtype=np.float32) \
          /np.power(2.0*np.pi*options.d*options.d,(3.0 - np.count_nonzero(info['grid'] == 1))/2.,dtype=np.float32)
 
-  gauss[:,:,:grid[2]/2:-1] = gauss[:,:,1:(grid[2]+1)/2] # trying to cope with uneven (odd) grid size
-  gauss[:,:grid[1]/2:-1,:] = gauss[:,1:(grid[1]+1)/2,:]
-  gauss[:grid[0]/2:-1,:,:] = gauss[1:(grid[0]+1)/2,:,:]
+  gauss[:,:,:grid[2]//2:-1] = gauss[:,:,1:(grid[2]+1)//2] # trying to cope with uneven (odd) grid size
+  gauss[:,:grid[1]//2:-1,:] = gauss[:,1:(grid[1]+1)//2,:]
+  gauss[:grid[0]//2:-1,:,:] = gauss[1:(grid[0]+1)//2,:,:]
   gauss = np.fft.rfftn(gauss).astype(np.complex64)
 
   for smoothIter in range(options.N):
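The /-to-// changes in this and the following hunks address another Python 3 difference: / now always yields a float, which is not accepted as a slice index, while // keeps the truncating integer division the Python 2 code relied on. A minimal sketch with a made-up grid size:

import numpy as np

grid  = np.array([16, 16, 17])           # hypothetical (odd) grid size
gauss = np.zeros(grid)

# grid[2]/2 is 8.5 in Python 3 and would raise "slice indices must be integers";
# grid[2]//2 truncates to 8, matching the old Python 2 behaviour.
gauss[:, :, :grid[2]//2:-1] = gauss[:, :, 1:(grid[2]+1)//2]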
@@ -119,9 +119,9 @@ for name in filenames:
                                                             microstructure,i,axis=0), j,axis=1), k,axis=2)))
 
     # periodically extend interfacial energy array by half a grid size in positive and negative directions
-    periodic_interfaceEnergy = np.tile(interfaceEnergy,(3,3,3))[grid[0]/2:-grid[0]/2,
-                                                                grid[1]/2:-grid[1]/2,
-                                                                grid[2]/2:-grid[2]/2]
+    periodic_interfaceEnergy = np.tile(interfaceEnergy,(3,3,3))[grid[0]//2:-grid[0]//2,
+                                                                grid[1]//2:-grid[1]//2,
+                                                                grid[2]//2:-grid[2]//2]
 
     # transform bulk volume (i.e. where interfacial energy remained zero), store index of closest boundary voxel
     index = ndimage.morphology.distance_transform_edt(periodic_interfaceEnergy == 0.,
@@ -148,15 +148,15 @@ for name in filenames:
                                            ndimage.morphology.binary_dilation(interfaceEnergy > 0.,
                                                                               structure = struc,
                                                                               iterations = int(round(options.d*2.))-1),# fat boundary
-                                           periodic_bulkEnergy[grid[0]/2:-grid[0]/2, # retain filled energy on fat boundary...
-                                                               grid[1]/2:-grid[1]/2,
-                                                               grid[2]/2:-grid[2]/2], # ...and zero everywhere else
+                                           periodic_bulkEnergy[grid[0]//2:-grid[0]//2, # retain filled energy on fat boundary...
+                                                               grid[1]//2:-grid[1]//2,
+                                                               grid[2]//2:-grid[2]//2], # ...and zero everywhere else
                                            0.)).astype(np.complex64) *
                                gauss).astype(np.float32)
 
-    periodic_diffusedEnergy = np.tile(diffusedEnergy,(3,3,3))[grid[0]/2:-grid[0]/2,
-                                                              grid[1]/2:-grid[1]/2,
-                                                              grid[2]/2:-grid[2]/2] # periodically extend the smoothed bulk energy
+    periodic_diffusedEnergy = np.tile(diffusedEnergy,(3,3,3))[grid[0]//2:-grid[0]//2,
+                                                              grid[1]//2:-grid[1]//2,
+                                                              grid[2]//2:-grid[2]//2] # periodically extend the smoothed bulk energy
 
 
     # transform voxels close to interface region
@@ -164,15 +164,15 @@ for name in filenames:
                                                       return_distances = False,
                                                       return_indices = True) # want index of closest bulk grain
 
-    periodic_microstructure = np.tile(microstructure,(3,3,3))[grid[0]/2:-grid[0]/2,
-                                                              grid[1]/2:-grid[1]/2,
-                                                              grid[2]/2:-grid[2]/2] # periodically extend the microstructure
+    periodic_microstructure = np.tile(microstructure,(3,3,3))[grid[0]//2:-grid[0]//2,
+                                                              grid[1]//2:-grid[1]//2,
+                                                              grid[2]//2:-grid[2]//2] # periodically extend the microstructure
 
     microstructure = periodic_microstructure[index[0],
                                              index[1],
-                                             index[2]].reshape(2*grid)[grid[0]/2:-grid[0]/2,
-                                                                       grid[1]/2:-grid[1]/2,
-                                                                       grid[2]/2:-grid[2]/2] # extent grains into interface region
+                                             index[2]].reshape(2*grid)[grid[0]//2:-grid[0]//2,
+                                                                       grid[1]//2:-grid[1]//2,
+                                                                       grid[2]//2:-grid[2]//2] # extent grains into interface region
 
     # replace immutable microstructures with closest mutable ones
     index = ndimage.morphology.distance_transform_edt(np.in1d(microstructure,options.immutable).reshape(grid),
@@ -236,3 +236,4 @@ for name in filenames:
 # --- output finalization --------------------------------------------------------------------------
 
   table.close()
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
 # -*- coding: UTF-8 no BOM -*-
 
 import os,sys
@@ -48,11 +48,11 @@ for name in filenames:
   table.head_read()
   info,extra_header = table.head_getGeom()
 
-  damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
-                     'size x y z: %s'%(' x '.join(map(str,info['size']))),
-                     'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
-                     'homogenization: %i'%info['homogenization'],
-                     'microstructures: %i'%info['microstructures'],
+  damask.util.croak(['grid a b c: {}'.format(' x '.join(list(map(str,info['grid'])))),
+                     'size x y z: {}'.format(' x '.join(list(map(str,info['size'])))),
+                     'origin x y z: {}'.format(' : '.join(list(map(str,info['origin'])))),
+                     'homogenization: {}'.format(info['homogenization']),
+                     'microstructures: {}'.format(info['microstructures']),
                      ])
 
   errors = []
@@ -1,10 +1,11 @@
-#!/usr/bin/env python2.7
+#!/usr/bin/env python3
 # -*- coding: UTF-8 no BOM -*-
 
 import threading,time,os,sys,random
 import numpy as np
 from optparse import OptionParser
-from cStringIO import StringIO
+from io import StringIO
+import binascii
 import damask
 
 scriptName = os.path.splitext(os.path.basename(__file__))[0]
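The import changes above follow the usual Python 3 migration: cStringIO no longer exists, io.StringIO takes its place (it has no reset() method, hence the seek(0) calls later in this file), and binascii replaces the removed str.encode('hex'). A minimal sketch of the StringIO part, with made-up contents:

from io import StringIO                  # Python 3 replacement for cStringIO.StringIO

buf = StringIO()
buf.write('1 header\n')
buf.write('x y z\n')
buf.seek(0)                              # rewind; cStringIO objects offered reset() for this
print(buf.read(), end='')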
@@ -96,7 +97,7 @@ class myThread (threading.Thread):
     perturbedGeomVFile = StringIO()
     perturbedSeedsVFile.reset()
     perturbedGeomVFile.write(damask.util.execute('geom_fromVoronoiTessellation '+
-                                                 ' -g '+' '.join(map(str, options.grid)),streamIn=perturbedSeedsVFile)[0])
+                                                 ' -g '+' '.join(list(map(str, options.grid))),streamIn=perturbedSeedsVFile)[0])
     perturbedGeomVFile.reset()
 
 #--- evaluate current seeds file ----------------------------------------------------------------------
@@ -214,7 +215,7 @@ options = parser.parse_args()[0]
 damask.util.report(scriptName,options.seedFile)
 
 if options.randomSeed is None:
-  options.randomSeed = int(os.urandom(4).encode('hex'), 16)
+  options.randomSeed = int(binascii.hexlify(os.urandom(4)),16)
 damask.util.croak(options.randomSeed)
 delta = (options.scale/options.grid[0],options.scale/options.grid[1],options.scale/options.grid[2])
 baseFile=os.path.splitext(os.path.basename(options.seedFile))[0]
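For the random-seed line, Python 3 bytes objects no longer support .encode('hex'); binascii.hexlify() yields the same hexadecimal representation, so the derived integer seed is unchanged. A minimal sketch:

import os
import binascii

random_bytes = os.urandom(4)                            # four random bytes
seed = int(binascii.hexlify(random_bytes), 16)          # same value the old .encode('hex') produced
print(seed)                                             # a 32-bit unsigned integer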
@@ -240,17 +241,17 @@ if os.path.isfile(os.path.splitext(options.seedFile)[0]+'.seeds'):
   for line in initialSeedFile: bestSeedsVFile.write(line)
 else:
   bestSeedsVFile.write(damask.util.execute('seeds_fromRandom'+\
-                                           ' -g '+' '.join(map(str, options.grid))+\
+                                           ' -g '+' '.join(list(map(str, options.grid)))+\
                                            ' -r {:d}'.format(options.randomSeed)+\
                                            ' -N '+str(nMicrostructures))[0])
 bestSeedsUpdate = time.time()
 
 # ----------- tessellate initial seed file to get and evaluate geom file
-bestSeedsVFile.reset()
+bestSeedsVFile.seek(0)
 initialGeomVFile = StringIO()
 initialGeomVFile.write(damask.util.execute('geom_fromVoronoiTessellation '+
-                       ' -g '+' '.join(map(str, options.grid)),bestSeedsVFile)[0])
-initialGeomVFile.reset()
+                       ' -g '+' '.join(list(map(str, options.grid))),bestSeedsVFile)[0])
+initialGeomVFile.seek(0)
 initialGeomTable = damask.ASCIItable(initialGeomVFile,None,labeled=False,readonly=True)
 initialGeomTable.head_read()
 info,devNull = initialGeomTable.head_getGeom()
@@ -2,7 +2,7 @@
 
 from .solver import Solver
 import damask
-import subprocess,re
+import subprocess
 
 class Abaqus(Solver):
 
@@ -15,14 +15,13 @@ class Abaqus(Solver):
 
   def return_run_command(self,model):
     env=damask.Environment()
-    shortVersion = re.sub('[\.,-]', '',self.version)
     try:
-      cmd='abq'+shortVersion
-      subprocess.check_output(['abq'+shortVersion,'information=release'])
+      cmd='abq'+self.version
+      subprocess.check_output([cmd,'information=release'])
     except OSError: # link to abqXXX not existing
       cmd='abaqus'
       process = subprocess.Popen(['abaqus','information=release'],stdout = subprocess.PIPE,stderr = subprocess.PIPE)
-      detectedVersion = process.stdout.readlines()[1].split()[1]
+      detectedVersion = process.stdout.readlines()[1].split()[1].decode('utf-8')
       if self.version != detectedVersion:
-        raise Exception('found Abaqus version %s, but requested %s'%(detectedVersion,self.version))
-    return '%s -job %s -user %s/src/DAMASK_abaqus interactive'%(cmd,model,env.rootDir())
+        raise Exception('found Abaqus version {}, but requested {}'.format(detectedVersion,self.version))
+    return '{} -job {} -user {}/src/DAMASK_abaqus interactive'.format(cmd,model,env.rootDir())
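The decode('utf-8') added above is needed because subprocess pipes return bytes under Python 3, and a bytes version string would never compare equal to the str held in self.version. A minimal, self-contained sketch (echo serves as a stand-in for the Abaqus call, which is not available here):

import subprocess

# stand-in for: subprocess.Popen(['abaqus','information=release'], ...)
process = subprocess.Popen(['echo', 'Abaqus 2019'],
                           stdout=subprocess.PIPE, stderr=subprocess.PIPE)
detected = process.stdout.readlines()[0].split()[1].decode('utf-8')
print(detected == '2019')                # True only after decoding bytes to str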