fixed wrong setting of coordinates in the spectral solvers and removed old Python scripts for setting up the code and the processing tools
parent f9c45fd931
commit cc9eb685fe
@@ -187,8 +187,8 @@ subroutine AL_init(temperature)
   read (777,rec=1) temp3333_Real
   close (777)
 endif
-mesh_ipCoordinates = 0.0_pReal !reshape(mesh_deformedCoordsFFT(geomdim,&
-                               !reshape(F,[3,3,res(1),res(2),res(3)])),[3,1,mesh_NcpElems])
+mesh_ipCoordinates = reshape(mesh_deformedCoordsFFT(geomdim,reshape(&
+                             F,[3,3,res(1),res(2),res(3)])),[3,1,mesh_NcpElems])
 call Utilities_constitutiveResponse(F,F,temperature,0.0_pReal,P,temp3333_Real2,&
                                     temp33_Real,.false.,math_I3)
 call DMDAVecRestoreArrayF90(da,solution_vec,xx_psc,ierr); CHKERRQ(ierr)
@@ -313,8 +313,8 @@ type(tSolutionState) function &
 
 !--------------------------------------------------------------------------------------------------
 ! update coordinates and rate and forward last inc
-mesh_ipCoordinates = 0.0_pReal !reshape(mesh_deformedCoordsFFT(geomdim,&
-                               !reshape(F,[3,3,res(1),res(2),res(3)])),[3,1,mesh_NcpElems])
+mesh_ipCoordinates = reshape(mesh_deformedCoordsFFT(geomdim,reshape(&
+                             F,[3,3,res(1),res(2),res(3)])),[3,1,mesh_NcpElems])
 Fdot = Utilities_calculateRate(math_rotate_backward33(f_aimDot,rotation_BC), &
                                timeinc_old,guess,F_lastInc,reshape(F,[3,3,res(1),res(2),res(3)]))
 F_lambdaDot = Utilities_calculateRate(math_rotate_backward33(f_aimDot,rotation_BC), &
@@ -125,8 +125,7 @@ subroutine basic_init(temperature)
   read (777,rec=1) temp3333_Real
   close (777)
 endif
-mesh_ipCoordinates = 0.0_pReal !reshape(mesh_deformedCoordsFFT(geomdim,&
-                               !reshape(F,[3,3,res(1),res(2),res(3)])),[3,1,mesh_NcpElems])
+mesh_ipCoordinates = reshape(mesh_deformedCoordsFFT(geomdim,F),[3,1,mesh_NcpElems])
 call Utilities_constitutiveResponse(F,F,temperature,0.0_pReal,P,C,temp33_Real,.false.,math_I3)  ! constitutive response with no deformation in no time to get reference stiffness
 if (restartInc == 1_pInt) then                                                                  ! use initial stiffness as reference stiffness
   temp3333_Real = C
@@ -245,8 +244,7 @@ type(tSolutionState) function &
   C = C_lastInc
 else
   C_lastInc = C
-mesh_ipCoordinates = 0.0_pReal !reshape(mesh_deformedCoordsFFT(geomdim,&
-                               !reshape(F,[3,3,res(1),res(2),res(3)])),[3,1,mesh_NcpElems])
+mesh_ipCoordinates = reshape(mesh_deformedCoordsFFT(geomdim,F),[3,1,mesh_NcpElems])
 
 !--------------------------------------------------------------------------------------------------
 ! calculate rate for aim
@@ -175,8 +175,8 @@ subroutine basicPETSc_init(temperature)
   read (777,rec=1) temp3333_Real
   close (777)
 endif
-mesh_ipCoordinates = 0.0_pReal !reshape(mesh_deformedCoordsFFT(geomdim,&
-                               !reshape(F,[3,3,res(1),res(2),res(3)])),[3,1,mesh_NcpElems])
+mesh_ipCoordinates = reshape(mesh_deformedCoordsFFT(geomdim,reshape(&
+                             F,[3,3,res(1),res(2),res(3)])),[3,1,mesh_NcpElems])
 call Utilities_constitutiveResponse(&
   reshape(F(0:8,0:res(1)-1_pInt,0:res(2)-1_pInt,0:res(3)-1_pInt),[3,3,res(1),res(2),res(3)]),&
   reshape(F(0:8,0:res(1)-1_pInt,0:res(2)-1_pInt,0:res(3)-1_pInt),[3,3,res(1),res(2),res(3)]),&
@@ -264,16 +264,16 @@ type(tSolutionState) function &
   write (777,rec=1) C_lastInc
   close(777)
 endif
-mesh_ipCoordinates = reshape(mesh_deformedCoordsFFT(geomdim,reshape(F,[3,3,res(1),res(2),res(3)])),&
-                             [3,1,mesh_NcpElems])
+mesh_ipCoordinates = reshape(mesh_deformedCoordsFFT(geomdim,reshape(&
+                             F,[3,3,res(1),res(2),res(3)])),[3,1,mesh_NcpElems])
 if ( cutBack) then
   F_aim = F_aim_lastInc
   F = reshape(F_lastInc,[9,res(1),res(2),res(3)])
   C = C_lastInc
 else
   C_lastInc = C
-mesh_ipCoordinates = 0.0_pReal !reshape(mesh_deformedCoordsFFT(geomdim,&
-                               !reshape(F,[3,3,res(1),res(2),res(3)])),[3,1,mesh_NcpElems])
+mesh_ipCoordinates = reshape(mesh_deformedCoordsFFT(geomdim,reshape(&
+                             F,[3,3,res(1),res(2),res(3)])),[3,1,mesh_NcpElems])
 
 !--------------------------------------------------------------------------------------------------
 ! calculate rate for aim
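The Fortran hunks above all revolve around the same statement in the AL, basic and basicPETSc solvers: mesh_ipCoordinates used to be zeroed out while the intended call to mesh_deformedCoordsFFT was left commented, and it is now actually computed from the deformed configuration. For orientation only, a rough, hypothetical NumPy sketch of the array bookkeeping involved (this is not DAMASK code; deformed_coords, the grid resolution and the element count are made-up stand-ins for mesh_deformedCoordsFFT, res and mesh_NcpElems):

import numpy as np

res = (4, 4, 4)                                 # hypothetical grid resolution
n_elems = res[0] * res[1] * res[2]              # one cell/IP per grid point

# deformation gradient field stored flat: 9 components per grid point
F_flat = np.tile(np.eye(3).reshape(9, 1, 1, 1), (1,) + res)

def deformed_coords(geomdim, F):
    """Stand-in for mesh_deformedCoordsFFT: 3 coordinates per grid point."""
    return np.zeros((3,) + F.shape[2:])         # placeholder result, shape (3, *res)

geomdim = (1.0, 1.0, 1.0)

# old (buggy) behaviour: IP coordinates simply zeroed
ip_coords = np.zeros((3, 1, n_elems))

# new behaviour: unflatten F to 3x3 per point, compute deformed coordinates,
# then flatten the result to (3, 1, n_elems) as mesh_ipCoordinates expects
F = F_flat.reshape((3, 3) + res)
ip_coords = deformed_coords(geomdim, F).reshape(3, 1, n_elems)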
@@ -1,105 +0,0 @@
#!/usr/bin/env python

import os,string,re
import damask
from optparse import OptionParser, OptionGroup, Option, SUPPRESS_HELP

# -----------------------------
class extendedOption(Option):
# -----------------------------
  # used for definition of new option parser action 'extend', which enables to take multiple option arguments
  # taken from online tutorial http://docs.python.org/library/optparse.html

  ACTIONS = Option.ACTIONS + ("extend",)
  STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
  TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
  ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)

  def take_action(self, action, dest, opt, value, values, parser):
    if action == "extend":
      lvalue = value.split(",")
      values.ensure_value(dest, []).extend(lvalue)
    else:
      Option.take_action(self, action, dest, opt, value, values, parser)


parser = OptionParser(option_class=extendedOption, usage='%prog [options] datafile[s]', description = """
Writes version specific files for different MARC releases, adjustes the make file for the spectral solver and optionally compiles the spectral solver

""" + string.replace('$Id$','\n','\\n')
)

parser.add_option('-c', '--compile', dest='spectralCompile', action='store_true', \
                  help='compiles the spectral solver [%default]')
parser.add_option('-o', '--options', dest='makeOptions', action='extend', type='string', \
                  metavar="KEY=VALUE", \
                  help='comma-separated list of options passed to Makefile when compiling spectral code %default')
parser.set_defaults(spectralCompile = False)
parser.set_defaults(makeOptions = ['F90=ifort'])

(options, args) = parser.parse_args()

architectures = {
  'marc': {
    'parent': 'DAMASK_marc.f90',
    'versions' : ['%%MARCVERSION%%','2010','2011','2012'],
  },
}

bin_link = { \
  '.' : [
    'DAMASK_spectral.exe',
  ],
}

damaskEnv = damask.Environment('../../')          # script location relative to root
baseDir = damaskEnv.relPath('code/')

for arch in architectures:
  me = architectures[arch]
  try:
    parentFile = open(baseDir+os.sep+me['parent'])
    parentContent = parentFile.readlines()
    parentFile.close()
  except IOError:
    print 'unable to open',me['parent']
    continue

  for version in me['versions'][1:]:
    childFile = open(baseDir+os.sep+version.join(os.path.splitext(me['parent'])),'w')
    for line in parentContent:
      childFile.write(line.replace(me['versions'][0],version))
    childFile.close()

# changing dirs in makefile
makefile = open(os.path.join(baseDir,'Makefile'))
content = makefile.readlines()
makefile.close()
makefile = open(os.path.join(baseDir,'Makefile'),'w')
for line in content:
  m = re.match(r'(FFTW|IMKL|ACML|LAPACK)ROOT\s*\?=',line)
  if m:
    if m.group(1).lower() in damaskEnv.pathInfo:
      substitution = damaskEnv.pathInfo[m.group(1).lower()]
    else:
      substitution = ''
    line = '%sROOT ?= %s\n'%(m.group(1),substitution)
  makefile.write(line)
makefile.close()

# compiling spectral code
if (options.spectralCompile):
  os.system('make --directory %s clean'%(baseDir))
  os.system('make --directory %s %s'%(baseDir,' '.join(options.makeOptions)))

# processing symbolic linking list
for dir in bin_link:
  for file in bin_link[dir]:
    src = os.path.abspath(os.path.join(baseDir,dir,file))
    if os.path.exists(src):
      sym_link = os.path.abspath(os.path.join(damaskEnv.binDir(),\
                                              {True: dir,
                                               False:os.path.splitext(file)[0]}[file == '']))
      if os.path.lexists(sym_link): os.remove(sym_link)
      os.symlink(src,sym_link)
      print sym_link,'-->',src
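Both removed scripts define the same custom optparse action, 'extend', so that one flag can collect several comma-separated values (e.g. -o KEY1=VALUE1,KEY2=VALUE2). A minimal, self-contained sketch of that pattern, with a made-up option purely for illustration:

from optparse import Option, OptionParser

class extendedOption(Option):
    # registers an 'extend' action that splits comma-separated values into a list
    ACTIONS = Option.ACTIONS + ("extend",)
    STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
    TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
    ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)

    def take_action(self, action, dest, opt, value, values, parser):
        if action == "extend":
            values.ensure_value(dest, []).extend(value.split(","))
        else:
            Option.take_action(self, action, dest, opt, value, values, parser)

parser = OptionParser(option_class=extendedOption)
parser.add_option('-o', '--options', dest='makeOptions', action='extend', type='string')
opts, args = parser.parse_args(['-o', 'KEY1=VALUE1,KEY2=VALUE2', '-o', 'KEY3=VALUE3'])
print(opts.makeOptions)   # ['KEY1=VALUE1', 'KEY2=VALUE2', 'KEY3=VALUE3']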
@@ -1,220 +0,0 @@
#!/usr/bin/env python

# Makes postprocessing routines acessible from everywhere.

import os,sys,glob,string,subprocess
from damask import Environment
from optparse import OptionParser, Option


# -----------------------------
class extendableOption(Option):
# -----------------------------
  # used for definition of new option parser action 'extend', which enables to take multiple option arguments
  # taken from online tutorial http://docs.python.org/library/optparse.html

  ACTIONS = Option.ACTIONS + ("extend",)
  STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
  TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
  ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)

  def take_action(self, action, dest, opt, value, values, parser):
    if action == "extend":
      lvalue = value.split(",")
      values.ensure_value(dest, []).extend(lvalue)
    else:
      Option.take_action(self, action, dest, opt, value, values, parser)


########################################################
# MAIN
########################################################

parser = OptionParser(option_class=extendableOption, usage='%prog options', description = """
Sets up the pre and post processing tools of DAMASK

""" + string.replace('$Id$','\n','\\n')
)

compilers = ['intel','ifort','intel32','gfortran','gnu95']

parser.add_option('--F90', '--f90', dest='compiler', type='string', \
                  help='name of F90 compiler')
parser.set_defaults(compiler = {True:'ifort',False:'gfortran'}[\
                    subprocess.call(['which', 'ifort'],\
                    stdout=subprocess.PIPE, stderr=subprocess.PIPE) == 0])
(options,filenames) = parser.parse_args()

if options.compiler not in compilers:
  parser.error('compiler switch "--F90" has to be one out of: %s'%(', '.join(compilers)))

f2py_compiler = {
  'gfortran': 'gnu95 --f90flags="-fno-range-check -xf95-cpp-input -std=f2008 -fall-intrinsics -DSpectral -fdefault-real-8 -fdefault-double-8 -DFLOAT=8 -DINT=4 -I${DAMASK_ROOT}/lib"',
  'gnu95':    'gnu95 --f90flags="-fno-range-check -xf95-cpp-input -std=f2008 -fall-intrinsics -DSpectral -fdefault-real-8 -fdefault-double-8 -DFLOAT=8 -DINT=4 -I${DAMASK_ROOT}/lib"',
  'intel32':  'intel --f90flags="-fpp -stand f03 -diag-disable 5268 -assume byterecl -DSpectral -real-size 64 -integer-size 32 -DFLOAT=8 -DINT=4 -I${DAMASK_ROOT}/lib"',
  'intel':    'intelem --f90flags="-fpp -stand f03 -diag-disable 5268 -assume byterecl -DSpectral -real-size 64 -integer-size 32 -DFLOAT=8 -DINT=4 -I${DAMASK_ROOT}/lib"',
  'ifort':    'intelem --f90flags="-fpp -stand f03 -diag-disable 5268 -assume byterecl -DSpectral -real-size 64 -integer-size 32 -DFLOAT=8 -DINT=4 -I${DAMASK_ROOT}/lib"',
}[options.compiler]
compiler = {
  'gfortran': 'gfortran',
  'gnu95':    'gfortran',
  'intel32':  'ifort',
  'intel':    'ifort',
  'ifort':    'ifort',
}[options.compiler]

damaskEnv = Environment()
baseDir = damaskEnv.relPath('processing/')
codeDir = damaskEnv.relPath('code/')

if 'ikml' in damaskEnv.pathInfo and damaskEnv.pathInfo['ikml'] != '':
  lib_lapack = ''                                                                          # TODO!!
elif 'acml' in damaskEnv.pathInfo and damaskEnv.pathInfo['acml'] != '':
  lib_lapack = '-L%s/%s64/lib -lacml'%(os.path.join(damaskEnv.pathInfo['acml']),compiler)  # can we use linker flag?
elif 'lapack' in damaskEnv.pathInfo and damaskEnv.pathInfo['lapack'] != '':
  lib_lapack = '-L%s -llapack'%(damaskEnv.pathInfo['lapack'])                              # see http://cens.ioc.ee/pipermail/f2py-users/2003-December/000621.html

#define ToDo list
bin_link = { \
  'pre' : [
    'marc_addUserOutput.py',
    'mentat_pbcOnBoxMesh.py',
    'mentat_spectralBox.py',
    'OIMang_hex2cub.py',
    'patchFromReconstructedBoundaries.py',
    'randomSeeding.py',
    'geom_fromAng.py',
    'geom_fromMinimalSurface.py',
    'geom_fromVoronoiTessellation.py',
    'geom_canvas.py',
    'geom_check.py',
    'geom_rescale.py',
    'geom_pack.py',
    'geom_unpack.py',
    'geom_translate.py',
    'geom_vicinityOffset.py',
    'geom_euclideanDistance.py'
  ],
  'post' : [
    '3Dvisualize.py',
    'addCalculation.py',
    'addCauchy.py',
    'addCompatibilityMismatch.py',
    'addCurl.py',
    'addDeformedConfiguration.py',
    'addDeterminant.py',
    'addDeviator.py',
    'addDivergence.py',
    'addEhkl.py',
    'addEuclideanDistance.py',
    'addMises.py',
    'addNorm.py',
    'addPK2.py',
    'addSpectralDecomposition.py',
    'addStrainTensors.py',
    'averageDown.py',
    'binXY.py',
    'blowUp.py',
    'stddevDown.py',
    'deleteColumn.py',
    'deleteInfo.py',
    'filterTable.py',
    'mentat_colorMap.py',
    'nodesFromCentroids.py',
    'perceptualUniformColorMap.py',
    'postResults.py',
    'showTable.py',
    'spectral_parseLog.py',
    'table2ang',
    'tagLabel.py',
  ],
}

compile = { \
  'pre' : [
  ],
  'post' : [
  ],
}


execute = { \
  'coreModule' : [
    'make tidy',
# The following command is used to compile the fortran files and make the functions defined
# in damask.core.pyf available for python in the module core.so
# It uses the fortran wrapper f2py that is included in the numpy package to construct the
# module core.so out of the fortran code in the f90 files
# For the generation of the pyf file use the following lines:
###########################################################################
#'f2py -h damask.core.pyf' +\
#' --overwrite-signature --no-lower prec.f90 DAMASK_spectral_interface.f90 math.f90 mesh.f90',
###########################################################################
    'rm `readlink -f %s`' %(os.path.join(damaskEnv.relPath('lib/damask'),'core.so')),
    'f2py damask.core.pyf' +\
    ' --build-dir ./' +\
    ' -c --no-lower --fcompiler=%s'%(f2py_compiler) +\
    ' %s'%'prec.f90'+\
    ' %s'%'DAMASK_spectral_interface.f90'+\
    ' %s'%'IO.f90'+\
    ' %s'%'numerics.f90'+\
    ' %s'%'debug.f90'+\
    ' %s'%'math.f90'+\
    ' %s'%'FEsolving.f90'+\
    ' %s'%'mesh.f90'+\
    ' %s'%'spectral_quit.f90'+\
    ' -L%s/lib -lfftw3'%(damaskEnv.pathInfo['fftw'])+\
    ' %s'%lib_lapack,
    'mv %s `readlink -f %s`' %(os.path.join(codeDir,'core.so'),os.path.join(damaskEnv.relPath('lib/damask'),'core.so')),
  ]
}


for dir in compile:                                  #there is nothing to compile at the moment
  for file in compile[dir]:
    src = os.path.abspath(os.path.join(baseDir,dir,file))
    if os.path.isfile(src):
      try:
        os.system('rm %s.exe'%(os.path.splitext(src)[0]))
        print 'removing %s.exe '%(os.path.splitext(src)[0])
      except:
        pass
      print 'compiling ',src,'using',compiler
      os.system('%s -O2 -o %s.exe %s'%(compiler,os.path.splitext(src)[0],src))

os.chdir(codeDir)                                    # needed for compilation with gfortran and f2py
for tasks in execute:
  for cmd in execute[tasks]:
    try:
      print 'executing...:',cmd
      os.system(cmd)
    except:
      print 'failed..!'
      pass

os.chdir(damaskEnv.relPath('processing/setup/'))
modules = glob.glob('*.mod')
for module in modules:
  print 'removing', module
  os.remove(module)

for dir in bin_link:
  for file in bin_link[dir]:
    src = os.path.abspath(os.path.join(baseDir,dir,file))
    if (file == ''):
      sym_link = os.path.abspath(os.path.join(damaskEnv.binDir(),dir))
    else:
      sym_link = os.path.abspath(os.path.join(damaskEnv.binDir(),os.path.splitext(file)[0]))
    print sym_link,'-->',src
    if os.path.lexists(sym_link):
      os.remove(sym_link)
    os.symlink(src,sym_link)

#check if compilation of core module was successful
try:
  with open(damaskEnv.relPath('lib/damask/core.so')) as f: pass
except IOError as e:
  print '*********\n* core.so not found, compilation of core modules was not successful\n*********'
  sys.exit()
f.close
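One terse detail in the second removed script is the default-compiler selection, {True:'ifort',False:'gfortran'}[subprocess.call(['which','ifort'], ...) == 0]: it probes whether ifort is on the PATH and falls back to gfortran otherwise. The same logic written out as a small stand-alone sketch (the helper name is hypothetical; probing via which as the script does):

import subprocess

def default_compiler():
    """Return 'ifort' if it is found on the PATH, otherwise 'gfortran'."""
    found = subprocess.call(['which', 'ifort'],
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE) == 0
    return 'ifort' if found else 'gfortran'

print(default_compiler())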