Merge branch 'development' of magit1.mpie.de:damask/DAMASK into miscImprovements

Martin Diehl 2016-09-03 14:35:53 +02:00
commit c05307a273
12 changed files with 474 additions and 298 deletions

View File

@@ -1 +1 @@
v2.0.1-43-g64ac05f
v2.0.1-59-g4b02a55

View File

@@ -150,6 +150,7 @@ program DAMASK_spectral
MPI_file_get_position, &
MPI_file_write, &
MPI_abort, &
MPI_finalize, &
MPI_allreduce, &
PETScFinalize
@@ -230,8 +231,6 @@ program DAMASK_spectral
endif
do j = 1_pInt, 9_pInt
temp_maskVector(j) = IO_stringValue(line,chunkPos,i+j) /= '*' ! true if not a *
enddo
do j = 1_pInt,9_pInt
if (temp_maskVector(j)) temp_valueVector(j) = IO_floatValue(line,chunkPos,i+j) ! read value where applicable
enddo
loadCases(currentLoadCase)%deformation%maskLogical = & ! logical mask in 3x3 notation
@@ -243,8 +242,6 @@ program DAMASK_spectral
temp_valueVector = 0.0_pReal
do j = 1_pInt, 9_pInt
temp_maskVector(j) = IO_stringValue(line,chunkPos,i+j) /= '*' ! true if not an asterisk
enddo
do j = 1_pInt,9_pInt
if (temp_maskVector(j)) temp_valueVector(j) = IO_floatValue(line,chunkPos,i+j) ! read value where applicable
enddo
loadCases(currentLoadCase)%P%maskLogical = transpose(reshape(temp_maskVector,[ 3,3]))
@@ -301,14 +298,14 @@ program DAMASK_spectral
write(6,'(1x,a,i6)') 'load case: ', currentLoadCase
if (.not. loadCases(currentLoadCase)%followFormerTrajectory) &
write(6,'(2x,a)') 'drop guessing along trajectory'
if (loadCases(currentLoadCase)%deformation%myType=='l') then
if (loadCases(currentLoadCase)%deformation%myType == 'l') then
do j = 1_pInt, 3_pInt
if (any(loadCases(currentLoadCase)%deformation%maskLogical(j,1:3) .eqv. .true.) .and. &
any(loadCases(currentLoadCase)%deformation%maskLogical(j,1:3) .eqv. .false.)) &
errorID = 832_pInt ! each row should be either fully or not at all defined
enddo
write(6,'(2x,a)') 'velocity gradient:'
else if (loadCases(currentLoadCase)%deformation%myType=='f') then
else if (loadCases(currentLoadCase)%deformation%myType == 'f') then
write(6,'(2x,a)') 'deformation gradient at end of load case:'
else
write(6,'(2x,a)') 'deformation gradient rate:'
@@ -317,12 +314,12 @@ program DAMASK_spectral
if(loadCases(currentLoadCase)%deformation%maskLogical(i,j)) then
write(6,'(2x,f12.7)',advance='no') loadCases(currentLoadCase)%deformation%values(i,j)
else
write(6,'(2x,12a)',advance='no') ' * '
endif
enddo; write(6,'(/)',advance='no')
enddo
if (any(loadCases(currentLoadCase)%P%maskLogical .eqv. &
loadCases(currentLoadCase)%deformation%maskLogical)) errorID = 831_pInt ! exclusive or masking only
if (any(loadCases(currentLoadCase)%P%maskLogical .and. &
transpose(loadCases(currentLoadCase)%P%maskLogical) .and. &
reshape([ .false.,.true.,.true.,.true.,.false.,.true.,.true.,.true.,.false.],[ 3,3]))) &
@@ -332,12 +329,12 @@ program DAMASK_spectral
if(loadCases(currentLoadCase)%P%maskLogical(i,j)) then
write(6,'(2x,f12.7)',advance='no') loadCases(currentLoadCase)%P%values(i,j)*1e-9_pReal
else
write(6,'(2x,12a)',advance='no') ' * '
endif
enddo; write(6,'(/)',advance='no')
enddo
if (any(abs(math_mul33x33(loadCases(currentLoadCase)%rotation, &
math_transpose33(loadCases(currentLoadCase)%rotation))-math_I3) >&
math_transpose33(loadCases(currentLoadCase)%rotation))-math_I3) > &
reshape(spread(tol_math_check,1,9),[ 3,3]))&
.or. abs(math_det33(loadCases(currentLoadCase)%rotation)) > &
1.0_pReal + tol_math_check) errorID = 846_pInt ! given rotation matrix contains strain
@@ -377,7 +374,7 @@ program DAMASK_spectral
call Polarisation_init
case default
call IO_error(error_ID = 891, ext_msg = trim(spectral_solver))
call IO_error(error_ID = 891_pInt, ext_msg = trim(spectral_solver))
end select
@@ -427,29 +424,30 @@ program DAMASK_spectral
allocate(outputSize(worldsize), source = 0_MPI_OFFSET_KIND)
outputSize(worldrank+1) = size(materialpoint_results,kind=MPI_OFFSET_KIND)*int(pReal,MPI_OFFSET_KIND)
call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_allreduce')
if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_allreduce')
call MPI_file_open(PETSC_COMM_WORLD, &
trim(getSolverWorkingDirectoryName())//trim(getSolverJobName())//'.spectralOut', &
MPI_MODE_WRONLY + MPI_MODE_APPEND, &
MPI_INFO_NULL, &
resUnit, &
ierr)
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_open')
if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_open')
call MPI_file_get_position(resUnit,fileOffset,ierr) ! get offset from header
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_get_position')
if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_get_position')
fileOffset = fileOffset + sum(outputSize(1:worldrank)) ! offset of my process in file (header + processes before me)
call MPI_file_seek (resUnit,fileOffset,MPI_SEEK_SET,ierr)
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_seek')
if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_seek')
if (.not. appendToOutFile) then ! if not restarting, write 0th increment
do i=1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output
outputIndex=int([(i-1_pInt)*((maxRealOut)/materialpoint_sizeResults)+1_pInt, &
min(i*((maxRealOut)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt)
call MPI_file_write(resUnit,reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)),&
[(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), &
(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults,&
do i = 1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output
outputIndex = int([(i-1_pInt)*((maxRealOut)/materialpoint_sizeResults)+1_pInt, &
min(i*((maxRealOut)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt)
call MPI_file_write(resUnit, &
reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)), &
[(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), &
(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults, &
MPI_DOUBLE, MPI_STATUS_IGNORE, ierr)
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_write')
if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_write')
enddo
fileOffset = fileOffset + sum(outputSize) ! forward to current file position
if (worldrank == 0) &
@@ -488,7 +486,7 @@ program DAMASK_spectral
endif
timeinc = timeinc / 2.0_pReal**real(cutBackLevel,pReal) ! depending on cut back level, decrease time step
forwarding: if(totalIncsCounter >= restartInc) then
forwarding: if (totalIncsCounter >= restartInc) then
stepFraction = 0_pInt
!--------------------------------------------------------------------------------------------------
@@ -594,7 +592,7 @@ program DAMASK_spectral
guess,timeinc,timeIncOld,remainingLoadCaseTime)
end select
if(.not. solres(field)%converged) exit ! no solution found
if (.not. solres(field)%converged) exit ! no solution found
enddo
stagIter = stagIter + 1_pInt
stagIterate = stagIter < stagItMax .and. &
@@ -709,6 +707,13 @@ program DAMASK_spectral
call PETScFinalize(ierr); CHKERRQ(ierr)
#ifdef _OPENMP
call MPI_finalize(i)
if (i /= 0_pInt) then
call IO_error(error_ID=894_pInt, el=i, ext_msg='MPI_finalize')
endif
#endif
if (notConvergedCounter > 0_pInt) call quit(3_pInt) ! error if some are not converged
call quit(0_pInt) ! no complaints ;)
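
For orientation, a minimal Python sketch of the chunking arithmetic introduced above: the loop slices materialpoint_results so that a single MPI_file_write call never receives more than maxByteOut bytes (the count argument of an MPI write is a default-size integer). All sizes in the sketch are invented for illustration.

maxByteOut = 2**31 - 1                        # byte limit for one write (assumed value)
pReal = 8                                     # bytes per double-precision real
materialpoint_sizeResults = 100               # reals per material point (assumed)
nPoints = 10000000                            # size(materialpoint_results,3) (assumed)

maxRealOut = maxByteOut // pReal              # reals that fit into one write
pointsPerChunk = maxRealOut // materialpoint_sizeResults

for i in range(1, nPoints // pointsPerChunk + 2):     # same bound as the Fortran do loop
    first = (i - 1) * pointsPerChunk + 1              # 1-based, as in Fortran
    last = min(i * pointsPerChunk, nPoints)
    if first > last: continue                         # final chunk may be empty
    print('chunk {}: points {} to {}'.format(i, first, last))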

View File

@@ -257,10 +257,10 @@ COMPILE_OPTIONS_gfortran :=-DDAMASKVERSION=\"${DAMASKVERSION}\"\
#-Wunsafe-loop-optimizations: warn if the loop cannot be optimized due to nontrivial assumptions.
#-Wstrict-overflow:
DEBUG_OPTIONS_gfortran :=-g\
-fbacktrace\
-fdump-core\
-fcheck=all\
DEBUG_OPTIONS_gfortran :=-g \
-fbacktrace \
-fdump-core \
-fcheck=all \
-ffpe-trap=invalid,zero,overflow
###################################################################################################
@@ -300,37 +300,60 @@ COMPILE =$(OPENMP_FLAG_$(F90)) $(STANDARD_CHECK_$(F90)) $(OPTIMIZATION_$
COMPILE_MAXOPTI =$(OPENMP_FLAG_$(F90)) $(STANDARD_CHECK_$(F90)) $(OPTIMIZATION_$(MAXOPTI)_$(F90)) $(COMPILE_OPTIONS_$(F90)) $(INCLUDE_DIRS) $(PRECISION_$(F90))
###################################################################################################
SOURCE_FILES = \
source_thermal_dissipation.o source_thermal_externalheat.o \
source_damage_isoBrittle.o source_damage_isoDuctile.o source_damage_anisoBrittle.o source_damage_anisoDuctile.o \
source_vacancy_phenoplasticity.o source_vacancy_irradiation.o source_vacancy_thermalfluc.o
source_thermal_dissipation.o \
source_thermal_externalheat.o \
source_damage_isoBrittle.o \
source_damage_isoDuctile.o \
source_damage_anisoBrittle.o \
source_damage_anisoDuctile.o \
source_vacancy_phenoplasticity.o \
source_vacancy_irradiation.o \
source_vacancy_thermalfluc.o
KINEMATICS_FILES = \
kinematics_cleavage_opening.o kinematics_slipplane_opening.o \
kinematics_cleavage_opening.o \
kinematics_slipplane_opening.o \
kinematics_thermal_expansion.o \
kinematics_vacancy_strain.o kinematics_hydrogen_strain.o
kinematics_vacancy_strain.o \
kinematics_hydrogen_strain.o
PLASTIC_FILES = \
plastic_dislotwin.o plastic_disloUCLA.o plastic_isotropic.o \
plastic_phenopowerlaw.o plastic_titanmod.o plastic_nonlocal.o plastic_none.o \
plastic_dislotwin.o \
plastic_disloUCLA.o \
plastic_isotropic.o \
plastic_phenopowerlaw.o \
plastic_titanmod.o \
plastic_nonlocal.o \
plastic_none.o \
plastic_phenoplus.o
THERMAL_FILES = \
thermal_isothermal.o thermal_adiabatic.o thermal_conduction.o
thermal_isothermal.o \
thermal_adiabatic.o \
thermal_conduction.o
DAMAGE_FILES = \
damage_none.o damage_local.o damage_nonlocal.o
damage_none.o \
damage_local.o \
damage_nonlocal.o
VACANCYFLUX_FILES = \
vacancyflux_isoconc.o vacancyflux_isochempot.o vacancyflux_cahnhilliard.o
vacancyflux_isoconc.o \
vacancyflux_isochempot.o \
vacancyflux_cahnhilliard.o
POROSITY_FILES = \
porosity_none.o porosity_phasefield.o
porosity_none.o \
porosity_phasefield.o
HYDROGENFLUX_FILES = \
hydrogenflux_isoconc.o hydrogenflux_cahnhilliard.o
hydrogenflux_isoconc.o \
hydrogenflux_cahnhilliard.o
HOMOGENIZATION_FILES = \
homogenization_RGC.o homogenization_isostrain.o homogenization_none.o
homogenization_RGC.o \
homogenization_isostrain.o \
homogenization_none.o
#####################
# Spectral Solver
@@ -351,11 +374,28 @@ DAMASK_spectral.o: INTERFACENAME := spectral_interface.f90
SPECTRAL_SOLVER_FILES = spectral_mech_AL.o spectral_mech_Basic.o spectral_mech_Polarisation.o \
spectral_thermal.o spectral_damage.o
SPECTRAL_FILES = C_routines.o system_routines.o prec.o DAMASK_interface.o IO.o numerics.o debug.o math.o \
FEsolving.o mesh.o material.o lattice.o \
$(SOURCE_FILES) $(KINEMATICS_FILES) $(PLASTIC_FILES) constitutive.o \
SPECTRAL_FILES = C_routines.o \
system_routines.o \
prec.o \
DAMASK_interface.o \
IO.o \
numerics.o \
debug.o \
math.o \
FEsolving.o \
mesh.o \
material.o \
lattice.o \
$(SOURCE_FILES) \
$(KINEMATICS_FILES) \
$(PLASTIC_FILES) \
constitutive.o \
crystallite.o \
$(THERMAL_FILES) $(DAMAGE_FILES) $(VACANCYFLUX_FILES) $(HYDROGENFLUX_FILES) $(POROSITY_FILES) \
$(THERMAL_FILES) \
$(DAMAGE_FILES) \
$(VACANCYFLUX_FILES) \
$(HYDROGENFLUX_FILES) \
$(POROSITY_FILES) \
$(HOMOGENIZATION_FILES) homogenization.o \
CPFEM2.o \
spectral_utilities.o \
@@ -401,14 +441,31 @@ DAMASK_FEM.exe: INCLUDE_DIRS += -I./
FEM_SOLVER_FILES = FEM_mech.o FEM_thermal.o FEM_damage.o FEM_vacancyflux.o FEM_porosity.o FEM_hydrogenflux.o
FEM_FILES = prec.o DAMASK_interface.o FEZoo.o IO.o numerics.o debug.o math.o \
FEsolving.o mesh.o material.o lattice.o \
$(SOURCE_FILES) $(KINEMATICS_FILES) $(PLASTIC_FILES) constitutive.o \
FEM_FILES = prec.o \
DAMASK_interface.o \
FEZoo.o \
IO.o \
numerics.o \
debug.o \
math.o \
FEsolving.o \
mesh.o \
material.o \
lattice.o \
$(SOURCE_FILES) \
$(KINEMATICS_FILES) \
$(PLASTIC_FILES) \
constitutive.o \
crystallite.o \
$(THERMAL_FILES) $(DAMAGE_FILES) $(VACANCYFLUX_FILES) $(HYDROGENFLUX_FILES) $(POROSITY_FILES) \
$(THERMAL_FILES) \
$(DAMAGE_FILES) \
$(VACANCYFLUX_FILES) \
$(HYDROGENFLUX_FILES) \
$(POROSITY_FILES) \
$(HOMOGENIZATION_FILES) homogenization.o \
CPFEM.o \
FEM_utilities.o $(FEM_SOLVER_FILES)
FEM_utilities.o \
$(FEM_SOLVER_FILES)
DAMASK_FEM.exe: DAMASK_FEM_driver.o
$(PREFIX) $(LINKERNAME) $(OPENMP_FLAG_$(F90)) $(LINK_OPTIONS_$(F90)) $(STANDARD_CHECK_$(F90)) $(OPTIMIZATION_$(MAXOPTI)_$(F90)) \
@@ -658,8 +715,8 @@ tidy:
@rm -rf *.inst.f90 # for instrumentation
@rm -rf *.pomp.f90 # for instrumentation
@rm -rf *.pp.f90 # for instrumentation
@rm -rf *.pdb # for instrumnentation
@rm -rf *.opari.inc # for instrumnentation
@rm -rf *.pdb # for instrumentation
@rm -rf *.opari.inc # for instrumentation
.PHONY: cleanDAMASK
cleanDAMASK:

View File

@@ -11,7 +11,15 @@ bin_link = { \
],
}
MarcReleases =[2011,2012,2013,2013.1,2014,2014.2,2015]
MarcReleases =[ \
'2011',
'2012',
'2013',
'2013.1',
'2014',
'2014.2',
'2015',
]
damaskEnv = damask.Environment()
baseDir = damaskEnv.relPath('code/')
@@ -20,27 +28,41 @@ binDir = damaskEnv.options['DAMASK_BIN']
if not os.path.isdir(binDir):
os.mkdir(binDir)
for dir in bin_link:
for file in bin_link[dir]:
src = os.path.abspath(os.path.join(baseDir,dir,file))
if os.path.exists(src):
sym_link = os.path.abspath(os.path.join(binDir,\
{True: dir,
False:os.path.splitext(file)[0]}[file == '']))
if os.path.lexists(sym_link): os.remove(sym_link)
os.symlink(src,sym_link)
sys.stdout.write(sym_link+' -> '+src+'\n')
sys.stdout.write('\nsymbolic linking...\n')
for subDir in bin_link:
theDir = os.path.abspath(os.path.join(baseDir,subDir))
sys.stdout.write('\n'+binDir+' ->\n'+theDir+damask.util.deemph(' ...')+'\n')
for theFile in bin_link[subDir]:
theName,theExt = os.path.splitext(theFile)
src = os.path.abspath(os.path.join(theDir,theFile))
if os.path.exists(src):
sym_link = os.path.abspath(os.path.join(binDir,subDir if theFile == '' else theName))
if os.path.lexists(sym_link):
os.remove(sym_link)
output = theName+damask.util.deemph(theExt)
else:
output = damask.util.emph(theName)+damask.util.deemph(theExt)
sys.stdout.write(damask.util.deemph('... ')+output+'\n')
os.symlink(src,sym_link)
sys.stdout.write('\nMSC.Marc versioning...\n\n')
theMaster = 'DAMASK_marc.f90'
for version in MarcReleases:
src = os.path.abspath(os.path.join(baseDir,'DAMASK_marc.f90'))
src = os.path.abspath(os.path.join(baseDir,theMaster))
if os.path.exists(src):
sym_link = os.path.abspath(os.path.join(baseDir,'DAMASK_marc'+str(version)+'.f90'))
sym_link = os.path.abspath(os.path.join(baseDir,'DAMASK_marc{}.f90'.format(version)))
if os.path.lexists(sym_link):
os.remove(sym_link)
sys.stdout.write(sym_link)
output = version
else:
sys.stdout.write(damask.util.emph(sym_link))
output = damask.util.emph(version)
os.symlink(src,sym_link)
sys.stdout.write(' -> '+src+'\n')
sys.stdout.write(' '+output+'\n')
os.symlink(theMaster,sym_link)
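
The refreshed linking code above follows one pattern throughout: test with os.path.lexists (which, unlike os.path.exists, also detects a broken link), remove any stale link, re-create it, and emphasize the name in the report only when the link is new. A self-contained sketch of that pattern; the file names are placeholders, not taken from the repository layout.

import os

src = 'master.f90'                            # link target (placeholder)
link = 'alias.f90'                            # link name (placeholder)

existed = os.path.lexists(link)               # True even if the old link is broken
if existed:
    os.remove(link)                           # replace instead of failing in os.symlink
os.symlink(src, link)
print(('updated ' if existed else 'created ') + link + ' -> ' + src)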

View File

@@ -13,23 +13,47 @@ if not os.path.isdir(binDir):
os.mkdir(binDir)
#define ToDo list
processing_subDirs = ['pre','post','misc',]
processing_extensions = ['.py','.sh',]
processing_subDirs = ['pre',
'post',
'misc',
]
processing_extensions = ['.py',
'.sh',
]
sys.stdout.write('\nsymbolic linking...\n')
for subDir in processing_subDirs:
theDir = os.path.abspath(os.path.join(baseDir,subDir))
sys.stdout.write('\n'+binDir+' ->\n'+theDir+damask.util.deemph(' ...')+'\n')
for theFile in os.listdir(theDir):
if os.path.splitext(theFile)[1] in processing_extensions: # only consider files with proper extensions
theName,theExt = os.path.splitext(theFile)
if theExt in processing_extensions: # only consider files with proper extensions
src = os.path.abspath(os.path.join(theDir,theFile))
sym_link = os.path.abspath(os.path.join(binDir,os.path.splitext(theFile)[0]))
sym_link = os.path.abspath(os.path.join(binDir,theName))
if os.path.lexists(sym_link):
os.remove(sym_link)
sys.stdout.write(sym_link)
output = theName+damask.util.deemph(theExt)
else:
sys.stdout.write(damask.util.emph(sym_link))
output = damask.util.emph(theName)+damask.util.deemph(theExt)
sys.stdout.write(damask.util.deemph('... ')+output+'\n')
os.symlink(src,sym_link)
sys.stdout.write(' -> '+src+'\n')
sys.stdout.write('\npruning broken links...\n')
brokenLinks = 0
for filename in os.listdir(binDir):
path = os.path.join(binDir,filename)
if os.path.islink(path) and not os.path.exists(path):
sys.stdout.write(' '+damask.util.delete(path)+'\n')
os.remove(path)
brokenLinks += 1
sys.stdout.write(('none.' if brokenLinks == 0 else '')+'\n')
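
The new pruning pass is built on a standard idiom: for a dangling symlink, os.path.islink is true (it inspects the link itself) while os.path.exists is false (it follows the link to its missing target). Isolated, with a placeholder directory:

import os

binDir = '/tmp/damask-bin'                    # placeholder directory for the sketch
if not os.path.isdir(binDir):
    os.mkdir(binDir)

brokenLinks = 0
for filename in os.listdir(binDir):
    path = os.path.join(binDir, filename)
    if os.path.islink(path) and not os.path.exists(path):   # link present, target gone
        os.remove(path)
        brokenLinks += 1
print('pruned {} broken link(s)'.format(brokenLinks))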

View File

@@ -17,40 +17,47 @@ class Test():
variants = []
def __init__(self,test_description):
def __init__(self,description = ''):
logger = logging.getLogger()
logger.setLevel(0)
fh = logging.FileHandler('test.log') # create file handler which logs even debug messages
fh.setLevel(logging.DEBUG)
full = logging.Formatter('%(asctime)s - %(levelname)s: \n%(message)s')
fh.setFormatter(full)
fh.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s: \n%(message)s'))
ch = logging.StreamHandler(stream=sys.stdout) # create console handler with a higher log level
ch.setLevel(logging.INFO)
# create formatter and add it to the handlers
plain = logging.Formatter('%(message)s')
ch.setFormatter(plain)
# add the handlers to the logger
ch.setFormatter(logging.Formatter('%(message)s'))
logger = logging.getLogger()
logger.addHandler(fh)
logger.addHandler(ch)
logger.setLevel(0)
logging.info('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n' \
+'----------------------------------------------------------------\n' \
+'| '+test_description+'\n' \
+'----------------------------------------------------------------')
logging.info('\n'.join(['+'*40,
'-'*40,
'| '+description,
'-'*40,
]))
self.dirBase = os.path.dirname(os.path.realpath(sys.modules[self.__class__.__module__].__file__))
self.parser = OptionParser(
description = test_description+' (using class: {})'.format(damask.version),
usage='./test.py [options]')
self.updateRequested = False
self.parser.add_option("-d", "--debug", action="store_true",\
dest="debug",\
help="debug run, don't calculate but use existing results")
self.parser.add_option("-p", "--pass", action="store_true",\
dest="accept",\
help="calculate results but always consider test as successfull")
self.parser.set_defaults(debug=False,
accept=False)
self.parser = OptionParser(description = '{} (using class: {})'.format(description,damask.version),
usage = './test.py [options]')
self.parser.add_option("-d", "--debug",
action = "store_true",
dest = "debug",
help = "debug run, don't calculate but use existing results")
self.parser.add_option("-p", "--pass",
action = "store_true",
dest = "accept",
help = "calculate results but always consider test as successfull")
self.parser.add_option("-u", "--update",
action = "store_true",
dest = "update",
help = "use current test results as new reference"
)
self.parser.set_defaults(debug = False,
accept = False,
update = False,
)
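
The constructor above attaches two handlers with different thresholds to one root logger: the file handler records everything down to DEBUG, while the console handler passes only INFO and above. The mechanism in isolation:

import logging, sys

logger = logging.getLogger()
logger.setLevel(0)                            # root passes everything; handlers filter

fh = logging.FileHandler('test.log')          # file handler logs even debug messages
fh.setLevel(logging.DEBUG)
fh.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s: \n%(message)s'))

ch = logging.StreamHandler(stream=sys.stdout) # console handler with a higher log level
ch.setLevel(logging.INFO)
ch.setFormatter(logging.Formatter('%(message)s'))

logger.addHandler(fh)
logger.addHandler(ch)

logging.debug('appears in test.log only')
logging.info('appears in test.log and on stdout')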
def execute(self):
"""Run all variants and report first failure."""
@@ -65,15 +72,17 @@ class Test():
return variant+1 # return culprit
return 0
else:
if not self.testPossible(): return -1
if not self.feasible(): return -1
self.clean()
self.prepareAll()
for variant in xrange(len(self.variants)):
for variant,name in enumerate(self.variants):
try:
self.prepare(variant)
self.run(variant)
self.postprocess(variant)
if self.updateRequested: # update requested
if self.options.update: # update requested
self.update(variant)
elif not (self.options.accept or self.compare(variant)): # no update, do comparison
return variant+1 # return culprit
@@ -82,8 +91,8 @@ class Test():
return variant+1 # return culprit
return 0
def testPossible(self):
"""Check if test is possible or not (e.g. no license available)."""
def feasible(self):
"""Check whether test is possible or not (e.g. no license available)."""
return True
def clean(self):
@@ -458,17 +467,13 @@ class Test():
columns = [None], # list of list of column labels (per file)
rtol = 1e-5,
atol = 1e-8,
preFilter = -1.0,
postFilter = -1.0,
debug = False):
"""
compare tables with np.allclose
threshold can be used to ignore small values (a negative number disables this feature)
"""
"""compare tables with np.allclose"""
if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested
files = [str(files)]
if len(files) < 2: return True # single table is always close to itself...
tables = [damask.ASCIItable(name = filename,readonly = True) for filename in files]
for table in tables:
table.head_read()
@@ -477,7 +482,7 @@ class Test():
columns = columns[:len(files)] # truncate to same length as files
for i,column in enumerate(columns):
if column is None: columns[i] = tables[i].labels(raw = True) # if no column is given, read all
if column is None: columns[i] = tables[i].labels(raw = True) # if no column is given, use all
logging.info('comparing ASCIItables')
for i in xrange(len(columns)):
@@ -487,37 +492,37 @@ class Test():
)
logging.info(files[i]+':'+','.join(columns[i]))
if len(files) < 2: return True # single table is always close to itself...
maximum = np.zeros(len(columns[0]),dtype='f')
data = []
for table,labels in zip(tables,columns):
# peek into the ASCII table to figure out real table size
# the cryptic table header does not share the same size as real
# table
table.data_readArray(columns[0])
maximum = np.zeros(table.data.shape[1], dtype='f')
data = [] # list of feature table extracted from each file (ASCII table)
for table, labels in zip(tables, columns):
table.data_readArray(labels)
data.append(np.where(np.abs(table.data)<preFilter,np.zeros_like(table.data),table.data))
maximum += np.abs(table.data).max(axis=0)
for label in labels:
idx = table.label_indexrange(label)
maximum[idx] = np.maximum(maximum[idx],
np.amax(np.linalg.norm(table.data[:,idx],axis=1)))
data.append(table.data)
table.close()
maximum /= len(tables)
maximum = np.where(maximum >0.0, maximum, 1) # avoid div by zero for empty columns
maximum = np.where(maximum > 0.0, maximum, 1) # avoid div by zero for empty columns
# normalize each table
for i in xrange(len(data)):
data[i] /= maximum
mask = np.zeros_like(table.data,dtype='bool')
for table in data:
mask |= np.where(np.abs(table)<postFilter,True,False) # mask out (all) tiny values
if debug:
logging.debug(str(maximum))
allclose = np.absolute(data[0]-data[1]) <= (atol + rtol*np.absolute(data[1]))
for ok,valA,valB in zip(allclose,data[0],data[1]):
logging.debug('{}:\n {}\n{}\n'.format(ok,valA,valB))
allclose = True # start optimistic
for i in xrange(1,len(data)):
if debug:
t0 = np.where(mask,0.0,data[i-1])
t1 = np.where(mask,0.0,data[i ])
j = np.argmin(np.abs(t1)*rtol+atol-np.abs(t0-t1))
logging.info('{:f}'.format(np.amax(np.abs(t0-t1)/(np.abs(t1)*rtol+atol))))
logging.info('{:f} {:f}'.format((t0*maximum).flatten()[j],(t1*maximum).flatten()[j]))
allclose &= np.allclose(np.where(mask,0.0,data[i-1]),
np.where(mask,0.0,data[i ]),rtol,atol) # accumulate "pessimism"
allclose &= np.allclose(data[i-1],data[i],rtol,atol) # accumulate "pessimism"
return allclose
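
The reworked compare_Tables normalizes every column by the largest magnitude seen for it across all tables before accumulating np.allclose over consecutive table pairs, so columns of very different scale are weighted comparably. A simplified sketch (plain per-column maxima stand in for the per-label vector norms the real method computes via label_indexrange):

import numpy as np

rtol, atol = 1e-5, 1e-8                       # defaults from the signature above
a = np.random.rand(10, 4)
tables = [a, a + 1e-9]                        # stand-ins for the ASCIItable data arrays

maximum = np.zeros(a.shape[1])
for data in tables:                           # largest magnitude per column, over all tables
    maximum = np.maximum(maximum, np.abs(data).max(axis=0))
maximum = np.where(maximum > 0.0, maximum, 1.0)            # avoid div by zero for empty columns

allclose = True                                            # start optimistic
for i in range(1, len(tables)):
    allclose &= np.allclose(tables[i-1] / maximum,
                            tables[i]   / maximum, rtol, atol)  # accumulate "pessimism"
print(allclose)                                            # True: within tolerance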
@@ -545,14 +550,16 @@ class Test():
def report_Success(self,culprit):
ret = culprit
if culprit == 0:
logging.critical(('The test' if len(self.variants) == 1 else 'All {} tests'.format(len(self.variants))) + ' passed')
logging.critical('\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n')
return 0
if culprit == -1:
logging.warning('Warning: Could not start test')
return 0
msg = 'The test passed' if len(self.variants) == 1 \
else 'All {} tests passed.'.format(len(self.variants))
elif culprit == -1:
msg = 'Warning: Could not start test...'
ret = 0
else:
logging.critical(' ********\n * Test {} failed...\n ********'.format(culprit))
logging.critical('\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n')
return culprit
msg = ' * Test "{}" failed.'.format(self.variants[culprit-1])
logging.critical('\n'.join(['*'*40,msg,'*'*40]) + '\n')
return ret

View File

@@ -20,6 +20,7 @@ class bcolors:
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
DIM = '\033[2m'
UNDERLINE = '\033[4m'
def disable(self):
@@ -70,9 +71,19 @@ def report_geom(info,
# -----------------------------
def emph(what):
"""emphasizes string on screen"""
"""boldens string"""
return bcolors.BOLD+srepr(what)+bcolors.ENDC
# -----------------------------
def deemph(what):
"""dims string"""
return bcolors.DIM+srepr(what)+bcolors.ENDC
# -----------------------------
def delete(what):
"""dims string"""
return bcolors.DIM+srepr(what)+bcolors.ENDC
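
emph, deemph, and the new delete all wrap a string in ANSI SGR escape sequences and reset afterwards; DIM (\033[2m) is the code added to bcolors above. Stripped of the class, the mechanism is:

BOLD, DIM, ENDC = '\033[1m', '\033[2m', '\033[0m'

def emph(what):                               # boldens string
    return BOLD + str(what) + ENDC

def deemph(what):                             # dims string
    return DIM + str(what) + ENDC

print(emph('created') + ' ' + deemph('.py'))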
# -----------------------------
def execute(cmd,
streamIn = None,

View File

@@ -2,6 +2,7 @@
# -*- coding: UTF-8 no BOM -*-
import os,sys
import math # noqa
import numpy as np
from optparse import OptionParser
import damask
@@ -14,7 +15,7 @@ scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Replace all rows for which column 'label' has identical values by a single row containing their average.
Apply a user-specified function to condense all rows for which column 'label' has identical values into a single row.
Output table will contain as many rows as there are different (unique) values in the grouping column.
Examples:
@@ -25,11 +26,33 @@ parser.add_option('-l','--label',
dest = 'label',
type = 'string', metavar = 'string',
help = 'column label for grouping rows')
parser.add_option('-f','--function',
dest = 'function',
type = 'string', metavar = 'string',
help = 'mapping function [%default]')
parser.add_option('-a','--all',
dest = 'all',
action = 'store_true',
help = 'apply mapping function also to grouping column')
parser.set_defaults(function = 'np.average')
(options,filenames) = parser.parse_args()
funcModule,funcName = options.function.split('.')
try:
mapFunction = getattr(locals().get(funcModule) or
globals().get(funcModule) or
__import__(funcModule),
funcName)
except:
mapFunction = None
if options.label is None:
parser.error('no grouping column specified.')
if not hasattr(mapFunction,'__call__'):
parser.error('function "{}" is not callable.'.format(options.function))
# --- loop over input files -------------------------------------------------------------------------
@@ -38,10 +61,6 @@ if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name,
outname = os.path.join(
os.path.split(name)[0],
options.label+'_averaged_'+os.path.split(name)[1]
) if name else name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
@@ -53,6 +72,8 @@ for name in filenames:
damask.util.croak('column {} is not of scalar dimension.'.format(options.label))
table.close(dismiss = True) # close ASCIItable and remove empty file
continue
else:
grpColumn = table.label_index(options.label)
# ------------------------------------------ assemble info ---------------------------------------
@@ -64,17 +85,17 @@ for name in filenames:
table.data_readArray()
rows,cols = table.data.shape
table.data = table.data[np.lexsort([table.data[:,table.label_index(options.label)]])]
table.data = table.data[np.lexsort([table.data[:,grpColumn]])] # sort data by grpColumn
values,index = np.unique(table.data[:,table.label_index(options.label)], return_index = True)
index = np.append(index,rows)
avgTable = np.empty((len(values), cols))
values,index = np.unique(table.data[:,grpColumn], return_index = True) # unique grpColumn values and their positions
index = np.append(index,rows) # add termination position
grpTable = np.empty((len(values), cols)) # initialize output
for j in xrange(cols) :
for i in xrange(len(values)) :
avgTable[i,j] = np.average(table.data[index[i]:index[i+1],j])
for i in xrange(len(values)): # iterate over groups (unique values in grpColumn)
grpTable[i] = np.apply_along_axis(mapFunction,0,table.data[index[i]:index[i+1]]) # apply mapping function
if not options.all: grpTable[i,grpColumn] = table.data[index[i],grpColumn] # restore grouping column value
table.data = avgTable
table.data = grpTable
# ------------------------------------------ output result -------------------------------
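
The generalized script condenses each group in three steps: sort rows by the grouping column, locate group starts with np.unique(return_index=True), and reduce each slice with the user-supplied function through np.apply_along_axis (np.average by default). The same mechanics on a toy array:

import numpy as np

data = np.array([[1., 10., 100.],
                 [2., 20., 200.],
                 [1., 30., 300.]])
grpColumn = 0
mapFunction = np.average                                   # the script's default

data = data[np.lexsort([data[:, grpColumn]])]              # sort rows by grouping column
values, index = np.unique(data[:, grpColumn], return_index=True)
index = np.append(index, data.shape[0])                    # add termination position

grpTable = np.empty((len(values), data.shape[1]))
for i in range(len(values)):                               # iterate over groups
    grpTable[i] = np.apply_along_axis(mapFunction, 0, data[index[i]:index[i+1]])
    grpTable[i, grpColumn] = data[index[i], grpColumn]     # restore grouping column (no --all)

print(grpTable)                                            # [[1. 20. 200.], [2. 20. 200.]]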

View File

@@ -3,6 +3,7 @@
import os,vtk
import damask
import numpy as np
from collections import defaultdict
from optparse import OptionParser
@@ -37,12 +38,17 @@ parser.add_option('-v', '--vector',
dest = 'vector',
action = 'extend', metavar = '<string LIST>',
help = 'vector value label(s)')
parser.add_option('-t', '--tensor',
dest = 'tensor',
action = 'extend', metavar = '<string LIST>',
help = 'tensor (3x3) value label(s)')
parser.add_option('-c', '--color', dest='color', action='extend',
metavar ='<string LIST>',
help = 'RGB color tuples')
parser.set_defaults(scalar = [],
vector = [],
tensor = [],
color = [],
inplace = False,
render = False,
@@ -97,6 +103,7 @@ for name in filenames:
for datatype,dimension,label in [['scalar',1,options.scalar],
['vector',3,options.vector],
['tensor',9,options.tensor],
['color',3,options.color],
]:
for i,dim in enumerate(table.label_dimension(label)):
@@ -107,7 +114,7 @@ for name in filenames:
remarks.append('adding {} "{}"...'.format(datatype,me))
active[datatype].append(me)
if datatype in ['scalar','vector']: VTKarray[me] = vtk.vtkDoubleArray()
if datatype in ['scalar','vector', 'tensor']: VTKarray[me] = vtk.vtkDoubleArray()
elif datatype == 'color': VTKarray[me] = vtk.vtkUnsignedCharArray()
VTKarray[me].SetNumberOfComponents(dimension)
@@ -125,10 +132,14 @@ for name in filenames:
for datatype,labels in active.items(): # loop over scalar,color
for me in labels: # loop over all requested items
theData = [table.data[i] for i in table.label_indexrange(me)] # read strings
if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData))
elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData))
elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0]))
theData = [float(table.data[i]) for i in table.label_indexrange(me)] # read strings
if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*x),theData))
elif datatype == 'scalar': VTKarray[me].InsertNextValue(theData[0])
elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*theData)
elif datatype == 'tensor': VTKarray[me].InsertNextTuple9(*0.5*(np.array(theData)+
np.array(theData) \
.reshape(3,3).T \
.reshape(9)))
table.input_close() # close input ASCII table
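
The tensor option added above stores only the symmetric part of each 3x3 tensor: the nine values of a row-major flattened tensor T are averaged with those of its transpose, 0.5*(T + T^T), before being handed to InsertNextTuple9. In isolation, with invented values:

import numpy as np

theData = np.arange(9, dtype=float)                        # flattened 3x3 tensor, row-major
sym = 0.5 * (theData + theData.reshape(3, 3).T.reshape(9)) # symmetric part, still flat
# the script then calls VTKarray[me].InsertNextTuple9(*sym)
print(sym.reshape(3, 3))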

View File

@@ -3,6 +3,7 @@
import os,vtk
import damask
import numpy as np
from collections import defaultdict
from optparse import OptionParser
@@ -38,6 +39,10 @@ parser.add_option('-v', '--vector',
dest = 'vector',
action = 'extend', metavar = '<string LIST>',
help = 'vector value label(s)')
parser.add_option('-t', '--tensor',
dest = 'tensor',
action = 'extend', metavar = '<string LIST>',
help = 'tensor (3x3) value label(s)')
parser.add_option('-c', '--color',
dest = 'color',
action = 'extend', metavar = '<string LIST>',
@@ -45,6 +50,7 @@ parser.add_option('-c', '--color',
parser.set_defaults(scalar = [],
vector = [],
tensor = [],
color = [],
inplace = False,
render = False,
@@ -95,6 +101,7 @@ for name in filenames:
for datatype,dimension,label in [['scalar',1,options.scalar],
['vector',3,options.vector],
['tensor',9,options.tensor],
['color',3,options.color],
]:
for i,dim in enumerate(table.label_dimension(label)):
@@ -105,7 +112,7 @@ for name in filenames:
remarks.append('adding {} "{}"...'.format(datatype,me))
active[datatype].append(me)
if datatype in ['scalar','vector']: VTKarray[me] = vtk.vtkDoubleArray()
if datatype in ['scalar','vector','tensor']: VTKarray[me] = vtk.vtkDoubleArray()
elif datatype == 'color': VTKarray[me] = vtk.vtkUnsignedCharArray()
VTKarray[me].SetNumberOfComponents(dimension)
@@ -128,8 +135,12 @@ for name in filenames:
for me in labels: # loop over all requested items
theData = [table.data[i] for i in table.label_indexrange(me)] # read strings
if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData))
elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData))
elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0]))
elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData))
elif datatype == 'tensor': VTKarray[me].InsertNextTuple9(*0.5*(np.array(theData)+
np.array(theData) \
.reshape(3,3).T \
.reshape(9)))
table.close() # close input ASCII table

View File

@@ -79,9 +79,9 @@ for name in filenames:
coords = [np.unique(table.data[:,i]) for i in xrange(3)]
if options.mode == 'cell':
coords = [0.5 * np.array([3.0 * coords[i][0] - coords[i][0 + len(coords[i]) > 1]] + \
coords = [0.5 * np.array([3.0 * coords[i][0] - coords[i][0 + int(len(coords[i]) > 1)]] + \
[coords[i][j-1] + coords[i][j] for j in xrange(1,len(coords[i]))] + \
[3.0 * coords[i][-1] - coords[i][-1 - (len(coords[i]) > 1)]]) for i in xrange(3)]
[3.0 * coords[i][-1] - coords[i][-1 - int(len(coords[i]) > 1)]]) for i in xrange(3)]
grid = np.array(map(len,coords),'i')
N = grid.prod() if options.mode == 'point' else (grid-1).prod()
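
The corrected expression reconstructs N+1 cell boundaries from N cell-center coordinates per axis: interior boundaries are midpoints of neighboring centers, and the two outer ones are extrapolated, e.g. 0.5*(3*x[0] - x[1]). The int() casts pin down index arithmetic that previously leaned on Python's bool-to-int coercion and on operator precedence; for a single-center axis the index falls back to 0. A sketch for one axis, with invented coordinates:

import numpy as np

x = np.array([0.5, 1.5, 3.0])                              # cell-center positions (assumed)
n = len(x)
first = 0.5 * (3.0 * x[0]  - x[0 + int(n > 1)])            # extrapolated lower boundary
last  = 0.5 * (3.0 * x[-1] - x[-1 - int(n > 1)])           # extrapolated upper boundary
bounds = np.concatenate(([first],
                         0.5 * (x[:-1] + x[1:]),           # midpoints between neighbors
                         [last]))
print(bounds)                                              # [0.  1.  2.25  3.75]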

View File

@@ -30,30 +37,37 @@ parser.add_option('-s', '--substitute',
dest = 'substitute',
action = 'extend', metavar = '<string LIST>',
help = 'substitutions of microstructure indices from,to,from,to,...')
parser.add_option('--float',
dest = 'real',
action = 'store_true',
help = 'use float input')
parser.set_defaults(origin = (0.0,0.0,0.0),
microstructure = 0,
substitute = [],
real = False,
)
(options, filenames) = parser.parse_args()
datatype = 'f' if options.real else 'i'
sub = {}
for i in xrange(len(options.substitute)/2): # split substitution list into "from" -> "to"
sub[int(options.substitute[i*2])] = int(options.substitute[i*2+1])
# --- loop over input files ----------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False, labeled = False)
try: table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name)
# --- interpret header ---------------------------------------------------------------------------
table.head_read()
info,extra_header = table.head_getGeom()
@@ -73,9 +80,9 @@ for name in filenames:
table.close(dismiss = True)
continue
# --- read data ----------------------------------------------------------------------------------
microstructure = table.microstructure_read(info['grid']) # read microstructure
microstructure = table.microstructure_read(info['grid'],datatype) # read microstructure
# --- do work ------------------------------------------------------------------------------------
@ -90,9 +97,9 @@ for name in filenames:
substituted += options.microstructure # shift microstructure indices
newInfo['origin'] = info['origin'] + options.origin
newInfo['microstructures'] = substituted.max()
newInfo['microstructures'] = len(np.unique(substituted))
# --- report -------------------------------------------------------------------------------------
remarks = []
if (any(newInfo['origin'] != info['origin'])):
@ -101,7 +108,7 @@ for name in filenames:
remarks.append('--> microstructures: %i'%newInfo['microstructures'])
if remarks != []: damask.util.croak(remarks)
# --- write header -------------------------------------------------------------------------------
table.labels_clear()
table.info_clear()
@@ -116,12 +123,12 @@ for name in filenames:
])
table.head_write()
# --- write microstructure information -----------------------------------------------------------
formatwidth = int(math.floor(math.log10(microstructure.max())+1))
format = '%g' if options.real else '%{}i'.format(int(math.floor(math.log10(microstructure.max())+1)))
table.data = substituted.reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose()
table.data_writeArray('%%%ii'%(formatwidth),delimiter = ' ')
table.data_writeArray(format,delimiter = ' ')
# --- output finalization ------------------------------------------------------------------------
table.close() # close ASCII table
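
geom_translate now also accepts float-valued microstructure data (--float switches the read datatype and the output format) and reports the microstructure count as the number of distinct indices, len(np.unique(...)), instead of the maximum index, which over-counts sparse numbering. The substitution table itself is a plain dict built from the paired from,to list; a toy sketch of both points (the masked-assignment loop mirrors the script's substitution step, which lies outside the hunks shown):

import numpy as np

microstructure = np.array([1, 4, 4, 9])                    # toy index field
substitute = ['4', '9']                                    # parsed --substitute list: from,to,...

sub = {}
for i in range(len(substitute) // 2):                      # split substitution list into "from" -> "to"
    sub[int(substitute[i*2])] = int(substitute[i*2+1])

substituted = microstructure.copy()
for k, v in sub.items():
    substituted[microstructure == k] = v                   # masked assignment per mapping entry

print(substituted)                                         # [1 9 9 9]
print(len(np.unique(substituted)))                         # 2 microstructures; .max() would report 9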