From 759126b906f7e9e165fe4757893eff64ea2d6e83 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 2 Jul 2016 13:28:32 +0200 Subject: [PATCH 01/88] compatible to python3 --- lib/damask/__init__.py | 2 +- lib/damask/asciitable.py | 40 ++++++++++++++++++++------------- lib/damask/colormaps.py | 24 ++++++++++---------- lib/damask/config/material.py | 10 ++++----- lib/damask/environment.py | 2 +- lib/damask/geometry/geometry.py | 2 +- lib/damask/solver/marc.py | 4 ++-- lib/damask/solver/solver.py | 2 +- 8 files changed, 47 insertions(+), 39 deletions(-) diff --git a/lib/damask/__init__.py b/lib/damask/__init__.py index 3edaa9efa..e84cbaa17 100644 --- a/lib/damask/__init__.py +++ b/lib/damask/__init__.py @@ -12,7 +12,7 @@ from .config import Material # noqa from .colormaps import Colormap, Color # noqa try: from .corientation import Quaternion, Rodrigues, Symmetry, Orientation # noqa - print "Import Cython version of Orientation module" + print("Import Cython version of Orientation module") except: from .orientation import Quaternion, Rodrigues, Symmetry, Orientation # noqa #from .block import Block # only one class diff --git a/lib/damask/asciitable.py b/lib/damask/asciitable.py index 4fe4f9156..210d596ec 100644 --- a/lib/damask/asciitable.py +++ b/lib/damask/asciitable.py @@ -3,6 +3,14 @@ import os,sys import numpy as np +# ------------------------------------------------------------------ +# python 3 has no unicode object, this ensures that the code works on Python 2&3 +try: + test=isinstance('test', unicode) +except(NameError): + unicode=str + +# ------------------------------------------------------------------ class ASCIItable(): """Read and write to ASCII tables""" @@ -158,12 +166,12 @@ class ASCIItable(): if self.__IO__['labeled']: # table features labels - self.info = [self.__IO__['in'].readline().strip() for i in xrange(1,int(m.group(1)))] + self.info = [self.__IO__['in'].readline().strip() for i in range(1,int(m.group(1)))] self.tags = shlex.split(self.__IO__['in'].readline()) # store tags found in last line else: - self.info = [self.__IO__['in'].readline().strip() for i in xrange(0,int(m.group(1)))] # all header is info ... + self.info = [self.__IO__['in'].readline().strip() for i in range(0,int(m.group(1)))] # all header is info ... 
else: # other table format try: @@ -224,11 +232,11 @@ class ASCIItable(): extra_header = [] for header in self.info: - headitems = map(str.lower,header.split()) + headitems = list(map(str.lower,header.split())) if len(headitems) == 0: continue # skip blank lines - if headitems[0] in mappings.keys(): - if headitems[0] in identifiers.keys(): - for i in xrange(len(identifiers[headitems[0]])): + if headitems[0] in list(mappings.keys()): + if headitems[0] in list(identifiers.keys()): + for i in range(len(identifiers[headitems[0]])): info[headitems[0]][i] = \ mappings[headitems[0]](headitems[headitems.index(identifiers[headitems[0]][i])+1]) else: @@ -415,9 +423,9 @@ class ASCIItable(): start = self.label_index(labels) dim = self.label_dimension(labels) - return np.hstack(map(lambda c: xrange(c[0],c[0]+c[1]), zip(start,dim))) \ + return np.hstack([range(c[0],c[0]+c[1]) for c in zip(start,dim)]) \ if isinstance(labels, Iterable) and not isinstance(labels, str) \ - else xrange(start,start+dim) + else range(start,start+dim) # ------------------------------------------------------------------ def info_append(self, @@ -447,7 +455,7 @@ class ASCIItable(): def data_skipLines(self, count): """wind forward by count number of lines""" - for i in xrange(count): + for i in range(count): alive = self.data_read() return alive @@ -501,9 +509,9 @@ class ASCIItable(): columns = [] for i,(c,d) in enumerate(zip(indices[present],dimensions[present])): # for all valid labels ... # ... transparently add all components unless column referenced by number or with explicit dimension - columns += range(c,c + \ + columns += list(range(c,c + \ (d if str(c) != str(labels[present[i]]) else \ - 1)) + 1))) use = np.array(columns) self.tags = list(np.array(self.tags)[use]) # update labels with valid subset @@ -530,7 +538,7 @@ class ASCIItable(): """write whole numpy array data""" for row in self.data: try: - output = [fmt % value for value in row] if fmt else map(repr,row) + output = [fmt % value for value in row] if fmt else list(map(repr,row)) except: output = [fmt % row] if fmt else [repr(row)] @@ -555,7 +563,7 @@ class ASCIItable(): try: idx = self.label_index(where) if len(self.data) <= idx: - self.data_append(['n/a' for i in xrange(idx+1-len(self.data))]) # grow data if too short + self.data_append(['n/a' for i in range(idx+1-len(self.data))]) # grow data if too short self.data[idx] = str(what) except(ValueError): pass @@ -568,7 +576,7 @@ class ASCIItable(): # ------------------------------------------------------------------ def data_asFloat(self): - return map(self._transliterateToFloat,self.data) + return list(map(self._transliterateToFloat,self.data)) @@ -590,8 +598,8 @@ class ASCIItable(): if len(items) > 2: if items[1].lower() == 'of': items = np.ones(datatype(items[0]))*datatype(items[2]) elif items[1].lower() == 'to': items = np.arange(datatype(items[0]),1+datatype(items[2])) - else: items = map(datatype,items) - else: items = map(datatype,items) + else: items = list(map(datatype,items)) + else: items = list(map(datatype,items)) s = min(len(items), N-i) # prevent overflow of microstructure array microstructure[i:i+s] = items[:s] diff --git a/lib/damask/colormaps.py b/lib/damask/colormaps.py index b0063d76e..6e33613e0 100644 --- a/lib/damask/colormaps.py +++ b/lib/damask/colormaps.py @@ -33,7 +33,7 @@ class Color(): } model = model.upper() - if model not in self.__transforms__.keys(): model = 'RGB' + if model not in list(self.__transforms__.keys()): model = 'RGB' if model == 'RGB' and max(color) > 1.0: # are we 
RGB255 ? for i in range(3): color[i] /= 255.0 # rescale to RGB @@ -62,7 +62,7 @@ class Color(): # ------------------------------------------------------------------ def convertTo(self,toModel = 'RGB'): toModel = toModel.upper() - if toModel not in self.__transforms__.keys(): return + if toModel not in list(self.__transforms__.keys()): return sourcePos = self.__transforms__[self.model]['index'] targetPos = self.__transforms__[toModel]['index'] @@ -139,7 +139,7 @@ class Color(): HSL[0] = HSL[0]*60.0 # scaling to 360 might be dangerous for small values if (HSL[0] < 0.0): HSL[0] = HSL[0] + 360.0 - for i in xrange(2): + for i in range(2): HSL[i+1] = min(HSL[i+1],1.0) HSL[i+1] = max(HSL[i+1],0.0) @@ -164,11 +164,11 @@ class Color(): [0.212671,0.715160,0.072169], [0.019334,0.119193,0.950227]]) - for i in xrange(3): + for i in range(3): if (self.color[i] > 0.04045): RGB_lin[i] = ((self.color[i]+0.0555)/1.0555)**2.4 else: RGB_lin[i] = self.color[i] /12.92 XYZ = np.dot(convert,RGB_lin) - for i in xrange(3): + for i in range(3): XYZ[i] = max(XYZ[i],0.0) @@ -193,10 +193,10 @@ class Color(): RGB_lin = np.dot(convert,self.color) RGB = np.zeros(3,'d') - for i in xrange(3): + for i in range(3): if (RGB_lin[i] > 0.0031308): RGB[i] = ((RGB_lin[i])**(1.0/2.4))*1.0555-0.0555 else: RGB[i] = RGB_lin[i] *12.92 - for i in xrange(3): + for i in range(3): RGB[i] = min(RGB[i],1.0) RGB[i] = max(RGB[i],0.0) @@ -225,7 +225,7 @@ class Color(): XYZ[0] = XYZ[1] + self.color[1]/ 500.0 XYZ[2] = XYZ[1] - self.color[2]/ 200.0 - for i in xrange(len(XYZ)): + for i in range(len(XYZ)): if (XYZ[i] > 6./29. ): XYZ[i] = XYZ[i]**3. else: XYZ[i] = 108./841. * (XYZ[i] - 4./29.) @@ -245,7 +245,7 @@ class Color(): ref_white = np.array([.95047, 1.00000, 1.08883],'d') # Observer = 2, Illuminant = D65 XYZ = self.color/ref_white - for i in xrange(len(XYZ)): + for i in range(len(XYZ)): if (XYZ[i] > 216./24389 ): XYZ[i] = XYZ[i]**(1.0/3.0) else: XYZ[i] = (841./108. 
* XYZ[i]) + 16.0/116.0 @@ -451,7 +451,7 @@ class Colormap(): """ format = format.lower() # consistent comparison basis frac = 0.5*(np.array(crop) + 1.0) # rescale crop range to fractions - colors = [self.color(float(i)/(steps-1)*(frac[1]-frac[0])+frac[0]).expressAs(model).color for i in xrange(steps)] + colors = [self.color(float(i)/(steps-1)*(frac[1]-frac[0])+frac[0]).expressAs(model).color for i in range(steps)] if format == 'paraview': colormap = ['[\n {{\n "ColorSpace" : "RGB", "Name" : "{}",\n "RGBPoints" : ['.format(name)] \ + [' {:4d},{:8.6f},{:8.6f},{:8.6f},'.format(i,color[0],color[1],color[2],) @@ -461,7 +461,7 @@ class Colormap(): elif format == 'gmsh': colormap = ['View.ColorTable = {'] \ - + [',\n'.join(['{%s}'%(','.join(map(lambda x:str(x*255.0),color))) for color in colors])] \ + + [',\n'.join(['{%s}'%(','.join([str(x*255.0) for x in color])) for color in colors])] \ + ['}'] elif format == 'gom': @@ -469,7 +469,7 @@ class Colormap(): + ' 9 ' + str(name) \ + ' 0 1 0 3 0 0 -1 9 \ 0 0 0 255 255 255 0 0 255 ' \ + '30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 ' + str(len(colors)) \ - + ' '.join([' 0 %s 255 1'%(' '.join(map(lambda x:str(int(x*255.0)),color))) for color in reversed(colors)])] + + ' '.join([' 0 %s 255 1'%(' '.join([str(int(x*255.0)) for x in color])) for color in reversed(colors)])] elif format == 'raw': colormap = ['\t'.join(map(str,color)) for color in colors] diff --git a/lib/damask/config/material.py b/lib/damask/config/material.py index c1c3ad6b4..f586ee098 100644 --- a/lib/damask/config/material.py +++ b/lib/damask/config/material.py @@ -19,7 +19,7 @@ class Section(): self.parameters[key] = data[key] if '__order__' not in self.parameters: - self.parameters['__order__'] = self.parameters.keys() + self.parameters['__order__'] = list(self.parameters.keys()) if part.lower() in classes: self.__class__ = classes[part.lower()] self.__init__(data) @@ -61,11 +61,11 @@ class Texture(Section): def add_component(self,theType,properties): - if 'scatter' not in map(str.lower,properties.keys()): + if 'scatter' not in list(map(str.lower,list(properties.keys()))): scatter = 0.0 else: scatter = properties['scatter'] - if 'fraction' not in map(str.lower,properties.keys()): + if 'fraction' not in list(map(str.lower,list(properties.keys()))): fraction = 1.0 else: fraction = properties['fraction'] @@ -224,10 +224,10 @@ class Material(): def add_microstructure(self, section='', components={}, # dict of phase,texture, and fraction lists ): - """Experimental! Needs expansion to multi-constituent microstructures...""" + """Experimental! 
Needs expansion to multi-constituent microstructures...""" microstructure = Microstructure() # make keys lower case (http://stackoverflow.com/questions/764235/dictionary-to-lowercase-in-python) - components=dict((k.lower(), v) for k,v in components.iteritems()) + components=dict((k.lower(), v) for k,v in components.items()) for key in ['phase','texture','fraction','crystallite']: if type(components[key]) is not list: diff --git a/lib/damask/environment.py b/lib/damask/environment.py index 8ceb093dd..850cf3da6 100644 --- a/lib/damask/environment.py +++ b/lib/damask/environment.py @@ -41,7 +41,7 @@ class Environment(): try: cmd = """ ssh mulicense2 "/Stat_Flexlm | grep 'Users of %s: ' | cut -d' ' -f7,13" """%software process = subprocess.Popen(shlex.split(cmd),stdout = subprocess.PIPE,stderr = subprocess.PIPE) - licenses = map(int, process.stdout.readline().split()) + licenses = list(map(int, process.stdout.readline().split())) try: if licenses[0]-licenses[1] >= Nneeded: return 0 diff --git a/lib/damask/geometry/geometry.py b/lib/damask/geometry/geometry.py index cfefa51aa..0976299e3 100644 --- a/lib/damask/geometry/geometry.py +++ b/lib/damask/geometry/geometry.py @@ -15,7 +15,7 @@ class Geometry(): 'spectral': damask.geometry.Spectral, 'marc': damask.geometry.Marc, } - if solver.lower() in solverClass.keys(): + if solver.lower() in list(solverClass.keys()): self.__class__=solverClass[solver.lower()] self.__init__() diff --git a/lib/damask/solver/marc.py b/lib/damask/solver/marc.py index e693783f6..693bc8d87 100644 --- a/lib/damask/solver/marc.py +++ b/lib/damask/solver/marc.py @@ -25,7 +25,7 @@ class Marc(Solver): MSCpath = damask.environment.Environment(rootRelation).options['MSC_ROOT'] - for release,subdirs in sorted(self.releases.items(),reverse=True): + for release,subdirs in sorted(list(self.releases.items()),reverse=True): for subdir in subdirs: path = '%s/mentat%s/shlib/%s'%(MSCpath,release,subdir) if os.path.exists(path): return release @@ -40,7 +40,7 @@ class Marc(Solver): MSCpath = damask.environment.Environment(rootRelation).options['MSC_ROOT'] - if len(releases) == 0: releases = self.releases.keys() + if len(releases) == 0: releases = list(self.releases.keys()) if type(releases) is not list: releases = [releases] for release in sorted(releases,reverse=True): if release not in self.releases: continue diff --git a/lib/damask/solver/solver.py b/lib/damask/solver/solver.py index 175a86cd6..d210595b5 100644 --- a/lib/damask/solver/solver.py +++ b/lib/damask/solver/solver.py @@ -15,7 +15,7 @@ class Solver(): 'spectral': damask.solver.Spectral, 'marc': damask.solver.Marc, } - if solver.lower() in solverClass.keys(): + if solver.lower() in list(solverClass.keys()): self.__class__=solverClass[solver.lower()] self.__init__() From f411405da65096fef951dd09d506311c3c3f9057 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Wed, 27 Jul 2016 14:29:16 +0200 Subject: [PATCH 02/88] wrong comment --- code/prec.f90 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/code/prec.f90 b/code/prec.f90 index 0ded23528..778bd0b90 100644 --- a/code/prec.f90 +++ b/code/prec.f90 @@ -83,7 +83,7 @@ module prec nTwin = 0_pInt, & nTrans = 0_pInt logical :: & - nonlocal = .false. !< absolute tolerance for state integration + nonlocal = .false. 
real(pReal), pointer, dimension(:,:), contiguous :: & slipRate, & !< slip rate accumulatedSlip !< accumulated plastic slip From e68b06f1cb942fe5b23b4f1f4550e9a8eb3eb492 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Wed, 27 Jul 2016 14:32:03 +0200 Subject: [PATCH 03/88] might be helpful --- code/math.f90 | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/code/math.f90 b/code/math.f90 index 8694e30ee..87b232575 100644 --- a/code/math.f90 +++ b/code/math.f90 @@ -158,7 +158,8 @@ module math math_areaTriangle, & math_rotate_forward33, & math_rotate_backward33, & - math_rotate_forward3333 + math_rotate_forward3333, & + math_limit private :: & math_partition, & halton, & From 53b94ddf59ea4ce283f53931c631de53358b941a Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Wed, 27 Jul 2016 20:37:54 +0200 Subject: [PATCH 04/88] not needed (hides complexity) --- examples/SpectralMethod/Makefile | 12 ------------ 1 file changed, 12 deletions(-) delete mode 100644 examples/SpectralMethod/Makefile diff --git a/examples/SpectralMethod/Makefile b/examples/SpectralMethod/Makefile deleted file mode 100644 index 3abb88f89..000000000 --- a/examples/SpectralMethod/Makefile +++ /dev/null @@ -1,12 +0,0 @@ -include ${PETSC_DIR}/${PETSC_ARCH}/lib/petsc/conf/variables -include ${PETSC_DIR}/${PETSC_ARCH}/lib/petsc/conf/rules - - -run16x16x16: - -@${MPIEXEC} -n 2 DAMASK_spectral -l tensionX.load -g 20grains16x16x16.geom - -run32x32x32: - -@${MPIEXEC} -n 4 DAMASK_spectral -l tensionX.load -g 20grains32x32x32.geom - -run64x64x64: - -@${MPIEXEC} -n 8 DAMASK_spectral -l tensionX.load -g 20grains64x64x64.geom From 12d13fb2fa888529d2cfcae4ec54d0bee7dd9f7a Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 29 Jul 2016 16:27:27 +0200 Subject: [PATCH 05/88] giving unit --- code/spectral_thermal.f90 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/code/spectral_thermal.f90 b/code/spectral_thermal.f90 index ab980e091..bb5747574 100644 --- a/code/spectral_thermal.f90 +++ b/code/spectral_thermal.f90 @@ -245,7 +245,7 @@ type(tSolutionState) function spectral_thermal_solution(guess,timeinc,timeinc_ol if (worldrank == 0) then if (spectral_thermal_solution%converged) & write(6,'(/,a)') ' ... thermal conduction converged ..................................' - write(6,'(/,a,f8.4,2x,f8.4,2x,f8.4,/)',advance='no') ' Minimum|Maximum|Delta Temperature = ',& + write(6,'(/,a,f8.4,2x,f8.4,2x,f8.4,/)',advance='no') ' Minimum|Maximum|Delta Temperature / K = ',& minTemperature, maxTemperature, stagNorm write(6,'(/,a)') ' ===========================================================================' flush(6) From a6ee4216d6f33d8ff907cc4fa68f8138d7ba523b Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 29 Jul 2016 16:34:51 +0200 Subject: [PATCH 06/88] commenting --- code/DAMASK_spectral.f90 | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/code/DAMASK_spectral.f90 b/code/DAMASK_spectral.f90 index 0be78083b..c363393d8 100644 --- a/code/DAMASK_spectral.f90 +++ b/code/DAMASK_spectral.f90 @@ -156,11 +156,9 @@ program DAMASK_spectral !-------------------------------------------------------------------------------------------------- ! 
init DAMASK (all modules) call CPFEM_initAll(el = 1_pInt, ip = 1_pInt) - mainProcess: if (worldrank == 0) then - write(6,'(/,a)') ' <<<+- DAMASK_spectral init -+>>>' - write(6,'(a15,a)') ' Current time: ',IO_timeStamp() + write(6,'(/,a)') ' <<<+- DAMASK_spectral init -+>>>' + write(6,'(a15,a)') ' Current time: ',IO_timeStamp() #include "compilation_info.f90" - endif mainProcess !-------------------------------------------------------------------------------------------------- ! initialize field solver information @@ -199,14 +197,14 @@ program DAMASK_spectral allocate(loadCases(i)%ID(nActiveFields)) field = 1 loadCases(i)%ID(field) = FIELD_MECH_ID ! mechanical active by default - if (any(thermal_type == THERMAL_conduction_ID)) then ! thermal field active + thermalActive: if (any(thermal_type == THERMAL_conduction_ID)) then field = field + 1 loadCases(i)%ID(field) = FIELD_THERMAL_ID - endif - if (any(damage_type == DAMAGE_nonlocal_ID)) then ! damage field active + endif thermalActive + damageActive: if (any(damage_type == DAMAGE_nonlocal_ID)) then field = field + 1 loadCases(i)%ID(field) = FIELD_DAMAGE_ID - endif + endif damageActive enddo !-------------------------------------------------------------------------------------------------- From 0d1dfc153e1d25fb878837715061fd56831156a1 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 31 Jul 2016 21:30:23 +0200 Subject: [PATCH 07/88] without ticks, zsh expands ... to ../.. for some strange reasons --- DAMASK_env.zsh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/DAMASK_env.zsh b/DAMASK_env.zsh index 2ce2351cd..3bbab82df 100644 --- a/DAMASK_env.zsh +++ b/DAMASK_env.zsh @@ -41,7 +41,7 @@ if [ ! -z "$PS1" ]; then echo Max-Planck-Institut für Eisenforschung GmbH, Düsseldorf echo https://damask.mpie.de echo - echo Using environment with ... + echo "Using environment with ..." 
echo "DAMASK $DAMASK_ROOT" echo "Spectral Solver $SOLVER" echo "Post Processing $PROCESSING" From 6c826417132f6b3a04b504198a9f184368e7fae3 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Mon, 1 Aug 2016 16:43:14 +0200 Subject: [PATCH 08/88] added defaults (bugfix) --- processing/post/addOrientations.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/processing/post/addOrientations.py b/processing/post/addOrientations.py index 6a6219a5a..c778e67b0 100755 --- a/processing/post/addOrientations.py +++ b/processing/post/addOrientations.py @@ -69,7 +69,8 @@ parser.add_option('-q', '--quaternion', parser.set_defaults(output = [], symmetry = damask.Symmetry.lattices[-1], - rotation = (0.,1.,1.,1.), # no rotation about 1,1,1 + labrotation = (0.,1.,1.,1.), # no rotation about 1,1,1 + crystalrotation = (0.,1.,1.,1.), # no rotation about 1,1,1 degrees = False, ) From 0219a5f0e3c53a3d239f2280ea4b65a842ce7f4c Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Mon, 1 Aug 2016 17:01:13 +0200 Subject: [PATCH 09/88] regex match --- lib/damask/environment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/damask/environment.py b/lib/damask/environment.py index d301a5498..fedc22e36 100644 --- a/lib/damask/environment.py +++ b/lib/damask/environment.py @@ -22,7 +22,7 @@ class Environment(): for line in configFile: l = re.sub('^set ', '', line).strip() # remove "set" (tcsh) when setting variables if l and not l.startswith('#'): - items = map(string.strip,l.split('=')) + items = re.split(r'\s*=\s*',l) if len(items) == 2: self.options[items[0].upper()] = \ re.sub('\$\{*DAMASK_ROOT\}*',self.rootDir(),os.path.expandvars(items[1])) # expand all shell variables and DAMASK_ROOT From 6b003c6ea81878a05000b4a896c0cd63cc3ed757 Mon Sep 17 00:00:00 2001 From: Test User Date: Tue, 2 Aug 2016 04:27:21 +0200 Subject: [PATCH 10/88] updated version information after successful test of v2.0.1-35-g6c82641 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 5af1fb958..1ec9fb2d4 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.1-5-g920cf2c +v2.0.1-35-g6c82641 From 22d275b009b8b1f27f98ebbcdcdae504c877c4b6 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Mon, 8 Aug 2016 10:36:34 +0200 Subject: [PATCH 11/88] initialization also needed for "empty" phase to allocate state arrays seems to be safe --- code/plastic_nonlocal.f90 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/code/plastic_nonlocal.f90 b/code/plastic_nonlocal.f90 index cb2b31772..f7b36897a 100644 --- a/code/plastic_nonlocal.f90 +++ b/code/plastic_nonlocal.f90 @@ -1115,7 +1115,7 @@ allocate(nonSchmidProjection(3,3,4,maxTotalNslip,maxNinstances), initializeInstances: do phase = 1_pInt, size(phase_plasticity) NofMyPhase=count(material_phase==phase) - myPhase2: if (phase_plasticity(phase) == PLASTICITY_NONLOCAL_ID .and. 
NofMyPhase/=0) then + myPhase2: if (phase_plasticity(phase) == PLASTICITY_NONLOCAL_ID) then instance = phase_plasticityInstance(phase) !*** Inverse lookup of my slip system family and the slip system in lattice From 535f8be76ac99c2ba68c955f9095e3269af2113f Mon Sep 17 00:00:00 2001 From: Test User Date: Mon, 8 Aug 2016 16:27:05 +0200 Subject: [PATCH 12/88] updated version information after successful test of v2.0.1-39-g22d275b --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 1ec9fb2d4..5a316242f 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.1-35-g6c82641 +v2.0.1-39-g22d275b From d239cab3aea994a651033b5d6d807cc86679a13e Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 11 Aug 2016 13:48:15 +0200 Subject: [PATCH 13/88] corrected help --- processing/misc/OIMgrainFile_toTable.py | 2 +- processing/misc/ang_toTable.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/processing/misc/OIMgrainFile_toTable.py b/processing/misc/OIMgrainFile_toTable.py index 74d5d1819..c936d92e6 100755 --- a/processing/misc/OIMgrainFile_toTable.py +++ b/processing/misc/OIMgrainFile_toTable.py @@ -12,7 +12,7 @@ scriptID = ' '.join([scriptName,damask.version]) # MAIN #-------------------------------------------------------------------------------------------------- -parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """ +parser = OptionParser(option_class=damask.extendableOption, usage='%prog [file[s]]', description = """ Adds header to OIM grain file to make it accesible as ASCII table """, version = scriptID) diff --git a/processing/misc/ang_toTable.py b/processing/misc/ang_toTable.py index 177955f17..19fdcd55b 100755 --- a/processing/misc/ang_toTable.py +++ b/processing/misc/ang_toTable.py @@ -13,7 +13,7 @@ scriptID = ' '.join([scriptName,damask.version]) # MAIN #-------------------------------------------------------------------------------------------------- -parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile[s]]', description = """ +parser = OptionParser(option_class=damask.extendableOption, usage='%prog [angfile[s]]', description = """ Convert TSL/EDAX *.ang file to ASCIItable """, version = scriptID) @@ -30,7 +30,7 @@ for name in filenames: outname = os.path.splitext(name)[0]+'.txt' if name else name, buffered = False, labeled = False) except: continue - table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else '')) + damask.util.report(scriptName,name) # --- interpret header ----------------------------------------------------------------------------- From 6bdce9b3320fa3505c2d6e077a84f80ec9d526b0 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Thu, 11 Aug 2016 14:22:07 -0400 Subject: [PATCH 14/88] fixed error for completely empty label list in read_array --- lib/damask/asciitable.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/damask/asciitable.py b/lib/damask/asciitable.py index 4fe4f9156..145b87974 100644 --- a/lib/damask/asciitable.py +++ b/lib/damask/asciitable.py @@ -501,10 +501,10 @@ class ASCIItable(): columns = [] for i,(c,d) in enumerate(zip(indices[present],dimensions[present])): # for all valid labels ... # ... 
transparently add all components unless column referenced by number or with explicit dimension - columns += range(c,c + \ - (d if str(c) != str(labels[present[i]]) else \ + columns += range(c,c + + (d if str(c) != str(labels[present[i]]) else 1)) - use = np.array(columns) + use = np.array(columns) if len(columns) > 0 else None self.tags = list(np.array(self.tags)[use]) # update labels with valid subset From 600731b15ca2fd90ffedbb5c284114082589a5b0 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Thu, 11 Aug 2016 14:23:29 -0400 Subject: [PATCH 15/88] modernized, gracefully add NaN for out-of-bounds mapping --- processing/post/addMapped.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/processing/post/addMapped.py b/processing/post/addMapped.py index c57e62d8b..f67d88d15 100755 --- a/processing/post/addMapped.py +++ b/processing/post/addMapped.py @@ -2,6 +2,7 @@ # -*- coding: UTF-8 no BOM -*- import os,sys +import numpy as np from optparse import OptionParser import damask @@ -28,7 +29,7 @@ parser.add_option('-o','--offset', parser.add_option('-l','--label', dest = 'label', action = 'extend', metavar = '', - help='heading of column(s) to be mapped') + help='column label(s) to be mapped') parser.add_option('-a','--asciitable', dest = 'asciitable', type = 'string', metavar = 'string', @@ -49,12 +50,13 @@ if options.map is None: if options.asciitable is not None and os.path.isfile(options.asciitable): mappedTable = damask.ASCIItable(name = options.asciitable, - buffered = False, readonly = True) + buffered = False, + readonly = True) mappedTable.head_read() # read ASCII header info of mapped table missing_labels = mappedTable.data_readArray(options.label) if len(missing_labels) > 0: - mappedTable.croak('column{} {} not found...'.format('s' if len(missing_labels) > 1 else '',', '.join(missing_labels))) + damask.util.croak('column{} {} not found...'.format('s' if len(missing_labels) > 1 else '',', '.join(missing_labels))) else: parser.error('no mapped ASCIItable given.') @@ -64,9 +66,8 @@ else: if filenames == []: filenames = [None] for name in filenames: - try: - table = damask.ASCIItable(name = name, - buffered = False) + try: table = damask.ASCIItable(name = name, + buffered = False) except: continue damask.util.report(scriptName,name) @@ -96,7 +97,10 @@ for name in filenames: outputAlive = True while outputAlive and table.data_read(): # read next data line of ASCII table - table.data_append(mappedTable.data[int(round(float(table.data[mappedColumn])))+options.offset-1]) # add all mapped data types + try: + table.data_append(mappedTable.data[int(round(float(table.data[mappedColumn])))+options.offset-1]) # add all mapped data types + except IndexError: + table.data_append(np.nan*np.ones_like(mappedTable.data[0])) outputAlive = table.data_write() # output processed line # ------------------------------------------ output finalization ----------------------------------- From 64ac05fc26374c12b1dc46a35b82d842740eaf4f Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Thu, 11 Aug 2016 14:24:40 -0400 Subject: [PATCH 16/88] removed left-over debug output --- processing/post/addSchmidfactors.py | 1 - 1 file changed, 1 deletion(-) diff --git a/processing/post/addSchmidfactors.py b/processing/post/addSchmidfactors.py index 0bc529034..067c3036a 100755 --- a/processing/post/addSchmidfactors.py +++ b/processing/post/addSchmidfactors.py @@ -167,7 +167,6 @@ force = np.array(options.force) force /= np.linalg.norm(force) if options.normal: - damask.util.croak('got 
normal') normal = np.array(options.normal) normal /= np.linalg.norm(normal) if abs(np.dot(force,normal)) > 1e-3: From 49f94a1cb57e09ab23b6c0ff4abe08dc4784f62b Mon Sep 17 00:00:00 2001 From: Test User Date: Fri, 12 Aug 2016 04:27:00 +0200 Subject: [PATCH 17/88] updated version information after successful test of v2.0.1-43-g64ac05f --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 5a316242f..921b19cbd 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.1-39-g22d275b +v2.0.1-43-g64ac05f From c28649d348395bd01c3284ec21fecafdcc04aea2 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 16 Aug 2016 13:30:11 +0200 Subject: [PATCH 18/88] error check for mpi parallelization --- code/mesh.f90 | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/code/mesh.f90 b/code/mesh.f90 index 0562ab218..32f94d66b 100644 --- a/code/mesh.f90 +++ b/code/mesh.f90 @@ -481,6 +481,7 @@ subroutine mesh_init(ip,el) #endif #ifdef Spectral IO_open_file, & + IO_error, & #else IO_open_InputFile, & #endif @@ -507,7 +508,8 @@ subroutine mesh_init(ip,el) implicit none #ifdef Spectral - integer(C_INTPTR_T) :: gridMPI(3), alloc_local, local_K, local_K_offset + integer(C_INTPTR_T) :: devNull, local_K, local_K_offset + integer :: ierr, worldsize #endif integer(pInt), parameter :: FILEUNIT = 222_pInt integer(pInt), intent(in) :: el, ip @@ -547,10 +549,13 @@ subroutine mesh_init(ip,el) call IO_open_file(FILEUNIT,geometryFile) ! parse info from geometry file... if (myDebug) write(6,'(a)') ' Opened geometry file'; flush(6) grid = mesh_spectral_getGrid(fileUnit) + call MPI_comm_size(MPI_COMM_WORLD, worldsize, ierr) + if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_comm_size') + if(worldsize>grid(3)) call IO_error(894_pInt, ext_msg='number of processes exceeds grid(3)') + geomSize = mesh_spectral_getSize(fileUnit) - gridMPI = int(grid,C_INTPTR_T) - alloc_local = fftw_mpi_local_size_3d(gridMPI(3), gridMPI(2), gridMPI(1)/2 +1, & - MPI_COMM_WORLD, local_K, local_K_offset) + devNull = fftw_mpi_local_size_3d(int(grid(3),C_INTPTR_T),int(grid(2),C_INTPTR_T),& + int(grid(1),C_INTPTR_T)/2+1,MPI_COMM_WORLD,local_K,local_K_offset) grid3 = int(local_K,pInt) grid3Offset = int(local_K_offset,pInt) size3 = geomSize(3)*real(grid3,pReal) /real(grid(3),pReal) From 2738415b348a618fbbb7dde0111f2cef6fe6d4b8 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 20 Aug 2016 06:59:50 +0200 Subject: [PATCH 19/88] removed twinning parameters --- .../Phase_Phenopowerlaw_BCC-Ferrite.config | 26 ++++++------------- .../Phase_Phenopowerlaw_BCC-Martensite.config | 25 ++++++------------ 2 files changed, 16 insertions(+), 35 deletions(-) diff --git a/examples/ConfigFiles/Phase_Phenopowerlaw_BCC-Ferrite.config b/examples/ConfigFiles/Phase_Phenopowerlaw_BCC-Ferrite.config index 7344ef455..6efd84f65 100644 --- a/examples/ConfigFiles/Phase_Phenopowerlaw_BCC-Ferrite.config +++ b/examples/ConfigFiles/Phase_Phenopowerlaw_BCC-Ferrite.config @@ -1,35 +1,25 @@ # Tasan et.al. 2015 Acta Materalia # Tasan et.al. 2015 International Journal of Plasticity # Diehl et.al. 
2015 Meccanica -[BCC_Ferrite] +[BCC-Ferrite] + elasticity hooke plasticity phenopowerlaw lattice_structure bcc -Nslip 12 12 # per family -Ntwin 0 # per family +Nslip 12 12 # per family +Ntwin 0 # per family c11 233.3e9 c12 135.5e9 c44 118.0e9 gdot0_slip 0.001 n_slip 20 -tau0_slip 95.e6 97.e6 0 0 # per family, optimization long simplex 109 -tausat_slip 222.e6 412.7e6 0 0 # per family, optimization long simplex 109 -gdot0_twin 0.001 -n_twin 20 -tau0_twin 31.0e6 # per family -s_pr 0 # push-up factor for slip saturation due to twinning -twin_b 0 -twin_c 0 -twin_d 0 -twin_e 0 -h0_slipslip 1000.0e6 # opti -h0_twinslip 0 -h0_twintwin 0 +tau0_slip 95.e6 97.e6 # per family, optimization long simplex 109 +tausat_slip 222.e6 412.7e6 # per family, optimization long simplex 109 +h0_slipslip 1000.0e6 interaction_slipslip 1 1 1.4 1.4 1.4 1.4 interaction_sliptwin 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 interaction_twinslip 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 interaction_twintwin 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 -w0_slip 2.0 # opti -atol_resistance 1 +w0_slip 2.0 (output) totalshear diff --git a/examples/ConfigFiles/Phase_Phenopowerlaw_BCC-Martensite.config b/examples/ConfigFiles/Phase_Phenopowerlaw_BCC-Martensite.config index b9960d325..89ae0339b 100644 --- a/examples/ConfigFiles/Phase_Phenopowerlaw_BCC-Martensite.config +++ b/examples/ConfigFiles/Phase_Phenopowerlaw_BCC-Martensite.config @@ -1,34 +1,25 @@ # Tasan et.al. 2015 Acta Materalia # Tasan et.al. 2015 International Journal of Plasticity # Diehl et.al. 2015 Meccanica -[BCC_Martensite] -plasticity phenopowerlaw +[BCC-Martensite] + elasticity hooke +plasticity phenopowerlaw + lattice_structure bcc -Nslip 12 12 # per family -Ntwin 0 # per family +Nslip 12 12 # per family +Ntwin 0 # per family c11 417.4e9 c12 242.4e9 c44 211.1e9 gdot0_slip 0.001 n_slip 20 -tau0_slip 405.8e6 456.7e6 0 0 # per family -tausat_slip 872.9e6 971.2e6 0 0 # per family -gdot0_twin 0.001 -n_twin 20 -tau0_twin 31.0e6 # per family -s_pr 0 # push-up factor for slip saturation due to twinning -twin_b 0 -twin_c 0 -twin_d 0 -twin_e 0 +tau0_slip 405.8e6 456.7e6 # per family +tausat_slip 872.9e6 971.2e6 # per family h0_slipslip 563.0e9 -h0_twinslip 0 -h0_twintwin 0 interaction_slipslip 1 1 1.4 1.4 1.4 1.4 interaction_sliptwin 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 interaction_twinslip 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 interaction_twintwin 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 w0_slip 2.0 -atol_resistance 1 (output) totalshear From 59e7a41aa2093da5a7bcd75d661f89ed60f4fbf6 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 20 Aug 2016 07:14:18 +0200 Subject: [PATCH 20/88] now longer write empty file if file given in {} not existing --- code/IO.f90 | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/code/IO.f90 b/code/IO.f90 index db0c056fe..22b56d819 100644 --- a/code/IO.f90 +++ b/code/IO.f90 @@ -129,6 +129,7 @@ recursive function IO_read(fileUnit,reset) result(line) !-------------------------------------------------------------------------------------------------- ! normal case if (input == '') return ! regular line + !-------------------------------------------------------------------------------------------------- ! recursion case if (stack >= 10_pInt) call IO_error(104_pInt,ext_msg=input) ! recursion limit reached @@ -141,7 +142,7 @@ recursive function IO_read(fileUnit,reset) result(line) pathOn(stack) = path(1:scan(path,SEP,.true.))//input ! glue include to current file's dir endif - open(newunit=unitOn(stack),iostat=myStat,file=pathOn(stack)) ! 
open included file + open(newunit=unitOn(stack),iostat=myStat,file=pathOn(stack),action=read) ! open included file if (myStat /= 0_pInt) call IO_error(100_pInt,el=myStat,ext_msg=pathOn(stack)) line = IO_read(fileUnit) From 4dfc8d0132e8155da1fa531a6c28e12b8c8017de Mon Sep 17 00:00:00 2001 From: zhangc43 Date: Mon, 22 Aug 2016 10:27:49 -0400 Subject: [PATCH 21/88] add MPI_finalize() following Martin & Philip suggestions; --- code/DAMASK_spectral.f90 | 118 ++++++++++++++++++++------------------- 1 file changed, 62 insertions(+), 56 deletions(-) diff --git a/code/DAMASK_spectral.f90 b/code/DAMASK_spectral.f90 index c363393d8..96b3c1451 100644 --- a/code/DAMASK_spectral.f90 +++ b/code/DAMASK_spectral.f90 @@ -81,7 +81,7 @@ program DAMASK_spectral use spectral_mech_Polarisation use spectral_damage use spectral_thermal - + implicit none @@ -93,9 +93,9 @@ program DAMASK_spectral logical, dimension(9) :: temp_maskVector = .false. !< temporarily from loadcase file when reading in tensors integer(pInt), parameter :: FILEUNIT = 234_pInt !< file unit, DAMASK IO does not support newunit feature integer(pInt), allocatable, dimension(:) :: chunkPos - + integer(pInt) :: & - N_t = 0_pInt, & !< # of time indicators found in load case file + N_t = 0_pInt, & !< # of time indicators found in load case file N_n = 0_pInt, & !< # of increment specifiers found in load case file N_def = 0_pInt !< # of rate of deformation specifiers found in load case file character(len=65536) :: & @@ -105,7 +105,7 @@ program DAMASK_spectral ! loop variables, convergence etc. real(pReal), dimension(3,3), parameter :: & ones = 1.0_pReal, & - zeros = 0.0_pReal + zeros = 0.0_pReal integer(pInt), parameter :: & subStepFactor = 2_pInt !< for each substep, divide the last time increment by 2.0 real(pReal) :: & @@ -150,6 +150,7 @@ program DAMASK_spectral MPI_file_get_position, & MPI_file_write, & MPI_abort, & + MPI_finalize, & MPI_allreduce, & PETScFinalize @@ -159,7 +160,7 @@ program DAMASK_spectral write(6,'(/,a)') ' <<<+- DAMASK_spectral init -+>>>' write(6,'(a15,a)') ' Current time: ',IO_timeStamp() #include "compilation_info.f90" - + !-------------------------------------------------------------------------------------------------- ! initialize field solver information nActiveFields = 1 @@ -192,14 +193,14 @@ program DAMASK_spectral call IO_error(error_ID=837_pInt,ext_msg = trim(loadCaseFile)) ! error message for incomplete loadcase allocate (loadCases(N_n)) ! array of load cases loadCases%P%myType='p' - + do i = 1, size(loadCases) allocate(loadCases(i)%ID(nActiveFields)) field = 1 loadCases(i)%ID(field) = FIELD_MECH_ID ! mechanical active by default thermalActive: if (any(thermal_type == THERMAL_conduction_ID)) then field = field + 1 - loadCases(i)%ID(field) = FIELD_THERMAL_ID + loadCases(i)%ID(field) = FIELD_THERMAL_ID endif thermalActive damageActive: if (any(damage_type == DAMAGE_nonlocal_ID)) then field = field + 1 @@ -231,11 +232,11 @@ program DAMASK_spectral do j = 1_pInt, 9_pInt temp_maskVector(j) = IO_stringValue(line,chunkPos,i+j) /= '*' ! true if not a * enddo - do j = 1_pInt,9_pInt + do j = 1_pInt,9_pInt if (temp_maskVector(j)) temp_valueVector(j) = IO_floatValue(line,chunkPos,i+j) ! read value where applicable enddo loadCases(currentLoadCase)%deformation%maskLogical = & ! logical mask in 3x3 notation - transpose(reshape(temp_maskVector,[ 3,3])) + transpose(reshape(temp_maskVector,[ 3,3])) loadCases(currentLoadCase)%deformation%maskFloat = & ! 
float (1.0/0.0) mask in 3x3 notation merge(ones,zeros,loadCases(currentLoadCase)%deformation%maskLogical) loadCases(currentLoadCase)%deformation%values = math_plain9to33(temp_valueVector) ! values in 3x3 notation @@ -259,10 +260,10 @@ program DAMASK_spectral loadCases(currentLoadCase)%incs = IO_intValue(line,chunkPos,i+1_pInt) loadCases(currentLoadCase)%logscale = 1_pInt case('freq','frequency','outputfreq') ! frequency of result writings - loadCases(currentLoadCase)%outputfrequency = IO_intValue(line,chunkPos,i+1_pInt) + loadCases(currentLoadCase)%outputfrequency = IO_intValue(line,chunkPos,i+1_pInt) case('r','restart','restartwrite') ! frequency of writing restart information loadCases(currentLoadCase)%restartfrequency = & - max(0_pInt,IO_intValue(line,chunkPos,i+1_pInt)) + max(0_pInt,IO_intValue(line,chunkPos,i+1_pInt)) case('guessreset','dropguessing') loadCases(currentLoadCase)%followFormerTrajectory = .false. ! do not continue to predict deformation along former trajectory case('euler') ! rotation of currentLoadCase given in euler angles @@ -271,10 +272,10 @@ program DAMASK_spectral k = 1_pInt ! assuming keyword indicating degree/radians present select case (IO_lc(IO_stringValue(line,chunkPos,i+1_pInt))) case('deg','degree') - case('rad','radian') ! don't convert from degree to radian + case('rad','radian') ! don't convert from degree to radian l = 0_pInt - case default - k = 0_pInt + case default + k = 0_pInt end select do j = 1_pInt, 3_pInt temp_valueVector(j) = IO_floatValue(line,chunkPos,i+k+j) @@ -289,7 +290,7 @@ program DAMASK_spectral loadCases(currentLoadCase)%rotation = math_plain9to33(temp_valueVector) end select enddo; enddo - close(FILEUNIT) + close(FILEUNIT) !-------------------------------------------------------------------------------------------------- ! consistency checks and output of load case @@ -323,7 +324,7 @@ program DAMASK_spectral enddo if (any(loadCases(currentLoadCase)%P%maskLogical .eqv. & loadCases(currentLoadCase)%deformation%maskLogical)) errorID = 831_pInt ! exclusive or masking only - if (any(loadCases(currentLoadCase)%P%maskLogical .and. & + if (any(loadCases(currentLoadCase)%P%maskLogical .and. & transpose(loadCases(currentLoadCase)%P%maskLogical) .and. & reshape([ .false.,.true.,.true.,.true.,.false.,.true.,.true.,.true.,.false.],[ 3,3]))) & errorID = 838_pInt ! no rotation is allowed by stress BC @@ -358,7 +359,7 @@ program DAMASK_spectral endif !-------------------------------------------------------------------------------------------------- -! doing initialization depending on selected solver +! doing initialization depending on selected solver call Utilities_init() do field = 1, nActiveFields select case (loadCases(1)%ID(field)) @@ -370,26 +371,26 @@ program DAMASK_spectral if(iand(debug_level(debug_spectral),debug_levelBasic)/= 0 .and. worldrank == 0_pInt) & call IO_warning(42_pInt, ext_msg='debug Divergence') call AL_init - + case (DAMASK_spectral_SolverPolarisation_label) if(iand(debug_level(debug_spectral),debug_levelBasic)/= 0 .and. worldrank == 0_pInt) & call IO_warning(42_pInt, ext_msg='debug Divergence') call Polarisation_init - + case default call IO_error(error_ID = 891, ext_msg = trim(spectral_solver)) - - end select - + + end select + case(FIELD_THERMAL_ID) call spectral_thermal_init - + case(FIELD_DAMAGE_ID) call spectral_damage_init() end select enddo - + !-------------------------------------------------------------------------------------------------- ! 
write header of output file if (worldrank == 0) then @@ -408,7 +409,7 @@ program DAMASK_spectral write(resUnit) 'logscales:', loadCases%logscale write(resUnit) 'increments:', loadCases%incs ! one entry per LoadCase write(resUnit) 'startingIncrement:', restartInc - 1_pInt ! start with writing out the previous inc - write(resUnit) 'eoh' + write(resUnit) 'eoh' close(resUnit) ! end of header open(newunit=statUnit,file=trim(getSolverWorkingDirectoryName())//trim(getSolverJobName())//& '.sta',form='FORMATTED',status='REPLACE') @@ -458,7 +459,7 @@ program DAMASK_spectral !-------------------------------------------------------------------------------------------------- ! loopping over loadcases loadCaseLooping: do currentLoadCase = 1_pInt, size(loadCases) - time0 = time ! currentLoadCase start time + time0 = time ! currentLoadCase start time guess = loadCases(currentLoadCase)%followFormerTrajectory ! change of load case? homogeneous guess for the first inc !-------------------------------------------------------------------------------------------------- @@ -472,9 +473,9 @@ program DAMASK_spectral if (loadCases(currentLoadCase)%logscale == 0_pInt) then ! linear scale timeinc = loadCases(currentLoadCase)%time/real(loadCases(currentLoadCase)%incs,pReal) ! only valid for given linear time scale. will be overwritten later in case loglinear scale is used else - if (currentLoadCase == 1_pInt) then ! 1st currentLoadCase of logarithmic scale + if (currentLoadCase == 1_pInt) then ! 1st currentLoadCase of logarithmic scale if (inc == 1_pInt) then ! 1st inc of 1st currentLoadCase of logarithmic scale - timeinc = loadCases(1)%time*(2.0_pReal**real( 1_pInt-loadCases(1)%incs ,pReal)) ! assume 1st inc is equal to 2nd + timeinc = loadCases(1)%time*(2.0_pReal**real( 1_pInt-loadCases(1)%incs ,pReal)) ! assume 1st inc is equal to 2nd else ! not-1st inc of 1st currentLoadCase of logarithmic scale timeinc = loadCases(1)%time*(2.0_pReal**real(inc-1_pInt-loadCases(1)%incs ,pReal)) endif @@ -492,12 +493,12 @@ program DAMASK_spectral stepFraction = 0_pInt !-------------------------------------------------------------------------------------------------- -! loop over sub incs +! loop over sub incs subIncLooping: do while (stepFraction/subStepFactor**cutBackLevel <1_pInt) time = time + timeinc ! forward time - stepFraction = stepFraction + 1_pInt + stepFraction = stepFraction + 1_pInt remainingLoadCaseTime = time0 - time + loadCases(currentLoadCase)%time + timeInc - + !-------------------------------------------------------------------------------------------------- ! report begin of new increment if (worldrank == 0) then @@ -515,7 +516,7 @@ program DAMASK_spectral ',a,'//IO_intOut(stepFraction)//',a,'//IO_intOut(subStepFactor**cutBackLevel)//')') & 'Increment ',totalIncsCounter,'/',sum(loadCases%incs),& '-',stepFraction, '/', subStepFactor**cutBackLevel - endif + endif !-------------------------------------------------------------------------------------------------- ! 
forward fields @@ -541,18 +542,18 @@ program DAMASK_spectral F_BC = loadCases(currentLoadCase)%deformation, & P_BC = loadCases(currentLoadCase)%P, & rotation_BC = loadCases(currentLoadCase)%rotation) - end select - + end select + case(FIELD_THERMAL_ID) call spectral_thermal_forward (& guess,timeinc,timeIncOld,remainingLoadCaseTime) - + case(FIELD_DAMAGE_ID) call spectral_damage_forward (& guess,timeinc,timeIncOld,remainingLoadCaseTime) end select - enddo - + enddo + !-------------------------------------------------------------------------------------------------- ! solve fields stagIter = 0_pInt @@ -568,27 +569,27 @@ program DAMASK_spectral P_BC = loadCases(currentLoadCase)%P, & F_BC = loadCases(currentLoadCase)%deformation, & rotation_BC = loadCases(currentLoadCase)%rotation) - + case (DAMASK_spectral_SolverAL_label) solres(field) = AL_solution (& incInfo,guess,timeinc,timeIncOld,remainingLoadCaseTime, & P_BC = loadCases(currentLoadCase)%P, & F_BC = loadCases(currentLoadCase)%deformation, & rotation_BC = loadCases(currentLoadCase)%rotation) - + case (DAMASK_spectral_SolverPolarisation_label) solres(field) = Polarisation_solution (& incInfo,guess,timeinc,timeIncOld,remainingLoadCaseTime, & P_BC = loadCases(currentLoadCase)%P, & F_BC = loadCases(currentLoadCase)%deformation, & rotation_BC = loadCases(currentLoadCase)%rotation) - - end select - + + end select + case(FIELD_THERMAL_ID) solres(field) = spectral_thermal_solution (& guess,timeinc,timeIncOld,remainingLoadCaseTime) - + case(FIELD_DAMAGE_ID) solres(field) = spectral_damage_solution (& guess,timeinc,timeIncOld,remainingLoadCaseTime) @@ -600,11 +601,11 @@ program DAMASK_spectral stagIterate = stagIter < stagItMax .and. & all(solres(:)%converged) .and. & .not. all(solres(:)%stagConverged) - enddo + enddo !-------------------------------------------------------------------------------------------------- -! check solution - cutBack = .False. +! check solution + cutBack = .False. if(solres(1)%termIll .or. .not. all(solres(:)%converged .and. solres(:)%stagConverged)) then ! no solution found if (cutBackLevel < maxCutBack) then ! do cut back if (worldrank == 0) write(6,'(/,a)') ' cut back detected' @@ -617,8 +618,8 @@ program DAMASK_spectral call IO_warning(850_pInt) call quit(-1_pInt*(lastRestartWritten+1_pInt)) ! quit and provide information about last restart inc written elseif (continueCalculation == 1_pInt) then - guess = .true. ! accept non converged BVP solution - else ! default behavior, exit if spectral solver does not converge + guess = .true. ! accept non converged BVP solution + else ! default behavior, exit if spectral solver does not converge call IO_warning(850_pInt) call quit(-1_pInt*(lastRestartWritten+1_pInt)) ! quit and provide information about last restart inc written endif @@ -630,8 +631,8 @@ program DAMASK_spectral write(statUnit,*) totalIncsCounter, time, cutBackLevel, & solres%converged, solres%iterationsNeeded ! write statistics about accepted solution flush(statUnit) - endif - endif + endif + endif enddo subIncLooping cutBackLevel = max(0_pInt, cutBackLevel - 1_pInt) ! try half number of subincs next inc if(all(solres(:)%converged)) then ! report converged inc @@ -662,11 +663,11 @@ program DAMASK_spectral enddo fileOffset = fileOffset + sum(outputSize) ! forward to current file position endif - if( loadCases(currentLoadCase)%restartFrequency > 0_pInt .and. & ! at frequency of writing restart information set restart parameter for FEsolving - mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0_pInt) then ! 
first call to CPFEM_general will write? + if( loadCases(currentLoadCase)%restartFrequency > 0_pInt .and. & ! at frequency of writing restart information set restart parameter for FEsolving + mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0_pInt) then ! first call to CPFEM_general will write? restartWrite = .true. lastRestartWritten = inc - endif + endif else forwarding time = time + timeinc guess = .true. @@ -698,7 +699,7 @@ program DAMASK_spectral call AL_destroy() case (DAMASK_spectral_SolverPolarisation_label) call Polarisation_destroy() - end select + end select case(FIELD_THERMAL_ID) call spectral_thermal_destroy() case(FIELD_DAMAGE_ID) @@ -709,6 +710,11 @@ program DAMASK_spectral call PETScFinalize(ierr); CHKERRQ(ierr) + #ifdef _OPENMP + call MPI_finalize(i) + if (i /= 0_pInt) call IO_error(error_ID=894, el=i, ext_msg="Finalize()") + #endif + if (notConvergedCounter > 0_pInt) call quit(3_pInt) ! error if some are not converged call quit(0_pInt) ! no complains ;) @@ -719,7 +725,7 @@ end program DAMASK_spectral !> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH !> @brief quit subroutine to mimic behavior of FEM solvers !> @details exits the Spectral solver and reports time and duration. Exit code 0 signals -!> everything went fine. Exit code 1 signals an error, message according to IO_error. Exit code +!> everything went fine. Exit code 1 signals an error, message according to IO_error. Exit code !> 2 signals no converged solution and increment of last saved restart information is written to !> stderr. Exit code 3 signals no severe problems, but some increments did not converge !-------------------------------------------------------------------------------------------------- @@ -739,7 +745,7 @@ subroutine quit(stop_id) write(6,'(a,2(i2.2,a),i2.2)') 'Time: ',dateAndTime(5),':',& dateAndTime(6),':',& dateAndTime(7) - + if (stop_id == 0_pInt) stop 0 ! normal termination if (stop_id < 0_pInt) then ! terminally ill, restart might help write(0,'(a,i6)') 'restart information available at ', stop_id*(-1_pInt) From 3d0e19de0a3632daf1908652913abf02868466de Mon Sep 17 00:00:00 2001 From: zhangc43 Date: Mon, 22 Aug 2016 17:45:05 -0400 Subject: [PATCH 22/88] syntax polish for ifdef --- code/DAMASK_spectral.f90 | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/code/DAMASK_spectral.f90 b/code/DAMASK_spectral.f90 index 96b3c1451..1d3a0a0cb 100644 --- a/code/DAMASK_spectral.f90 +++ b/code/DAMASK_spectral.f90 @@ -711,8 +711,10 @@ program DAMASK_spectral call PETScFinalize(ierr); CHKERRQ(ierr) #ifdef _OPENMP - call MPI_finalize(i) - if (i /= 0_pInt) call IO_error(error_ID=894, el=i, ext_msg="Finalize()") + call MPI_finalize(i) + if (i /= 0_pInt) then + call IO_error(error_ID=894, el=i, ext_msg="Finalize()") + endif #endif if (notConvergedCounter > 0_pInt) call quit(3_pInt) ! 
error if some are not converged From af10f920419506b97f7abf6d3c1119bfb5e8723b Mon Sep 17 00:00:00 2001 From: zhangc43 Date: Tue, 23 Aug 2016 08:29:42 -0400 Subject: [PATCH 23/88] remove space before preprocessor --- code/DAMASK_spectral.f90 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/code/DAMASK_spectral.f90 b/code/DAMASK_spectral.f90 index 1d3a0a0cb..2793b502c 100644 --- a/code/DAMASK_spectral.f90 +++ b/code/DAMASK_spectral.f90 @@ -710,12 +710,12 @@ program DAMASK_spectral call PETScFinalize(ierr); CHKERRQ(ierr) - #ifdef _OPENMP +#ifdef _OPENMP call MPI_finalize(i) if (i /= 0_pInt) then call IO_error(error_ID=894, el=i, ext_msg="Finalize()") endif - #endif +#endif if (notConvergedCounter > 0_pInt) call quit(3_pInt) ! error if some are not converged call quit(0_pInt) ! no complains ;) From 7710f4d444556116f8da91c8189b763efab1c942 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 23 Aug 2016 17:21:17 +0200 Subject: [PATCH 24/88] hybridIA (stand alone script and DAMASK) need standard ASCII table export of TSL OIM is similar to standard ASCII table, simply add correct header including labels --- processing/pre/OIMlinear2linearODF.py | 106 -------------------------- 1 file changed, 106 deletions(-) delete mode 100755 processing/pre/OIMlinear2linearODF.py diff --git a/processing/pre/OIMlinear2linearODF.py b/processing/pre/OIMlinear2linearODF.py deleted file mode 100755 index d50c07a37..000000000 --- a/processing/pre/OIMlinear2linearODF.py +++ /dev/null @@ -1,106 +0,0 @@ -#!/usr/bin/env python2.7 -# -*- coding: UTF-8 no BOM -*- - -import os,sys -from optparse import OptionParser -import numpy as np -import damask - -scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName,damask.version]) - -sampleSym = { 'Orthotropic' : (90,90,90), - 'Triclinic' : (360,180,360) - } - -# -------------------------------------------------------------------- -# MAIN -# -------------------------------------------------------------------- - -parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """ -Transform the binned texture data from "TSL OIM Analysis" into linear ODF data, - -""", version = scriptID) - -parser.add_option('-s', '--symmetry', dest='symmetry', choices=sampleSym.keys(), - metavar = 'string', - help='Sample symmetry {%s} [Triclinic]'%(' '.join(sampleSym.keys()))) - -parser.set_defaults(symmetry = 'Triclinic') - -(options,filenames) = parser.parse_args() - -#--- setup file handles --------------------------------------------------------------------------- -files = [] -if filenames == []: - files.append({'name':'STDIN', - 'input':sys.stdin, - 'output':sys.stdout, - 'croak':sys.stderr, - }) -else: - for name in filenames: - if os.path.exists(name): - files.append({'name':name, - 'input':open(name), - 'output':open(name+'_tmp','w'), - 'croak':sys.stdout, - }) - -#--- loop over input files ------------------------------------------------------------------------ -for file in files: - file['croak'].write('\033[1m' + scriptName + '\033[0m: ' + (file['name'] if file['name'] != 'STDIN' else '') + '\n') - - while True: # read header (forward and get bin Size) - line = file['input'].readline() - words = line.split() - if len(words)>=3: - if words[1]=='Bin' and words[2]=='Size:': binSize=float(words[3][:-1]) - if not line.startswith('#'): break - - delta = [sampleSym[options.symmetry][i]/binSize for i in xrange(3)] - - nPhi1,nPHI,nPhi2 = map(int,delta) - dPhi1,dPHI,dPhi2 = 
[sampleSym[options.symmetry][i]/delta[i] for i in xrange(3)] - - N = (nPhi1-1)*(nPHI-1)*(nPhi2-1) - - - ODF = [[[[None] for k in range(nPhi2)] for j in range(nPHI)] for i in range(nPhi1)] - linear = [None]*N - - ODF = np.empty([nPhi1,nPHI,nPhi2],'d') - - for iPhi1 in range(nPhi1): - for iPHI in range(nPHI): - for iPhi2 in range(nPhi2): - ODF[iPhi1,iPHI,iPhi2] = float(line.split()[3])*0.125 # extract intensity (in column 4) and weight by 1/8 - line = file['input'].readline() - - for iPhi1 in range(nPhi1-1): - for iPHI in range(nPHI-1): - for iPhi2 in range(nPhi2-1): - linear[iPhi1*(nPHI-1)*(nPhi2-1)+iPHI*(nPhi2-1)+iPhi2] =\ - ODF[iPhi1 ,iPHI ,iPhi2 ] +\ - ODF[iPhi1 ,iPHI ,iPhi2+1] +\ - ODF[iPhi1 ,iPHI+1,iPhi2 ] +\ - ODF[iPhi1 ,iPHI+1,iPhi2+1] +\ - ODF[iPhi1+1,iPHI ,iPhi2 ] +\ - ODF[iPhi1+1,iPHI ,iPhi2+1] +\ - ODF[iPhi1+1,iPHI+1,iPhi2 ] +\ - ODF[iPhi1+1,iPHI+1,iPhi2+1] - - - file['output'].write('4 header\n') - file['output'].write('limit phi1 %-6.2f Phi %-6.2f phi2 %-6.2f\n'%sampleSym[options.symmetry]) - file['output'].write('delta phi1 %-6.2f Phi %-6.2f phi2 %-6.2f\n'%(dPhi1,dPHI,dPhi2)) - file['output'].write('centration cell-centered\n') - file['output'].write('density\n') - - for i in range(N): - file['output'].write('%g\n'%(linear[i])) - -#--- output finalization -------------------------------------------------------------------------- - if file['name'] != 'STDIN': - file['output'].close() - os.rename(file['name']+'_tmp',os.path.splitext(file['name'])[0] +'.linearODF') From 5b6f41cb6b7c73976c5cf74922a17e31ed7833c1 Mon Sep 17 00:00:00 2001 From: Test User Date: Wed, 24 Aug 2016 04:27:41 +0200 Subject: [PATCH 25/88] updated version information after successful test of v2.0.1-47-gaf10f92 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 921b19cbd..a4ea1f3b9 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.1-43-g64ac05f +v2.0.1-47-gaf10f92 From f984f1ebea85ff8dc502330684b628945aa95b62 Mon Sep 17 00:00:00 2001 From: Zhuowen Zhao Date: Wed, 24 Aug 2016 16:05:50 -0400 Subject: [PATCH 26/88] enable floating point data and corrected counting of microstructures --- processing/pre/geom_translate.py | 39 +++++++++++++++++++------------- 1 file changed, 23 insertions(+), 16 deletions(-) diff --git a/processing/pre/geom_translate.py b/processing/pre/geom_translate.py index 83c71aa8d..f0d39e66f 100755 --- a/processing/pre/geom_translate.py +++ b/processing/pre/geom_translate.py @@ -30,30 +30,37 @@ parser.add_option('-s', '--substitute', dest = 'substitute', action = 'extend', metavar = '', help = 'substitutions of microstructure indices from,to,from,to,...') +parser.add_option('--float', + dest = 'real', + action = 'store_true', + help = 'use float input') parser.set_defaults(origin = (0.0,0.0,0.0), microstructure = 0, substitute = [], + real = False, ) (options, filenames) = parser.parse_args() +datatype = 'f' if options.real else 'i' + sub = {} -for i in xrange(len(options.substitute)/2): # split substitution list into "from" -> "to" +for i in xrange(len(options.substitute)/2): # split substitution list into "from" -> "to" sub[int(options.substitute[i*2])] = int(options.substitute[i*2+1]) -# --- loop over input files ------------------------------------------------------------------------- +# --- loop over input files ---------------------------------------------------------------------- if filenames == []: filenames = [None] for name in filenames: - try: - table = damask.ASCIItable(name = name, - buffered = False, labeled = False) + try: 
table = damask.ASCIItable(name = name, + buffered = False, + labeled = False) except: continue damask.util.report(scriptName,name) -# --- interpret header ---------------------------------------------------------------------------- +# --- interpret header --------------------------------------------------------------------------- table.head_read() info,extra_header = table.head_getGeom() @@ -73,9 +80,9 @@ for name in filenames: table.close(dismiss = True) continue -# --- read data ------------------------------------------------------------------------------------ +# --- read data ---------------------------------------------------------------------------------- - microstructure = table.microstructure_read(info['grid']) # read microstructure + microstructure = table.microstructure_read(info['grid'],datatype) # read microstructure # --- do work ------------------------------------------------------------------------------------ @@ -90,9 +97,9 @@ for name in filenames: substituted += options.microstructure # shift microstructure indices newInfo['origin'] = info['origin'] + options.origin - newInfo['microstructures'] = substituted.max() + newInfo['microstructures'] = len(np.unique(substituted)) -# --- report --------------------------------------------------------------------------------------- +# --- report ------------------------------------------------------------------------------------- remarks = [] if (any(newInfo['origin'] != info['origin'])): @@ -101,7 +108,7 @@ for name in filenames: remarks.append('--> microstructures: %i'%newInfo['microstructures']) if remarks != []: damask.util.croak(remarks) -# --- write header --------------------------------------------------------------------------------- +# --- write header ------------------------------------------------------------------------------- table.labels_clear() table.info_clear() @@ -116,12 +123,12 @@ for name in filenames: ]) table.head_write() -# --- write microstructure information ------------------------------------------------------------ +# --- write microstructure information ----------------------------------------------------------- - formatwidth = int(math.floor(math.log10(microstructure.max())+1)) + format = '%g' if options.real else '%{}i'.format(int(math.floor(math.log10(microstructure.max())+1))) table.data = substituted.reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose() - table.data_writeArray('%%%ii'%(formatwidth),delimiter = ' ') + table.data_writeArray(format,delimiter = ' ') -# --- output finalization -------------------------------------------------------------------------- +# --- output finalization ------------------------------------------------------------------------ - table.close() # close ASCII table + table.close() # close ASCII table From 64db098e2a0701589926278c81b7c998f87eadea Mon Sep 17 00:00:00 2001 From: Test User Date: Thu, 25 Aug 2016 04:27:12 +0200 Subject: [PATCH 27/88] updated version information after successful test of v2.0.1-49-gf984f1e --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index a4ea1f3b9..a0ee30953 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.1-47-gaf10f92 +v2.0.1-49-gf984f1e From 60765067389f77f21bbedcbfac0655fc71899c4f Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Thu, 25 Aug 2016 11:57:19 -0400 Subject: [PATCH 28/88] general polishing and removal of redundant do-loop --- code/DAMASK_spectral.f90 | 53 +++++++++++++++++++--------------------- 1 file changed, 25 insertions(+), 28 
deletions(-) diff --git a/code/DAMASK_spectral.f90 b/code/DAMASK_spectral.f90 index 2793b502c..67e3f4042 100644 --- a/code/DAMASK_spectral.f90 +++ b/code/DAMASK_spectral.f90 @@ -231,8 +231,6 @@ program DAMASK_spectral endif do j = 1_pInt, 9_pInt temp_maskVector(j) = IO_stringValue(line,chunkPos,i+j) /= '*' ! true if not a * - enddo - do j = 1_pInt,9_pInt if (temp_maskVector(j)) temp_valueVector(j) = IO_floatValue(line,chunkPos,i+j) ! read value where applicable enddo loadCases(currentLoadCase)%deformation%maskLogical = & ! logical mask in 3x3 notation @@ -244,8 +242,6 @@ program DAMASK_spectral temp_valueVector = 0.0_pReal do j = 1_pInt, 9_pInt temp_maskVector(j) = IO_stringValue(line,chunkPos,i+j) /= '*' ! true if not an asterisk - enddo - do j = 1_pInt,9_pInt if (temp_maskVector(j)) temp_valueVector(j) = IO_floatValue(line,chunkPos,i+j) ! read value where applicable enddo loadCases(currentLoadCase)%P%maskLogical = transpose(reshape(temp_maskVector,[ 3,3])) @@ -302,14 +298,14 @@ program DAMASK_spectral write(6,'(1x,a,i6)') 'load case: ', currentLoadCase if (.not. loadCases(currentLoadCase)%followFormerTrajectory) & write(6,'(2x,a)') 'drop guessing along trajectory' - if (loadCases(currentLoadCase)%deformation%myType=='l') then + if (loadCases(currentLoadCase)%deformation%myType == 'l') then do j = 1_pInt, 3_pInt if (any(loadCases(currentLoadCase)%deformation%maskLogical(j,1:3) .eqv. .true.) .and. & any(loadCases(currentLoadCase)%deformation%maskLogical(j,1:3) .eqv. .false.)) & errorID = 832_pInt ! each row should be either fully or not at all defined enddo write(6,'(2x,a)') 'velocity gradient:' - else if (loadCases(currentLoadCase)%deformation%myType=='f') then + else if (loadCases(currentLoadCase)%deformation%myType == 'f') then write(6,'(2x,a)') 'deformation gradient at end of load case:' else write(6,'(2x,a)') 'deformation gradient rate:' @@ -318,13 +314,13 @@ program DAMASK_spectral if(loadCases(currentLoadCase)%deformation%maskLogical(i,j)) then write(6,'(2x,f12.7)',advance='no') loadCases(currentLoadCase)%deformation%values(i,j) else - write(6,'(2x,12a)',advance='no') ' * ' + write(6,'(2x,12a)',advance='no') ' * ' endif enddo; write(6,'(/)',advance='no') enddo if (any(loadCases(currentLoadCase)%P%maskLogical .eqv. & - loadCases(currentLoadCase)%deformation%maskLogical)) errorID = 831_pInt ! exclusive or masking only - if (any(loadCases(currentLoadCase)%P%maskLogical .and. & + loadCases(currentLoadCase)%deformation%maskLogical)) errorID = 831_pInt ! exclusive or masking only + if (any(loadCases(currentLoadCase)%P%maskLogical .and. & transpose(loadCases(currentLoadCase)%P%maskLogical) .and. & reshape([ .false.,.true.,.true.,.true.,.false.,.true.,.true.,.true.,.false.],[ 3,3]))) & errorID = 838_pInt ! no rotation is allowed by stress BC @@ -333,12 +329,12 @@ program DAMASK_spectral if(loadCases(currentLoadCase)%P%maskLogical(i,j)) then write(6,'(2x,f12.7)',advance='no') loadCases(currentLoadCase)%P%values(i,j)*1e-9_pReal else - write(6,'(2x,12a)',advance='no') ' * ' + write(6,'(2x,12a)',advance='no') ' * ' endif enddo; write(6,'(/)',advance='no') enddo if (any(abs(math_mul33x33(loadCases(currentLoadCase)%rotation, & - math_transpose33(loadCases(currentLoadCase)%rotation))-math_I3) >& + math_transpose33(loadCases(currentLoadCase)%rotation))-math_I3) > & reshape(spread(tol_math_check,1,9),[ 3,3]))& .or. abs(math_det33(loadCases(currentLoadCase)%rotation)) > & 1.0_pReal + tol_math_check) errorID = 846_pInt ! 
given rotation matrix contains strain @@ -378,10 +374,10 @@ program DAMASK_spectral call Polarisation_init case default - call IO_error(error_ID = 891, ext_msg = trim(spectral_solver)) - - end select - + call IO_error(error_ID = 891_pInt, ext_msg = trim(spectral_solver)) + + end select + case(FIELD_THERMAL_ID) call spectral_thermal_init @@ -428,29 +424,30 @@ program DAMASK_spectral allocate(outputSize(worldsize), source = 0_MPI_OFFSET_KIND) outputSize(worldrank+1) = size(materialpoint_results,kind=MPI_OFFSET_KIND)*int(pReal,MPI_OFFSET_KIND) call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_allreduce') + if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_allreduce') call MPI_file_open(PETSC_COMM_WORLD, & trim(getSolverWorkingDirectoryName())//trim(getSolverJobName())//'.spectralOut', & MPI_MODE_WRONLY + MPI_MODE_APPEND, & MPI_INFO_NULL, & resUnit, & ierr) - if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_open') + if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_open') call MPI_file_get_position(resUnit,fileOffset,ierr) ! get offset from header - if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_get_position') + if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_get_position') fileOffset = fileOffset + sum(outputSize(1:worldrank)) ! offset of my process in file (header + processes before me) call MPI_file_seek (resUnit,fileOffset,MPI_SEEK_SET,ierr) - if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_seek') + if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_seek') if (.not. appendToOutFile) then ! if not restarting, write 0th increment - do i=1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output - outputIndex=int([(i-1_pInt)*((maxRealOut)/materialpoint_sizeResults)+1_pInt, & - min(i*((maxRealOut)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt) - call MPI_file_write(resUnit,reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)),& - [(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), & - (outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults,& + do i = 1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output + outputIndex = int([(i-1_pInt)*((maxRealOut)/materialpoint_sizeResults)+1_pInt, & + min(i*((maxRealOut)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt) + call MPI_file_write(resUnit, & + reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)), & + [(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), & + (outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults, & MPI_DOUBLE, MPI_STATUS_IGNORE, ierr) - if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_write') + if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_write') enddo fileOffset = fileOffset + sum(outputSize) ! forward to current file position if (worldrank == 0) & @@ -489,7 +486,7 @@ program DAMASK_spectral endif timeinc = timeinc / 2.0_pReal**real(cutBackLevel,pReal) ! 
depending on cut back level, decrease time step - forwarding: if(totalIncsCounter >= restartInc) then + forwarding: if (totalIncsCounter >= restartInc) then stepFraction = 0_pInt !-------------------------------------------------------------------------------------------------- @@ -595,7 +592,7 @@ program DAMASK_spectral guess,timeinc,timeIncOld,remainingLoadCaseTime) end select - if(.not. solres(field)%converged) exit ! no solution found + if (.not. solres(field)%converged) exit ! no solution found enddo stagIter = stagIter + 1_pInt stagIterate = stagIter < stagItMax .and. & From f9c3d335fcf60a1bba377215191a0ff98668e902 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Thu, 25 Aug 2016 11:59:04 -0400 Subject: [PATCH 29/88] two new methods to convey deemphasis and deletion --- lib/damask/util.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/lib/damask/util.py b/lib/damask/util.py index cfc44f26c..4aec363c5 100644 --- a/lib/damask/util.py +++ b/lib/damask/util.py @@ -20,6 +20,7 @@ class bcolors: FAIL = '\033[91m' ENDC = '\033[0m' BOLD = '\033[1m' + DIM = '\033[2m' UNDERLINE = '\033[4m' def disable(self): @@ -70,9 +71,19 @@ def report_geom(info, # ----------------------------- def emph(what): - """emphasizes string on screen""" + """boldens string""" return bcolors.BOLD+srepr(what)+bcolors.ENDC +# ----------------------------- +def deemph(what): + """dims string""" + return bcolors.DIM+srepr(what)+bcolors.ENDC + +# ----------------------------- +def delete(what): + """dims string""" + return bcolors.DIM+srepr(what)+bcolors.ENDC + # ----------------------------- def execute(cmd, streamIn = None, From d3eb3451e3c9bca2e97fb75537737c7670a2029d Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Thu, 25 Aug 2016 12:00:10 -0400 Subject: [PATCH 30/88] improved readability by splitting to one file per line --- code/Makefile | 113 +++++++++++++++++++++++++++++++++++++------------- 1 file changed, 85 insertions(+), 28 deletions(-) diff --git a/code/Makefile b/code/Makefile index 7e5a130b1..331feec27 100644 --- a/code/Makefile +++ b/code/Makefile @@ -257,10 +257,10 @@ COMPILE_OPTIONS_gfortran :=-DDAMASKVERSION=\"${DAMASKVERSION}\"\ #-Wunsafe-loop-optimizations: warn if the loop cannot be optimized due to nontrivial assumptions. 
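An aside on the util.py additions above: emph, deemph, and delete all follow the same pattern of wrapping a string in an ANSI SGR escape sequence and resetting afterwards. A minimal self-contained sketch of that pattern, assuming an ANSI-capable terminal; the helper name style() is made up for illustration:

import sys

BOLD, DIM, ENDC = '\033[1m', '\033[2m', '\033[0m'  # same SGR codes as bcolors.BOLD/DIM/ENDC

def style(code, what):                             # hypothetical helper, not part of the patch
    return code + str(what) + ENDC                 # reset attributes so styling does not leak

sys.stdout.write(style(BOLD, 'name') + style(DIM, '.ext') + '\n')

Resetting with ENDC after every fragment is what lets the symlink scripts below combine emphasized and dimmed pieces on a single output line.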
#-Wstrict-overflow: -DEBUG_OPTIONS_gfortran :=-g\ - -fbacktrace\ - -fdump-core\ - -fcheck=all\ +DEBUG_OPTIONS_gfortran :=-g \ + -fbacktrace \ + -fdump-core \ + -fcheck=all \ -ffpe-trap=invalid,zero,overflow ################################################################################################### @@ -300,37 +300,60 @@ COMPILE =$(OPENMP_FLAG_$(F90)) $(STANDARD_CHECK_$(F90)) $(OPTIMIZATION_$ COMPILE_MAXOPTI =$(OPENMP_FLAG_$(F90)) $(STANDARD_CHECK_$(F90)) $(OPTIMIZATION_$(MAXOPTI)_$(F90)) $(COMPILE_OPTIONS_$(F90)) $(INCLUDE_DIRS) $(PRECISION_$(F90)) ################################################################################################### SOURCE_FILES = \ - source_thermal_dissipation.o source_thermal_externalheat.o \ - source_damage_isoBrittle.o source_damage_isoDuctile.o source_damage_anisoBrittle.o source_damage_anisoDuctile.o \ - source_vacancy_phenoplasticity.o source_vacancy_irradiation.o source_vacancy_thermalfluc.o + source_thermal_dissipation.o \ + source_thermal_externalheat.o \ + source_damage_isoBrittle.o \ + source_damage_isoDuctile.o \ + source_damage_anisoBrittle.o \ + source_damage_anisoDuctile.o \ + source_vacancy_phenoplasticity.o \ + source_vacancy_irradiation.o \ + source_vacancy_thermalfluc.o KINEMATICS_FILES = \ - kinematics_cleavage_opening.o kinematics_slipplane_opening.o \ + kinematics_cleavage_opening.o \ + kinematics_slipplane_opening.o \ kinematics_thermal_expansion.o \ - kinematics_vacancy_strain.o kinematics_hydrogen_strain.o + kinematics_vacancy_strain.o \ + kinematics_hydrogen_strain.o PLASTIC_FILES = \ - plastic_dislotwin.o plastic_disloUCLA.o plastic_isotropic.o \ - plastic_phenopowerlaw.o plastic_titanmod.o plastic_nonlocal.o plastic_none.o \ + plastic_dislotwin.o \ + plastic_disloUCLA.o \ + plastic_isotropic.o \ + plastic_phenopowerlaw.o \ + plastic_titanmod.o \ + plastic_nonlocal.o \ + plastic_none.o \ plastic_phenoplus.o THERMAL_FILES = \ - thermal_isothermal.o thermal_adiabatic.o thermal_conduction.o + thermal_isothermal.o \ + thermal_adiabatic.o \ + thermal_conduction.o DAMAGE_FILES = \ - damage_none.o damage_local.o damage_nonlocal.o + damage_none.o \ + damage_local.o \ + damage_nonlocal.o VACANCYFLUX_FILES = \ - vacancyflux_isoconc.o vacancyflux_isochempot.o vacancyflux_cahnhilliard.o + vacancyflux_isoconc.o \ + vacancyflux_isochempot.o \ + vacancyflux_cahnhilliard.o POROSITY_FILES = \ - porosity_none.o porosity_phasefield.o + porosity_none.o \ + porosity_phasefield.o HYDROGENFLUX_FILES = \ - hydrogenflux_isoconc.o hydrogenflux_cahnhilliard.o + hydrogenflux_isoconc.o \ + hydrogenflux_cahnhilliard.o HOMOGENIZATION_FILES = \ - homogenization_RGC.o homogenization_isostrain.o homogenization_none.o + homogenization_RGC.o \ + homogenization_isostrain.o \ + homogenization_none.o ##################### # Spectral Solver @@ -351,11 +374,28 @@ DAMASK_spectral.o: INTERFACENAME := spectral_interface.f90 SPECTRAL_SOLVER_FILES = spectral_mech_AL.o spectral_mech_Basic.o spectral_mech_Polarisation.o \ spectral_thermal.o spectral_damage.o -SPECTRAL_FILES = C_routines.o system_routines.o prec.o DAMASK_interface.o IO.o numerics.o debug.o math.o \ - FEsolving.o mesh.o material.o lattice.o \ - $(SOURCE_FILES) $(KINEMATICS_FILES) $(PLASTIC_FILES) constitutive.o \ +SPECTRAL_FILES = C_routines.o \ + system_routines.o \ + prec.o \ + DAMASK_interface.o \ + IO.o \ + numerics.o \ + debug.o \ + math.o \ + FEsolving.o \ + mesh.o \ + material.o \ + lattice.o \ + $(SOURCE_FILES) \ + $(KINEMATICS_FILES) \ + $(PLASTIC_FILES) \ + constitutive.o \ crystallite.o \ - 
$(THERMAL_FILES) $(DAMAGE_FILES) $(VACANCYFLUX_FILES) $(HYDROGENFLUX_FILES) $(POROSITY_FILES) \ + $(THERMAL_FILES) \ + $(DAMAGE_FILES) \ + $(VACANCYFLUX_FILES) \ + $(HYDROGENFLUX_FILES) \ + $(POROSITY_FILES) \ $(HOMOGENIZATION_FILES) homogenization.o \ CPFEM2.o \ spectral_utilities.o \ @@ -401,14 +441,31 @@ DAMASK_FEM.exe: INCLUDE_DIRS += -I./ FEM_SOLVER_FILES = FEM_mech.o FEM_thermal.o FEM_damage.o FEM_vacancyflux.o FEM_porosity.o FEM_hydrogenflux.o -FEM_FILES = prec.o DAMASK_interface.o FEZoo.o IO.o numerics.o debug.o math.o \ - FEsolving.o mesh.o material.o lattice.o \ - $(SOURCE_FILES) $(KINEMATICS_FILES) $(PLASTIC_FILES) constitutive.o \ +FEM_FILES = prec.o \ + DAMASK_interface.o \ + FEZoo.o \ + IO.o \ + numerics.o \ + debug.o \ + math.o \ + FEsolving.o \ + mesh.o \ + material.o \ + lattice.o \ + $(SOURCE_FILES) \ + $(KINEMATICS_FILES) \ + $(PLASTIC_FILES) \ + constitutive.o \ crystallite.o \ - $(THERMAL_FILES) $(DAMAGE_FILES) $(VACANCYFLUX_FILES) $(HYDROGENFLUX_FILES) $(POROSITY_FILES) \ + $(THERMAL_FILES) \ + $(DAMAGE_FILES) \ + $(VACANCYFLUX_FILES) \ + $(HYDROGENFLUX_FILES) \ + $(POROSITY_FILES) \ $(HOMOGENIZATION_FILES) homogenization.o \ CPFEM.o \ - FEM_utilities.o $(FEM_SOLVER_FILES) + FEM_utilities.o \ + $(FEM_SOLVER_FILES) DAMASK_FEM.exe: DAMASK_FEM_driver.o $(PREFIX) $(LINKERNAME) $(OPENMP_FLAG_$(F90)) $(LINK_OPTIONS_$(F90)) $(STANDARD_CHECK_$(F90)) $(OPTIMIZATION_$(MAXOPTI)_$(F90)) \ @@ -658,8 +715,8 @@ tidy: @rm -rf *.inst.f90 # for instrumentation @rm -rf *.pomp.f90 # for instrumentation @rm -rf *.pp.f90 # for instrumentation - @rm -rf *.pdb # for instrumnentation - @rm -rf *.opari.inc # for instrumnentation + @rm -rf *.pdb # for instrumentation + @rm -rf *.opari.inc # for instrumentation .PHONY: cleanDAMASK cleanDAMASK: From eb9f6c939c4b83e746a4a4293937e6255f32cdf3 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Thu, 25 Aug 2016 12:08:19 -0400 Subject: [PATCH 31/88] condensed output, DAMASK_marc symlinks are relative, add symlink pruning --- installation/symlink_Code.py | 56 +++++++++++++++++++++--------- installation/symlink_Processing.py | 40 ++++++++++++++++----- 2 files changed, 71 insertions(+), 25 deletions(-) diff --git a/installation/symlink_Code.py b/installation/symlink_Code.py index 1e67c8a7b..933f31c8e 100755 --- a/installation/symlink_Code.py +++ b/installation/symlink_Code.py @@ -11,7 +11,15 @@ bin_link = { \ ], } -MarcReleases =[2011,2012,2013,2013.1,2014,2014.2,2015] +MarcReleases =[ \ + '2011', + '2012', + '2013', + '2013.1', + '2014', + '2014.2', + '2015', + ] damaskEnv = damask.Environment() baseDir = damaskEnv.relPath('code/') @@ -20,27 +28,41 @@ binDir = damaskEnv.options['DAMASK_BIN'] if not os.path.isdir(binDir): os.mkdir(binDir) -for dir in bin_link: - for file in bin_link[dir]: - src = os.path.abspath(os.path.join(baseDir,dir,file)) - if os.path.exists(src): - sym_link = os.path.abspath(os.path.join(binDir,\ - {True: dir, - False:os.path.splitext(file)[0]}[file == ''])) - if os.path.lexists(sym_link): os.remove(sym_link) - os.symlink(src,sym_link) - sys.stdout.write(sym_link+' -> '+src+'\n') +sys.stdout.write('\nsymbolic linking...\n') +for subDir in bin_link: + theDir = os.path.abspath(os.path.join(baseDir,subDir)) + sys.stdout.write('\n'+binDir+' ->\n'+theDir+damask.util.deemph(' ...')+'\n') + + for theFile in bin_link[subDir]: + theName,theExt = os.path.splitext(theFile) + src = os.path.abspath(os.path.join(theDir,theFile)) + + if os.path.exists(src): + sym_link = os.path.abspath(os.path.join(binDir,subDir if theFile == '' else theName)) 
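The commit message above names two idioms worth spelling out. os.symlink stores its first argument verbatim, so a bare file name is resolved relative to the directory holding the link; that is what makes the versioned DAMASK_marc links relative. And a link whose target has vanished still satisfies islink and lexists but no longer satisfies exists, which is the pruning criterion used further below. A minimal sketch, with file names assumed for illustration:

import os

target, link = 'DAMASK_marc.f90', 'DAMASK_marc2015.f90'     # names assumed for illustration
if os.path.lexists(link): os.remove(link)                   # lexists is true even for a broken link
os.symlink(target, link)                                    # stores the relative path exactly as given
broken = os.path.islink(link) and not os.path.exists(link)  # exists follows the link, islink does not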
+ + if os.path.lexists(sym_link): + os.remove(sym_link) + output = theName+damask.util.deemph(theExt) + else: + output = damask.util.emph(theName)+damask.util.deemph(theExt) + + sys.stdout.write(damask.util.deemph('... ')+output+'\n') + os.symlink(src,sym_link) + + +sys.stdout.write('\nMSC.Marc versioning...\n\n') +theMaster = 'DAMASK_marc.f90' for version in MarcReleases: - src = os.path.abspath(os.path.join(baseDir,'DAMASK_marc.f90')) + src = os.path.abspath(os.path.join(baseDir,theMaster)) if os.path.exists(src): - sym_link = os.path.abspath(os.path.join(baseDir,'DAMASK_marc'+str(version)+'.f90')) + sym_link = os.path.abspath(os.path.join(baseDir,'DAMASK_marc{}.f90'.format(version))) if os.path.lexists(sym_link): os.remove(sym_link) - sys.stdout.write(sym_link) + output = version else: - sys.stdout.write(damask.util.emph(sym_link)) + output = damask.util.emph(version) - os.symlink(src,sym_link) - sys.stdout.write(' -> '+src+'\n') + sys.stdout.write(' '+output+'\n') + os.symlink(theMaster,sym_link) diff --git a/installation/symlink_Processing.py b/installation/symlink_Processing.py index 6cb8f9135..d10b5af55 100755 --- a/installation/symlink_Processing.py +++ b/installation/symlink_Processing.py @@ -13,23 +13,47 @@ if not os.path.isdir(binDir): os.mkdir(binDir) #define ToDo list -processing_subDirs = ['pre','post','misc',] -processing_extensions = ['.py','.sh',] - +processing_subDirs = ['pre', + 'post', + 'misc', + ] +processing_extensions = ['.py', + '.sh', + ] + +sys.stdout.write('\nsymbolic linking...\n') + for subDir in processing_subDirs: theDir = os.path.abspath(os.path.join(baseDir,subDir)) + sys.stdout.write('\n'+binDir+' ->\n'+theDir+damask.util.deemph(' ...')+'\n') + for theFile in os.listdir(theDir): - if os.path.splitext(theFile)[1] in processing_extensions: # only consider files with proper extensions + theName,theExt = os.path.splitext(theFile) + if theExt in processing_extensions: # only consider files with proper extensions src = os.path.abspath(os.path.join(theDir,theFile)) - sym_link = os.path.abspath(os.path.join(binDir,os.path.splitext(theFile)[0])) + sym_link = os.path.abspath(os.path.join(binDir,theName)) if os.path.lexists(sym_link): os.remove(sym_link) - sys.stdout.write(sym_link) + output = theName+damask.util.deemph(theExt) else: - sys.stdout.write(damask.util.emph(sym_link)) + output = damask.util.emph(theName)+damask.util.deemph(theExt) + sys.stdout.write(damask.util.deemph('... ')+output+'\n') os.symlink(src,sym_link) - sys.stdout.write(' -> '+src+'\n') + + +sys.stdout.write('\npruning broken links...\n') + +brokenLinks = 0 + +for filename in os.listdir(binDir): + path = os.path.join(binDir,filename) + if os.path.islink(path) and not os.path.exists(path): + sys.stdout.write(' '+damask.util.delete(path)+'\n') + os.remove(path) + brokenLinks += 1 + +sys.stdout.write(('none.' 
if brokenLinks == 0 else '')+'\n') From 85abf84186cd89c9d2e8fd2444b1117fa12b14b7 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Thu, 25 Aug 2016 12:15:03 -0400 Subject: [PATCH 32/88] generalized to user-specified mapping function instead of hardwired avg --- processing/post/averageTable.py | 47 ++++++++++++++++++++++++--------- 1 file changed, 34 insertions(+), 13 deletions(-) diff --git a/processing/post/averageTable.py b/processing/post/averageTable.py index 25c09625c..f9c6693ed 100755 --- a/processing/post/averageTable.py +++ b/processing/post/averageTable.py @@ -2,6 +2,7 @@ # -*- coding: UTF-8 no BOM -*- import os,sys +import math # noqa import numpy as np from optparse import OptionParser import damask @@ -14,7 +15,7 @@ scriptID = ' '.join([scriptName,damask.version]) # -------------------------------------------------------------------- parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """ -Replace all rows for which column 'label' has identical values by a single row containing their average. +Apply a user-specified function to condense all rows for which column 'label' has identical values into a single row. Output table will contain as many rows as there are different (unique) values in the grouping column. Examples: @@ -25,11 +26,33 @@ parser.add_option('-l','--label', dest = 'label', type = 'string', metavar = 'string', help = 'column label for grouping rows') +parser.add_option('-f','--function', + dest = 'function', + type = 'string', metavar = 'string', + help = 'mapping function [%default]') +parser.add_option('-a','--all', + dest = 'all', + action = 'store_true' + help = 'apply mapping function also to grouping column') + +parser.set_defaults(function = 'np.average') (options,filenames) = parser.parse_args() +funcModule,funcName = options.function.split('.') + +try: + mapFunction = getattr(locals().get(funcModule) or + globals().get(funcModule) or + __import__(funcModule), + funcName) +except: + mapFunction = None + if options.label is None: parser.error('no grouping column specified.') +if not hasattr(mapFunction,'__call__'): + parser.error('function "{}" is not callable.'.format(options.function)) # --- loop over input files ------------------------------------------------------------------------- @@ -38,10 +61,6 @@ if filenames == []: filenames = [None] for name in filenames: try: table = damask.ASCIItable(name = name, - outname = os.path.join( - os.path.split(name)[0], - options.label+'_averaged_'+os.path.split(name)[1] - ) if name else name, buffered = False) except: continue damask.util.report(scriptName,name) @@ -53,6 +72,8 @@ for name in filenames: damask.util.croak('column {} is not of scalar dimension.'.format(options.label)) table.close(dismiss = True) # close ASCIItable and remove empty file continue + else: + grpColumn = table.label_index(options.label) # ------------------------------------------ assemble info --------------------------------------- @@ -64,17 +85,17 @@ for name in filenames: table.data_readArray() rows,cols = table.data.shape - table.data = table.data[np.lexsort([table.data[:,table.label_index(options.label)]])] + table.data = table.data[np.lexsort([table.data[:,grpColumn]])] # sort data by grpColumn - values,index = np.unique(table.data[:,table.label_index(options.label)], return_index = True) - index = np.append(index,rows) - avgTable = np.empty((len(values), cols)) + values,index = np.unique(table.data[:,grpColumn], return_index = True) # unique grpColumn values and their 
positions + index = np.append(index,rows) # add termination position + grpTable = np.empty((len(values), cols)) # initialize output - for j in xrange(cols) : - for i in xrange(len(values)) : - avgTable[i,j] = np.average(table.data[index[i]:index[i+1],j]) + for i in xrange(len(values)): # iterate over groups (unique values in grpColumn) + grpTable[i] = np.apply_along_axis(mapFunction,0,table.data[index[i]:index[i+1]]) # apply mapping function + if not options.all: grpTable[i,grpColumn] = table.data[index[i],grpColumn] # restore grouping column value - table.data = avgTable + table.data = grpTable # ------------------------------------------ output result ------------------------------- From 55d6adf1b84e5164143e08b0f7f5ad252ef9b9dc Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Thu, 25 Aug 2016 12:15:41 -0400 Subject: [PATCH 33/88] renamed to reflect more general nature of script --- processing/post/{averageTable.py => groupTable.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename processing/post/{averageTable.py => groupTable.py} (100%) diff --git a/processing/post/averageTable.py b/processing/post/groupTable.py similarity index 100% rename from processing/post/averageTable.py rename to processing/post/groupTable.py From 891ac4d5855800d40ff5bac96dc0e8598eddfe34 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Thu, 25 Aug 2016 12:17:27 -0400 Subject: [PATCH 34/88] fixed comma syntax error --- processing/post/groupTable.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/processing/post/groupTable.py b/processing/post/groupTable.py index f9c6693ed..924105fba 100755 --- a/processing/post/groupTable.py +++ b/processing/post/groupTable.py @@ -32,7 +32,7 @@ parser.add_option('-f','--function', help = 'mapping function [%default]') parser.add_option('-a','--all', dest = 'all', - action = 'store_true' + action = 'store_true', help = 'apply mapping function also to grouping column') parser.set_defaults(function = 'np.average') From 2d43dbc88156cbde1751ca91a065cec533aab09e Mon Sep 17 00:00:00 2001 From: Test User Date: Fri, 26 Aug 2016 04:27:18 +0200 Subject: [PATCH 35/88] updated version information after successful test of v2.0.1-57-g891ac4d --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index a0ee30953..ea7f6e12c 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.1-49-gf984f1e +v2.0.1-57-g891ac4d From 4b02a55f4d34f45986ccf9e3e9312eb6e9d86b1f Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Tue, 30 Aug 2016 16:08:47 -0400 Subject: [PATCH 36/88] explicit type casting from boolean to int --- processing/post/vtk_rectilinearGrid.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/processing/post/vtk_rectilinearGrid.py b/processing/post/vtk_rectilinearGrid.py index 73a64c4aa..dfe376b3f 100755 --- a/processing/post/vtk_rectilinearGrid.py +++ b/processing/post/vtk_rectilinearGrid.py @@ -79,9 +79,9 @@ for name in filenames: coords = [np.unique(table.data[:,i]) for i in xrange(3)] if options.mode == 'cell': - coords = [0.5 * np.array([3.0 * coords[i][0] - coords[i][0 + len(coords[i]) > 1]] + \ + coords = [0.5 * np.array([3.0 * coords[i][0] - coords[i][0 + int(len(coords[i]) > 1)]] + \ [coords[i][j-1] + coords[i][j] for j in xrange(1,len(coords[i]))] + \ - [3.0 * coords[i][-1] - coords[i][-1 - (len(coords[i]) > 1)]]) for i in xrange(3)] + [3.0 * coords[i][-1] - coords[i][-1 - int(len(coords[i]) > 1)]]) for i in xrange(3)] grid = np.array(map(len,coords),'i') N = grid.prod() if options.mode == 
'point' else (grid-1).prod() From 22e7d6d4e129a9dd46a9c9c9c8b02cb7c81d029d Mon Sep 17 00:00:00 2001 From: Test User Date: Wed, 31 Aug 2016 04:26:22 +0200 Subject: [PATCH 37/88] updated version information after successful test of v2.0.1-59-g4b02a55 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index ea7f6e12c..8a765d7c8 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.1-57-g891ac4d +v2.0.1-59-g4b02a55 From d529eae4a44598626badb64d5d9af9dedb8a5e8c Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Wed, 31 Aug 2016 22:54:00 -0400 Subject: [PATCH 38/88] fixed MPI_reduce hiccup see http://stackoverflow.com/questions/17741574/in-place-mpi-reduce-crashes-with-openmpi --- code/spectral_utilities.f90 | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/code/spectral_utilities.f90 b/code/spectral_utilities.f90 index b9ed5245d..bb11ff294 100644 --- a/code/spectral_utilities.f90 +++ b/code/spectral_utilities.f90 @@ -237,7 +237,7 @@ subroutine utilities_init() grid1Red = grid(1)/2_pInt + 1_pInt wgt = 1.0/real(product(grid),pReal) - if (worldrank == 0) then + if (worldrank == 0_pInt) then write(6,'(a,3(i12 ))') ' grid a b c: ', grid write(6,'(a,3(es12.5))') ' size x y z: ', geomSize endif @@ -1015,10 +1015,19 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, & defgradDetMax = max(defgradDetMax,defgradDet) defgradDetMin = min(defgradDetMin,defgradDet) end do - call MPI_reduce(MPI_IN_PLACE,defgradDetMax,1,MPI_DOUBLE,MPI_MAX,0,PETSC_COMM_WORLD,ierr) - if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_Allreduce max') - call MPI_reduce(MPI_IN_PLACE,defgradDetMin,1,MPI_DOUBLE,MPI_MIN,0,PETSC_COMM_WORLD,ierr) - if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_Allreduce min') + + if (worldrank == 0_pInt) then + call MPI_reduce(MPI_IN_PLACE,defgradDetMax,1,MPI_DOUBLE,MPI_MAX,0,PETSC_COMM_WORLD,ierr) + else + call MPI_reduce(defgradDetMax,defgradDetMax,1,MPI_DOUBLE,MPI_MAX,0,PETSC_COMM_WORLD,ierr) + endif + if (ierr /= 0_pInt) call IO_error(894_pInt, ext_msg='MPI_Allreduce max') + if (worldrank == 0_pInt) then + call MPI_reduce(MPI_IN_PLACE,defgradDetMin,1,MPI_DOUBLE,MPI_MIN,0,PETSC_COMM_WORLD,ierr) + else + call MPI_reduce(defgradDetMin,defgradDetMin,1,MPI_DOUBLE,MPI_MIN,0,PETSC_COMM_WORLD,ierr) + endif + if (ierr /= 0_pInt) call IO_error(894_pInt, ext_msg='MPI_Allreduce min') if (worldrank == 0_pInt) then write(6,'(a,1x,es11.4)') ' max determinant of deformation =', defgradDetMax write(6,'(a,1x,es11.4)') ' min determinant of deformation =', defgradDetMin From 6db0a42eaef45ff7b65da7006bd5140377fb76a0 Mon Sep 17 00:00:00 2001 From: Chen Date: Thu, 1 Sep 2016 12:47:26 -0400 Subject: [PATCH 39/88] addvtk data now support tensor type (9 components) --- processing/post/vtk_addPointcloudData.py | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/processing/post/vtk_addPointcloudData.py b/processing/post/vtk_addPointcloudData.py index 340ef700e..cd767d5f4 100755 --- a/processing/post/vtk_addPointcloudData.py +++ b/processing/post/vtk_addPointcloudData.py @@ -37,12 +37,17 @@ parser.add_option('-v', '--vector', dest = 'vector', action = 'extend', metavar = '', help = 'vector value label(s)') +parser.add_option('-t', '--tensor', + dest = 'tensor', + action = 'extend', metavar = '', + help = 'tensor (3x3) value label(s)') parser.add_option('-c', '--color', dest='color', action='extend', metavar ='', help = 'RGB color tuples') parser.set_defaults(scalar = [], vector = 
[], + tensor = [], color = [], inplace = False, render = False, @@ -94,9 +99,10 @@ for name in filenames: errors = [] VTKarray = {} active = defaultdict(list) - + for datatype,dimension,label in [['scalar',1,options.scalar], ['vector',3,options.vector], + ['tensor',9,options.tensor], ['color',3,options.color], ]: for i,dim in enumerate(table.label_dimension(label)): @@ -107,7 +113,7 @@ for name in filenames: remarks.append('adding {} "{}"...'.format(datatype,me)) active[datatype].append(me) - if datatype in ['scalar','vector']: VTKarray[me] = vtk.vtkDoubleArray() + if datatype in ['scalar','vector', 'tensor']: VTKarray[me] = vtk.vtkDoubleArray() elif datatype == 'color': VTKarray[me] = vtk.vtkUnsignedCharArray() VTKarray[me].SetNumberOfComponents(dimension) @@ -119,20 +125,21 @@ for name in filenames: table.close(dismiss = True) continue -# ------------------------------------------ process data --------------------------------------- +# ------------------------------------------ process data --------------------------------------- while table.data_read(): # read next data line of ASCII table - + for datatype,labels in active.items(): # loop over scalar,color for me in labels: # loop over all requested items theData = [table.data[i] for i in table.label_indexrange(me)] # read strings if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData)) elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData)) + elif datatype == 'tensor': VTKarray[me].InsertNextTuple9(*map(float,theData)) elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0])) table.input_close() # close input ASCII table -# ------------------------------------------ add data --------------------------------------- +# ------------------------------------------ add data --------------------------------------- for datatype,labels in active.items(): # loop over scalar,color if datatype == 'color': @@ -145,7 +152,7 @@ for name in filenames: Polydata.Modified() if vtk.VTK_MAJOR_VERSION <= 5: Polydata.Update() -# ------------------------------------------ output result --------------------------------------- +# ------------------------------------------ output result --------------------------------------- writer = vtk.vtkXMLPolyDataWriter() writer.SetDataModeToBinary() @@ -155,7 +162,7 @@ for name in filenames: else: writer.SetInputData(Polydata) writer.Write() -# ------------------------------------------ render result --------------------------------------- +# ------------------------------------------ render result --------------------------------------- if options.render: mapper = vtk.vtkDataSetMapper() @@ -179,7 +186,7 @@ if options.render: iren = vtk.vtkRenderWindowInteractor() iren.SetRenderWindow(renWin) - + iren.Initialize() renWin.Render() iren.Start() From 32c4a20a46c5e7b9d0e1a3c4d8e5bd2203d57d8e Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Thu, 1 Sep 2016 15:37:49 -0400 Subject: [PATCH 40/88] clean up, added --debug to base class, renamed testPossible to feasible --- lib/damask/test/test.py | 69 +++++++++++++++++++++++------------------ 1 file changed, 39 insertions(+), 30 deletions(-) diff --git a/lib/damask/test/test.py b/lib/damask/test/test.py index c05a6474d..b54615c3a 100644 --- a/lib/damask/test/test.py +++ b/lib/damask/test/test.py @@ -17,40 +17,47 @@ class Test(): variants = [] - def __init__(self,test_description): + def __init__(self,description = ''): - logger = logging.getLogger() - logger.setLevel(0) fh = 
logging.FileHandler('test.log') # create file handler which logs even debug messages fh.setLevel(logging.DEBUG) - full = logging.Formatter('%(asctime)s - %(levelname)s: \n%(message)s') - fh.setFormatter(full) + fh.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s: \n%(message)s')) + ch = logging.StreamHandler(stream=sys.stdout) # create console handler with a higher log level ch.setLevel(logging.INFO) -# create formatter and add it to the handlers - plain = logging.Formatter('%(message)s') - ch.setFormatter(plain) -# add the handlers to the logger + ch.setFormatter(logging.Formatter('%(message)s')) + + logger = logging.getLogger() logger.addHandler(fh) logger.addHandler(ch) + logger.setLevel(0) - logging.info('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n' \ - +'----------------------------------------------------------------\n' \ - +'| '+test_description+'\n' \ - +'----------------------------------------------------------------') + logging.info('\n'.join(['+'*40, + '-'*40, + '| '+description, + '-'*40, + ])) self.dirBase = os.path.dirname(os.path.realpath(sys.modules[self.__class__.__module__].__file__)) - self.parser = OptionParser( - description = test_description+' (using class: {})'.format(damask.version), - usage='./test.py [options]') - self.updateRequested = False - self.parser.add_option("-d", "--debug", action="store_true",\ - dest="debug",\ - help="debug run, don't calculate but use existing results") - self.parser.add_option("-p", "--pass", action="store_true",\ - dest="accept",\ - help="calculate results but always consider test as successfull") - self.parser.set_defaults(debug=False, - accept=False) + + self.parser = OptionParser(description = '{} (using class: {})'.format(description,damask.version), + usage = './test.py [options]') + self.parser.add_option("-d", "--debug", + action = "store_true", + dest = "debug", + help = "debug run, don't calculate but use existing results") + self.parser.add_option("-p", "--pass", + action = "store_true", + dest = "accept", + help = "calculate results but always consider test as successfull") + self.parser.add_option("-u", "--update", + action = "store_true", + dest = "update", + help = "use current test results as new reference" + ) + self.parser.set_defaults(debug = False, + accept = False, + update = False, + ) def execute(self): """Run all variants and report first failure.""" @@ -65,15 +72,17 @@ class Test(): return variant+1 # return culprit return 0 else: - if not self.testPossible(): return -1 + if not self.feasible(): return -1 + self.clean() self.prepareAll() - for variant in xrange(len(self.variants)): + + for variant,name in enumerate(self.variants): try: self.prepare(variant) self.run(variant) self.postprocess(variant) - if self.updateRequested: # update requested + if self.options.update: # update requested self.update(variant) elif not (self.options.accept or self.compare(variant)): # no update, do comparison return variant+1 # return culprit @@ -82,8 +91,8 @@ class Test(): return variant+1 # return culprit return 0 - def testPossible(self): - """Check if test is possible or not (e.g. no license available).""" + def feasible(self): + """Check whether test is possible or not (e.g. 
no license available).""" return True def clean(self): From 20d1164e5d8ed5ac2b7cd44132b9c765ecb63dfc Mon Sep 17 00:00:00 2001 From: Chen Date: Thu, 1 Sep 2016 16:57:29 -0400 Subject: [PATCH 41/88] add tensor support for vtk_addRectlinearGridData --- processing/post/vtk_addRectilinearGridData.py | 21 ++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/processing/post/vtk_addRectilinearGridData.py b/processing/post/vtk_addRectilinearGridData.py index 63e0bf783..283a076c7 100755 --- a/processing/post/vtk_addRectilinearGridData.py +++ b/processing/post/vtk_addRectilinearGridData.py @@ -38,6 +38,10 @@ parser.add_option('-v', '--vector', dest = 'vector', action = 'extend', metavar = '', help = 'vector value label(s)') +parser.add_option('-t', '--tensor', + dest = 'tensor', + action = 'extend', metavar = '', + help = 'tensor (3x3) value label(s)') parser.add_option('-c', '--color', dest = 'color', action = 'extend', metavar = '', @@ -45,6 +49,7 @@ parser.add_option('-c', '--color', parser.set_defaults(scalar = [], vector = [], + tensor = [], color = [], inplace = False, render = False, @@ -92,9 +97,10 @@ for name in filenames: errors = [] VTKarray = {} active = defaultdict(list) - + for datatype,dimension,label in [['scalar',1,options.scalar], ['vector',3,options.vector], + ['tensor',9,options.tensor], ['color',3,options.color], ]: for i,dim in enumerate(table.label_dimension(label)): @@ -105,7 +111,7 @@ for name in filenames: remarks.append('adding {} "{}"...'.format(datatype,me)) active[datatype].append(me) - if datatype in ['scalar','vector']: VTKarray[me] = vtk.vtkDoubleArray() + if datatype in ['scalar','vector','tensor']: VTKarray[me] = vtk.vtkDoubleArray() elif datatype == 'color': VTKarray[me] = vtk.vtkUnsignedCharArray() VTKarray[me].SetNumberOfComponents(dimension) @@ -117,7 +123,7 @@ for name in filenames: table.close(dismiss = True) continue -# ------------------------------------------ process data --------------------------------------- +# ------------------------------------------ process data --------------------------------------- datacount = 0 @@ -129,11 +135,12 @@ for name in filenames: theData = [table.data[i] for i in table.label_indexrange(me)] # read strings if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData)) elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData)) + elif datatype == 'tensor': VTKarray[me].InsertNextTuple9(*map(float,theData)) elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0])) table.close() # close input ASCII table -# ------------------------------------------ add data --------------------------------------- +# ------------------------------------------ add data --------------------------------------- if datacount == Npoints: mode = 'point' elif datacount == Ncells: mode = 'cell' @@ -154,7 +161,7 @@ for name in filenames: rGrid.Modified() if vtk.VTK_MAJOR_VERSION <= 5: rGrid.Update() -# ------------------------------------------ output result --------------------------------------- +# ------------------------------------------ output result --------------------------------------- writer = vtk.vtkXMLRectilinearGridWriter() writer.SetDataModeToBinary() @@ -164,7 +171,7 @@ for name in filenames: else: writer.SetInputData(rGrid) writer.Write() -# ------------------------------------------ render result --------------------------------------- +# ------------------------------------------ render result --------------------------------------- 
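For the tensor support added above, a vtkDoubleArray with nine components stores one full 3x3 tensor per tuple, row major, through InsertNextTuple9, the same call this script now issues once per data row. A standalone sketch, assuming the Python vtk bindings are available; the array name is arbitrary:

import vtk

T = [1.0, 0.1, 0.0,
     0.1, 2.0, 0.0,
     0.0, 0.0, 3.0]              # row-major 3x3 tensor, here already symmetric
array = vtk.vtkDoubleArray()
array.SetNumberOfComponents(9)   # one tuple holds a complete tensor
array.SetName('stress')          # label assumed for illustration
array.InsertNextTuple9(*T)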
if options.render: mapper = vtk.vtkDataSetMapper() @@ -188,7 +195,7 @@ if options.render: iren = vtk.vtkRenderWindowInteractor() iren.SetRenderWindow(renWin) - + iren.Initialize() renWin.Render() iren.Start() From d9077805e497e05f929b759ab24ddc1abb2e9997 Mon Sep 17 00:00:00 2001 From: chen Date: Fri, 2 Sep 2016 09:30:49 -0400 Subject: [PATCH 42/88] Forced symmetric tensor when adding tensor to vtk VTK addTensor by default uses the lower triangle to populate its tensor object. Enforcing the tensor to be symmetric avoids unnecessary confusion when adding data (most stress/strain tensors should be symmetric by default, so it should not affect the results) --- processing/post/vtk_addPointcloudData.py | 13 ++++++++----- processing/post/vtk_addRectilinearGridData.py | 7 +++++-- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/processing/post/vtk_addPointcloudData.py b/processing/post/vtk_addPointcloudData.py index cd767d5f4..557b9fdd3 100755 --- a/processing/post/vtk_addPointcloudData.py +++ b/processing/post/vtk_addPointcloudData.py @@ -131,11 +131,14 @@ for name in filenames: for datatype,labels in active.items(): # loop over scalar,color for me in labels: # loop over all requested items - theData = [table.data[i] for i in table.label_indexrange(me)] # read strings - if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData)) - elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData)) - elif datatype == 'tensor': VTKarray[me].InsertNextTuple9(*map(float,theData)) - elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0])) + theData = [float(table.data[i]) for i in table.label_indexrange(me)] # read strings + if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*x),theData)) + elif datatype == 'scalar': VTKarray[me].InsertNextValue(theData[0]) + elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*theData) + elif datatype == 'tensor': VTKarray[me].InsertNextTuple9(*0.5*(np.array(theData)+ + np.array(theData) \ .reshape(3,3).T \ .reshape(9))) table.input_close() # close input ASCII table diff --git a/processing/post/vtk_addRectilinearGridData.py b/processing/post/vtk_addRectilinearGridData.py index 283a076c7..8f639711b 100755 --- a/processing/post/vtk_addRectilinearGridData.py +++ b/processing/post/vtk_addRectilinearGridData.py @@ -134,9 +134,12 @@ for name in filenames: for me in labels: # loop over all requested items theData = [table.data[i] for i in table.label_indexrange(me)] # read strings if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData)) - elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData)) - elif datatype == 'tensor': VTKarray[me].InsertNextTuple9(*map(float,theData)) elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0])) + elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData)) + elif datatype == 'tensor': VTKarray[me].InsertNextTuple9(*0.5*(np.array(theData)+ + np.array(theData) \ .reshape(3,3).T \ .reshape(9))) table.close() # close input ASCII table From 2b3faf204c4e745a605ce52c4d6c4691899e9712 Mon Sep 17 00:00:00 2001 From: Chen Date: Fri, 2 Sep 2016 14:30:08 -0400 Subject: [PATCH 43/88] make table compare normalize data by type (scalar, vector, tensor) --- lib/damask/test/test.py | 169 ++++++++++++++++++++-------------------- 1 file changed, 85 insertions(+), 84 deletions(-) diff --git a/lib/damask/test/test.py b/lib/damask/test/test.py index 
b54615c3a..023e7cb87 100644 --- a/lib/damask/test/test.py +++ b/lib/damask/test/test.py @@ -5,6 +5,7 @@ import os,sys,shutil import logging,logging.config import damask import numpy as np +import itertools from collections import Iterable from optparse import OptionParser @@ -16,7 +17,7 @@ class Test(): """ variants = [] - + def __init__(self,description = ''): fh = logging.FileHandler('test.log') # create file handler which logs even debug messages @@ -90,11 +91,11 @@ class Test(): logging.critical('\nWARNING:\n {}\n'.format(e)) return variant+1 # return culprit return 0 - + def feasible(self): """Check whether test is possible or not (e.g. no license available).""" return True - + def clean(self): """Delete directory tree containing current results.""" status = True @@ -112,7 +113,7 @@ class Test(): status = status and False return status - + def prepareAll(self): """Do all necessary preparations for the whole test""" return True @@ -120,7 +121,7 @@ class Test(): def prepare(self,variant): """Do all necessary preparations for the run of each test variant""" return True - + def run(self,variant): """Execute the requested test variant.""" @@ -152,17 +153,17 @@ class Test(): """Directory containing current results of the test.""" return os.path.normpath(os.path.join(self.dirBase,'current/')) - + def dirProof(self): """Directory containing human readable proof of correctness for the test.""" return os.path.normpath(os.path.join(self.dirBase,'proof/')) - + def fileInRoot(self,dir,file): """Path to a file in the root directory of DAMASK.""" return os.path.join(damask.Environment().rootDir(),dir,file) - + def fileInReference(self,file): """Path to a file in the refrence directory for the test.""" return os.path.join(self.dirReference(),file) @@ -172,7 +173,7 @@ class Test(): """Path to a file in the current results directory for the test.""" return os.path.join(self.dirCurrent(),file) - + def fileInProof(self,file): """Path to a file in the proof directory for the test.""" return os.path.join(self.dirProof(),file) @@ -189,58 +190,58 @@ class Test(): for source,target in zip(map(mapA,A),map(mapB,B)): try: - shutil.copy2(source,target) + shutil.copy2(source,target) except: logging.critical('error copying {} to {}'.format(source,target)) def copy_Reference2Current(self,sourcefiles=[],targetfiles=[]): - + if len(targetfiles) == 0: targetfiles = sourcefiles for i,file in enumerate(sourcefiles): try: - shutil.copy2(self.fileInReference(file),self.fileInCurrent(targetfiles[i])) + shutil.copy2(self.fileInReference(file),self.fileInCurrent(targetfiles[i])) except: logging.critical('Reference2Current: Unable to copy file "{}"'.format(file)) - + def copy_Base2Current(self,sourceDir,sourcefiles=[],targetfiles=[]): - + source=os.path.normpath(os.path.join(self.dirBase,'../../..',sourceDir)) if len(targetfiles) == 0: targetfiles = sourcefiles for i,file in enumerate(sourcefiles): try: - shutil.copy2(os.path.join(source,file),self.fileInCurrent(targetfiles[i])) + shutil.copy2(os.path.join(source,file),self.fileInCurrent(targetfiles[i])) except: logging.error(os.path.join(source,file)) logging.critical('Base2Current: Unable to copy file "{}"'.format(file)) def copy_Current2Reference(self,sourcefiles=[],targetfiles=[]): - + if len(targetfiles) == 0: targetfiles = sourcefiles for i,file in enumerate(sourcefiles): try: - shutil.copy2(self.fileInCurrent(file),self.fileInReference(targetfiles[i])) + shutil.copy2(self.fileInCurrent(file),self.fileInReference(targetfiles[i])) except: 
logging.critical('Current2Reference: Unable to copy file "{}"'.format(file)) - + def copy_Proof2Current(self,sourcefiles=[],targetfiles=[]): - + if len(targetfiles) == 0: targetfiles = sourcefiles for i,file in enumerate(sourcefiles): try: - shutil.copy2(self.fileInProof(file),self.fileInCurrent(targetfiles[i])) + shutil.copy2(self.fileInProof(file),self.fileInCurrent(targetfiles[i])) except: logging.critical('Proof2Current: Unable to copy file "{}"'.format(file)) - + def copy_Current2Current(self,sourcefiles=[],targetfiles=[]): - + for i,file in enumerate(sourcefiles): try: - shutil.copy2(self.fileInReference(file),self.fileInCurrent(targetfiles[i])) + shutil.copy2(self.fileInReference(file),self.fileInCurrent(targetfiles[i])) except: logging.critical('Current2Current: Unable to copy file "{}"'.format(file)) @@ -252,11 +253,11 @@ class Test(): logging.info(error) logging.debug(out) - - return out,error - - + return out,error + + + def compare_Array(self,File1,File2): import numpy as np @@ -287,28 +288,28 @@ class Test(): def compare_ArrayRefCur(self,ref,cur=''): - + if cur =='': cur = ref refName = self.fileInReference(ref) curName = self.fileInCurrent(cur) return self.compare_Array(refName,curName) - + def compare_ArrayCurCur(self,cur0,cur1): - + cur0Name = self.fileInCurrent(cur0) cur1Name = self.fileInCurrent(cur1) return self.compare_Array(cur0Name,cur1Name) def compare_Table(self,headings0,file0,headings1,file1,normHeadings='',normType=None, absoluteTolerance=False,perLine=False,skipLines=[]): - + import numpy as np logging.info('\n '.join(['comparing ASCII Tables',file0,file1])) if normHeadings == '': normHeadings = headings0 # check if comparison is possible and determine lenght of columns - if len(headings0) == len(headings1) == len(normHeadings): + if len(headings0) == len(headings1) == len(normHeadings): dataLength = len(headings0) length = [1 for i in xrange(dataLength)] shape = [[] for i in xrange(dataLength)] @@ -316,14 +317,14 @@ class Test(): maxError = [0.0 for i in xrange(dataLength)] absTol = [absoluteTolerance for i in xrange(dataLength)] column = [[1 for i in xrange(dataLength)] for j in xrange(2)] - + norm = [[] for i in xrange(dataLength)] normLength = [1 for i in xrange(dataLength)] normShape = [[] for i in xrange(dataLength)] normColumn = [1 for i in xrange(dataLength)] for i in xrange(dataLength): - if headings0[i]['shape'] != headings1[i]['shape']: + if headings0[i]['shape'] != headings1[i]['shape']: raise Exception('shape mismatch between {} and {} '.format(headings0[i]['label'],headings1[i]['label'])) shape[i] = headings0[i]['shape'] for j in xrange(np.shape(shape[i])[0]): @@ -339,7 +340,7 @@ class Test(): table0 = damask.ASCIItable(name=file0,readonly=True) table0.head_read() table1 = damask.ASCIItable(name=file1,readonly=True) - table1.head_read() + table1.head_read() for i in xrange(dataLength): key0 = ('1_' if length[i]>1 else '') + headings0[i]['label'] @@ -355,7 +356,7 @@ class Test(): column[0][i] = table0.label_index(key0) column[1][i] = table1.label_index(key1) normColumn[i] = table0.label_index(normKey) - + line0 = 0 while table0.data_read(): # read next data line of ASCII table if line0 not in skipLines: @@ -370,7 +371,7 @@ class Test(): else: norm[i] = np.append(norm[i],np.linalg.norm(np.reshape(normData,normShape[i]),normType)) line0 += 1 - + for i in xrange(dataLength): if not perLine: norm[i] = [np.max(norm[i]) for j in xrange(line0-len(skipLines))] data[i] = np.reshape(data[i],[line0-len(skipLines),length[i]]) @@ -441,14 +442,14 @@ class Test(): 
logging.info(files[i]+':'+','.join(columns[i])) if len(files) < 2: return True # single table is always close to itself... - + data = [] for table,labels in zip(tables,columns): table.data_readArray(labels) data.append(table.data) table.close() - - + + for i in xrange(1,len(data)): delta = data[i]-data[i-1] normBy = (np.abs(data[i]) + np.abs(data[i-1]))*0.5 @@ -457,7 +458,7 @@ class Test(): std = np.amax(np.std(normedDelta,0)) logging.info('mean: {:f}'.format(mean)) logging.info('std: {:f}'.format(std)) - + return (mean0.0, maximum, 1) # avoid div by zero for empty columns + + maximum = np.where(maximum > 0.0, maximum, 1) # avoid div by zero for empty columns + + + # normalize each table for i in xrange(len(data)): data[i] /= maximum - - mask = np.zeros_like(table.data,dtype='bool') - for table in data: - mask |= np.where(np.abs(table) Date: Fri, 2 Sep 2016 14:31:00 -0400 Subject: [PATCH 44/88] remove unused import for test.py --- lib/damask/test/test.py | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/damask/test/test.py b/lib/damask/test/test.py index 023e7cb87..cffc318a6 100644 --- a/lib/damask/test/test.py +++ b/lib/damask/test/test.py @@ -5,7 +5,6 @@ import os,sys,shutil import logging,logging.config import damask import numpy as np -import itertools from collections import Iterable from optparse import OptionParser From 4c6b8c490f015b66a25d57820e3917f9c14fe3d4 Mon Sep 17 00:00:00 2001 From: Chen Date: Fri, 2 Sep 2016 14:34:35 -0400 Subject: [PATCH 45/88] change one-line docstring to fit in one line. --- lib/damask/test/test.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/lib/damask/test/test.py b/lib/damask/test/test.py index cffc318a6..b64485234 100644 --- a/lib/damask/test/test.py +++ b/lib/damask/test/test.py @@ -468,9 +468,7 @@ class Test(): rtol = 1e-5, atol = 1e-8, debug = False): - """ - compare tables with np.allclose - """ + """ compare tables with np.allclose """ if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested files = [str(files)] From d41ac4e46354abd134fb65c65d0dbc6f8f9b8969 Mon Sep 17 00:00:00 2001 From: Chen Date: Fri, 2 Sep 2016 14:35:23 -0400 Subject: [PATCH 46/88] remove white space in docstring --- lib/damask/test/test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/damask/test/test.py b/lib/damask/test/test.py index b64485234..10fb993e2 100644 --- a/lib/damask/test/test.py +++ b/lib/damask/test/test.py @@ -468,7 +468,7 @@ class Test(): rtol = 1e-5, atol = 1e-8, debug = False): - """ compare tables with np.allclose """ + """compare tables with np.allclose""" if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested files = [str(files)] From a2f3839ca84a1f6ee006b796dcecf409b1c2d2bd Mon Sep 17 00:00:00 2001 From: Chen Date: Fri, 2 Sep 2016 14:37:15 -0400 Subject: [PATCH 47/88] add missing numpy import --- processing/post/vtk_addPointcloudData.py | 1 + processing/post/vtk_addRectilinearGridData.py | 1 + 2 files changed, 2 insertions(+) diff --git a/processing/post/vtk_addPointcloudData.py b/processing/post/vtk_addPointcloudData.py index 557b9fdd3..9736e54d0 100755 --- a/processing/post/vtk_addPointcloudData.py +++ b/processing/post/vtk_addPointcloudData.py @@ -3,6 +3,7 @@ import os,vtk import damask +import numpy as np from collections import defaultdict from optparse import OptionParser diff --git a/processing/post/vtk_addRectilinearGridData.py b/processing/post/vtk_addRectilinearGridData.py index 
8f639711b..df85288af 100755 --- a/processing/post/vtk_addRectilinearGridData.py +++ b/processing/post/vtk_addRectilinearGridData.py @@ -3,6 +3,7 @@ import os,vtk import damask +import numpy as np from collections import defaultdict from optparse import OptionParser From 9512231d49ed3cb9fad38ed5b57af52a843dbcd1 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 3 Sep 2016 13:53:41 +0200 Subject: [PATCH 48/88] cleaning --- code/debug.f90 | 41 ++++++++++------------------------------- 1 file changed, 10 insertions(+), 31 deletions(-) diff --git a/code/debug.f90 b/code/debug.f90 index 21a5443fe..03a0d6f08 100644 --- a/code/debug.f90 +++ b/code/debug.f90 @@ -104,7 +104,6 @@ contains subroutine debug_init use, intrinsic :: iso_fortran_env ! to get compiler_version and compiler_options (at least for gfortran 4.6 at the moment) use numerics, only: & - worldrank, & nStress, & nState, & nCryst, & @@ -130,47 +129,27 @@ subroutine debug_init integer(pInt), allocatable, dimension(:) :: chunkPos character(len=65536) :: tag, line - mainProcess: if (worldrank == 0) then - write(6,'(/,a)') ' <<<+- debug init -+>>>' - write(6,'(a15,a)') ' Current time: ',IO_timeStamp() + write(6,'(/,a)') ' <<<+- debug init -+>>>' + write(6,'(a15,a)') ' Current time: ',IO_timeStamp() #include "compilation_info.f90" - endif mainProcess - if (allocated(debug_StressLoopLpDistribution)) & - deallocate(debug_StressLoopLpDistribution) - allocate(debug_StressLoopLpDistribution(nStress+1,2)) - debug_StressLoopLpDistribution = 0_pInt - if (allocated(debug_StressLoopLiDistribution)) & - deallocate(debug_StressLoopLiDistribution) - allocate(debug_StressLoopLiDistribution(nStress+1,2)) - debug_StressLoopLiDistribution = 0_pInt - if (allocated(debug_StateLoopDistribution)) & - deallocate(debug_StateLoopDistribution) - allocate(debug_StateLoopDistribution(nState+1,2)) - debug_StateLoopDistribution = 0_pInt - if (allocated(debug_CrystalliteLoopDistribution)) & - deallocate(debug_CrystalliteLoopDistribution) - allocate(debug_CrystalliteLoopDistribution(nCryst+1)) - debug_CrystalliteLoopDistribution = 0_pInt - if (allocated(debug_MaterialpointStateLoopDistribution)) & - deallocate(debug_MaterialpointStateLoopDistribution) - allocate(debug_MaterialpointStateLoopDistribution(nMPstate)) - debug_MaterialpointStateLoopDistribution = 0_pInt - if (allocated(debug_MaterialpointLoopDistribution)) & - deallocate(debug_MaterialpointLoopDistribution) - allocate(debug_MaterialpointLoopDistribution(nHomog+1)) - debug_MaterialpointLoopDistribution = 0_pInt + allocate(debug_StressLoopLpDistribution(nStress+1,2), source=0_pInt) + allocate(debug_StressLoopLiDistribution(nStress+1,2), source=0_pInt) + allocate(debug_StateLoopDistribution(nState+1,2), source=0_pInt) + allocate(debug_CrystalliteLoopDistribution(nCryst+1), source=0_pInt) + allocate(debug_MaterialpointStateLoopDistribution(nMPstate), source=0_pInt) + allocate(debug_MaterialpointLoopDistribution(nHomog+1), source=0_pInt) !-------------------------------------------------------------------------------------------------- ! try to open the config file line = '' fileExists: if(IO_open_file_stat(FILEUNIT,debug_configFile)) then - do while (trim(line) /= IO_EOF) ! read thru sections of phase part + do while (trim(line) /= IO_EOF) ! read thru sections of phase part line = IO_read(FILEUNIT) if (IO_isBlank(line)) cycle ! skip empty lines chunkPos = IO_stringPos(line) - tag = IO_lc(IO_stringValue(line,chunkPos,1_pInt)) ! extract key + tag = IO_lc(IO_stringValue(line,chunkPos,1_pInt)) ! 
extract key select case(tag) case ('element','e','el') debug_e = IO_intValue(line,chunkPos,2_pInt) From 6207f1e7d918d0f8e3e45670608941de5d3ceb67 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 3 Sep 2016 14:25:45 +0200 Subject: [PATCH 49/88] reporting MPI processes --- code/spectral_interface.f90 | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/code/spectral_interface.f90 b/code/spectral_interface.f90 index 31b198806..d49a54411 100644 --- a/code/spectral_interface.f90 +++ b/code/spectral_interface.f90 @@ -57,7 +57,8 @@ subroutine DAMASK_interface_init() integer :: & i, & threadLevel, & - worldrank = 0 + worldrank = 0, & + worldsize = 0 integer, allocatable, dimension(:) :: & chunkPos integer, dimension(8) :: & @@ -66,6 +67,7 @@ subroutine DAMASK_interface_init() external :: & quit,& MPI_Comm_rank,& + MPI_Comm_size,& PETScInitialize, & MPI_Init_Thread, & MPI_abort @@ -77,17 +79,17 @@ subroutine DAMASK_interface_init() #ifdef _OPENMP call MPI_Init_Thread(MPI_THREAD_FUNNELED,threadLevel,ierr);CHKERRQ(ierr) ! in case of OpenMP, don't rely on PETScInitialize doing MPI init if (threadLevel>>' #include "compilation_info.f90" From 0d43dfb2f7eef0cfb13e2f1e85b70836312702bb Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 3 Sep 2016 14:27:56 +0200 Subject: [PATCH 50/88] allreduce makes sense here as for all other processes, worldrank check not needed anymore --- code/spectral_utilities.f90 | 97 +++++++++++++------------------------ 1 file changed, 34 insertions(+), 63 deletions(-) diff --git a/code/spectral_utilities.f90 b/code/spectral_utilities.f90 index b9ed5245d..326d7eabb 100644 --- a/code/spectral_utilities.f90 +++ b/code/spectral_utilities.f90 @@ -172,8 +172,7 @@ subroutine utilities_init() memory_efficient, & petsc_defaultOptions, & petsc_options, & - divergence_correction, & - worldrank + divergence_correction use debug, only: & debug_level, & debug_SPECTRAL, & @@ -212,11 +211,9 @@ subroutine utilities_init() vecSize = 3_C_INTPTR_T, & tensorSize = 9_C_INTPTR_T - mainProcess: if (worldrank == 0) then - write(6,'(/,a)') ' <<<+- spectral_utilities init -+>>>' - write(6,'(a15,a)') ' Current time: ',IO_timeStamp() + write(6,'(/,a)') ' <<<+- spectral_utilities init -+>>>' + write(6,'(a15,a)') ' Current time: ',IO_timeStamp() #include "compilation_info.f90" - endif mainProcess !-------------------------------------------------------------------------------------------------- ! set debugging parameters @@ -224,11 +221,11 @@ subroutine utilities_init() debugRotation = iand(debug_level(debug_SPECTRAL),debug_SPECTRALROTATION) /= 0 debugPETSc = iand(debug_level(debug_SPECTRAL),debug_SPECTRALPETSC) /= 0 - if(debugPETSc .and. 
worldrank == 0_pInt) write(6,'(3(/,a),/)') & + if(debugPETSc) write(6,'(3(/,a),/)') & ' Initializing PETSc with debug options: ', & trim(PETScDebug), & - ' add more using the PETSc_Options keyword in numerics.config ' - flush(6) + ' add more using the PETSc_Options keyword in numerics.config '; flush(6) + call PetscOptionsClear(ierr); CHKERRQ(ierr) if(debugPETSc) call PetscOptionsInsertString(trim(PETSCDEBUG),ierr); CHKERRQ(ierr) call PetscOptionsInsertString(trim(petsc_defaultOptions),ierr); CHKERRQ(ierr) @@ -237,10 +234,8 @@ subroutine utilities_init() grid1Red = grid(1)/2_pInt + 1_pInt wgt = 1.0/real(product(grid),pReal) - if (worldrank == 0) then - write(6,'(a,3(i12 ))') ' grid a b c: ', grid - write(6,'(a,3(es12.5))') ' size x y z: ', geomSize - endif + write(6,'(a,3(i12 ))') ' grid a b c: ', grid + write(6,'(a,3(es12.5))') ' size x y z: ', geomSize select case (spectral_derivative) case ('continuous') ! default, no weighting @@ -342,8 +337,7 @@ subroutine utilities_init() if (pReal /= C_DOUBLE .or. pInt /= C_INT) call IO_error(0_pInt,ext_msg='Fortran to C') ! check for correct precision in C call fftw_set_timelimit(fftw_timelimit) ! set timelimit for plan creation - if (debugGeneral .and. worldrank == 0_pInt) write(6,'(/,a)') ' FFTW initialized' - flush(6) + if (debugGeneral) write(6,'(/,a)') ' FFTW initialized'; flush(6) !-------------------------------------------------------------------------------------------------- ! calculation of discrete angular frequencies, ordered as in FFTW (wrap around) @@ -527,8 +521,6 @@ subroutine utilities_fourierGammaConvolution(fieldAim) use math, only: & math_det33, & math_invert - use numerics, only: & - worldrank use mesh, only: & grid3, & grid, & @@ -545,10 +537,8 @@ subroutine utilities_fourierGammaConvolution(fieldAim) logical :: err - if (worldrank == 0_pInt) then - write(6,'(/,a)') ' ... doing gamma convolution ...............................................' - flush(6) - endif + write(6,'(/,a)') ' ... doing gamma convolution ...............................................' + flush(6) !-------------------------------------------------------------------------------------------------- ! do the actual spectral method calculation (mechanical equilibrium) @@ -624,8 +614,6 @@ end subroutine utilities_fourierGreenConvolution real(pReal) function utilities_divergenceRMS() use IO, only: & IO_error - use numerics, only: & - worldrank use mesh, only: & geomSize, & grid, & @@ -638,10 +626,9 @@ real(pReal) function utilities_divergenceRMS() external :: & MPI_Allreduce - if (worldrank == 0_pInt) then - write(6,'(/,a)') ' ... calculating divergence ................................................' - flush(6) - endif + write(6,'(/,a)') ' ... calculating divergence ................................................' + flush(6) + rescaledGeom = cmplx(geomSize/scaledGeomSize,0.0_pReal) !-------------------------------------------------------------------------------------------------- @@ -680,8 +667,6 @@ end function utilities_divergenceRMS real(pReal) function utilities_curlRMS() use IO, only: & IO_error - use numerics, only: & - worldrank use mesh, only: & geomSize, & grid, & @@ -693,13 +678,11 @@ real(pReal) function utilities_curlRMS() complex(pReal), dimension(3) :: rescaledGeom external :: & - MPI_Reduce, & MPI_Allreduce - if (worldrank == 0_pInt) then - write(6,'(/,a)') ' ... calculating curl ......................................................' - flush(6) - endif + write(6,'(/,a)') ' ... 
calculating curl ......................................................' + flush(6) + rescaledGeom = cmplx(geomSize/scaledGeomSize,0.0_pReal) !-------------------------------------------------------------------------------------------------- @@ -757,8 +740,6 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C) prec_isNaN use IO, only: & IO_error - use numerics, only: & - worldrank use math, only: & math_Plain3333to99, & math_plain99to3333, & @@ -790,7 +771,7 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C) allocate (sTimesC(size_reduced,size_reduced), source =0.0_pReal) temp99_Real = math_Plain3333to99(math_rotate_forward3333(C,rot_BC)) - if(debugGeneral .and. worldrank == 0_pInt) then + if(debugGeneral) then write(6,'(/,a)') ' ... updating masked compliance ............................................' write(6,'(/,a,/,9(9(2x,f12.7,1x)/))',advance='no') ' Stiffness C (load) / GPa =',& transpose(temp99_Real)/1.e9_pReal @@ -831,7 +812,7 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C) if(m/=n .and. abs(sTimesC(m,n)) > (0.0_pReal + 10.0e-12_pReal)) errmatinv = .true. ! off diagonal elements of S*C should be 0 enddo enddo - if((debugGeneral .or. errmatinv) .and. (worldrank == 0_pInt)) then ! report + if(debugGeneral .or. errmatinv) then write(formatString, '(I16.16)') size_reduced formatString = '(/,a,/,'//trim(formatString)//'('//trim(formatString)//'(2x,es9.2,1x)/))' write(6,trim(formatString),advance='no') ' C * S (load) ', & @@ -845,7 +826,7 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C) else temp99_real = 0.0_pReal endif - if(debugGeneral .and. worldrank == 0_pInt) & ! report + if(debugGeneral) & write(6,'(/,a,/,9(9(2x,f12.7,1x)/),/)',advance='no') ' Masked Compliance (load) * GPa =', & transpose(temp99_Real*1.e9_pReal) flush(6) @@ -938,15 +919,11 @@ end subroutine utilities_fourierTensorDivergence !-------------------------------------------------------------------------------------------------- subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, & P,C_volAvg,C_minmaxAvg,P_av,forwardData,rotation_BC) - use prec, only: & - dNeq use IO, only: & IO_error use debug, only: & debug_reset, & debug_info - use numerics, only: & - worldrank use math, only: & math_transpose33, & math_rotate_forward33, & @@ -974,7 +951,7 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, & real(pReal),intent(out), dimension(3,3,3,3) :: C_volAvg, C_minmaxAvg !< average stiffness real(pReal),intent(out), dimension(3,3) :: P_av !< average PK stress - real(pReal),intent(out), dimension(3,3,grid(1),grid(2),grid3) :: P !< PK stress + real(pReal),intent(out), dimension(3,3,grid(1),grid(2),grid3) :: P !< PK stress logical :: & age @@ -985,13 +962,10 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, & real(pReal) :: max_dPdF_norm, min_dPdF_norm, defgradDetMin, defgradDetMax, defgradDet external :: & - MPI_Reduce, & MPI_Allreduce - if (worldrank == 0_pInt) then - write(6,'(/,a)') ' ... evaluating constitutive response ......................................' - flush(6) - endif + write(6,'(/,a)') ' ... evaluating constitutive response ......................................' + flush(6) age = .False. if (forwardData) then ! 
aging results @@ -1015,15 +989,14 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, & defgradDetMax = max(defgradDetMax,defgradDet) defgradDetMin = min(defgradDetMin,defgradDet) end do - call MPI_reduce(MPI_IN_PLACE,defgradDetMax,1,MPI_DOUBLE,MPI_MAX,0,PETSC_COMM_WORLD,ierr) - if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_Allreduce max') - call MPI_reduce(MPI_IN_PLACE,defgradDetMin,1,MPI_DOUBLE,MPI_MIN,0,PETSC_COMM_WORLD,ierr) - if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_Allreduce min') - if (worldrank == 0_pInt) then - write(6,'(a,1x,es11.4)') ' max determinant of deformation =', defgradDetMax - write(6,'(a,1x,es11.4)') ' min determinant of deformation =', defgradDetMin - flush(6) - endif + + call MPI_Allreduce(MPI_IN_PLACE,defgradDetMax,1,MPI_DOUBLE,MPI_MAX,PETSC_COMM_WORLD,ierr) + if (ierr /= 0_pInt) call IO_error(894_pInt, ext_msg='MPI_Allreduce max') + call MPI_Allreduce(MPI_IN_PLACE,defgradDetMin,1,MPI_DOUBLE,MPI_MIN,PETSC_COMM_WORLD,ierr) + if (ierr /= 0_pInt) call IO_error(894_pInt, ext_msg='MPI_Allreduce min') + write(6,'(a,1x,es11.4)') ' max determinant of deformation =', defgradDetMax + write(6,'(a,1x,es11.4)') ' min determinant of deformation =', defgradDetMin + flush(6) endif call CPFEM_general(age,timeinc) @@ -1061,15 +1034,13 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, & P = reshape(materialpoint_P, [3,3,grid(1),grid(2),grid3]) P_av = sum(sum(sum(P,dim=5),dim=4),dim=3) * wgt ! average of P call MPI_Allreduce(MPI_IN_PLACE,P_av,9,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr) - if (debugRotation .and. worldrank == 0_pInt) & + if (debugRotation) & write(6,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress (lab) / MPa =',& math_transpose33(P_av)*1.e-6_pReal P_av = math_rotate_forward33(P_av,rotation_BC) - if (worldrank == 0_pInt) then - write(6,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress / MPa =',& + write(6,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress / MPa =',& math_transpose33(P_av)*1.e-6_pReal - flush(6) - endif + flush(6) end subroutine utilities_constitutiveResponse From a6940ab84f0ea0b3e30f37926015b0a221376cf4 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 3 Sep 2016 14:29:39 +0200 Subject: [PATCH 51/88] did not compile --- code/IO.f90 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/code/IO.f90 b/code/IO.f90 index 22b56d819..f5db5c1f1 100644 --- a/code/IO.f90 +++ b/code/IO.f90 @@ -142,7 +142,7 @@ recursive function IO_read(fileUnit,reset) result(line) pathOn(stack) = path(1:scan(path,SEP,.true.))//input ! glue include to current file's dir endif - open(newunit=unitOn(stack),iostat=myStat,file=pathOn(stack),action=read) ! open included file + open(newunit=unitOn(stack),iostat=myStat,file=pathOn(stack),action='read') ! 
open included file if (myStat /= 0_pInt) call IO_error(100_pInt,el=myStat,ext_msg=pathOn(stack)) line = IO_read(fileUnit) From d94db61534340eecceab407961e438403f13bba5 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 3 Sep 2016 17:01:10 +0200 Subject: [PATCH 52/88] update only supported by minority of the tests --- lib/damask/test/test.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/lib/damask/test/test.py b/lib/damask/test/test.py index 10fb993e2..f2d000ab0 100644 --- a/lib/damask/test/test.py +++ b/lib/damask/test/test.py @@ -49,14 +49,8 @@ class Test(): action = "store_true", dest = "accept", help = "calculate results but always consider test as successfull") - self.parser.add_option("-u", "--update", - action = "store_true", - dest = "update", - help = "use current test results as new reference" - ) self.parser.set_defaults(debug = False, accept = False, - update = False, ) def execute(self): @@ -82,7 +76,7 @@ class Test(): self.prepare(variant) self.run(variant) self.postprocess(variant) - if self.options.update: # update requested + if self.updateRequested: # update requested self.update(variant) elif not (self.options.accept or self.compare(variant)): # no update, do comparison return variant+1 # return culprit From 5fec85a159ec5bc39d9cab262cef91c371c1087a Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 3 Sep 2016 17:01:39 +0200 Subject: [PATCH 53/88] not needed any more --- code/math.f90 | 1 - 1 file changed, 1 deletion(-) diff --git a/code/math.f90 b/code/math.f90 index 8694e30ee..fba431a56 100644 --- a/code/math.f90 +++ b/code/math.f90 @@ -178,7 +178,6 @@ subroutine math_init use, intrinsic :: iso_fortran_env ! to get compiler_version and compiler_options (at least for gfortran 4.6 at the moment) use prec, only: tol_math_check use numerics, only: & - worldrank, & fixedSeed use IO, only: IO_error, IO_timeStamp From 6ea7eeee08bec19f12adaa59fc5699d079bc71fc Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 4 Sep 2016 07:34:58 +0200 Subject: [PATCH 54/88] bugfix when restoring old behavior --- lib/damask/test/test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/damask/test/test.py b/lib/damask/test/test.py index f2d000ab0..78af62e95 100644 --- a/lib/damask/test/test.py +++ b/lib/damask/test/test.py @@ -41,6 +41,7 @@ class Test(): self.parser = OptionParser(description = '{} (using class: {})'.format(description,damask.version), usage = './test.py [options]') + self.updateRequested = False self.parser.add_option("-d", "--debug", action = "store_true", dest = "debug", From cde7de4e9fbbb57aa8e5095b67101179a3474460 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Sun, 4 Sep 2016 18:46:53 -0400 Subject: [PATCH 55/88] improved superclass handling and compare_Tables --- lib/damask/test/test.py | 167 +++++++++++++++++++--------------------- 1 file changed, 81 insertions(+), 86 deletions(-) diff --git a/lib/damask/test/test.py b/lib/damask/test/test.py index b54615c3a..f98f572fa 100644 --- a/lib/damask/test/test.py +++ b/lib/damask/test/test.py @@ -17,8 +17,16 @@ class Test(): variants = [] - def __init__(self,description = ''): + def __init__(self, **kwargs): + defaults = {'description': '', + 'keep': False, + 'accept': False, + 'update': False, + } + for arg in defaults.keys(): + setattr(self,arg,kwargs.get(arg) if kwargs.get(arg) else defaults[arg]) + fh = logging.FileHandler('test.log') # create file handler which logs even debug messages fh.setLevel(logging.DEBUG) fh.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s: 
\n%(message)s')) @@ -34,62 +42,53 @@ class Test(): logging.info('\n'.join(['+'*40, '-'*40, - '| '+description, + '| '+self.description, '-'*40, ])) + self.dirBase = os.path.dirname(os.path.realpath(sys.modules[self.__class__.__module__].__file__)) - self.parser = OptionParser(description = '{} (using class: {})'.format(description,damask.version), + self.parser = OptionParser(description = '{} (Test class version: {})'.format(self.description,damask.version), usage = './test.py [options]') - self.parser.add_option("-d", "--debug", + self.parser.add_option("-k", "--keep", action = "store_true", - dest = "debug", - help = "debug run, don't calculate but use existing results") - self.parser.add_option("-p", "--pass", + dest = "keep", + help = "keep current results, just run postprocessing") + self.parser.add_option("--ok", "--accept", action = "store_true", dest = "accept", help = "calculate results but always consider test as successfull") - self.parser.add_option("-u", "--update", - action = "store_true", - dest = "update", - help = "use current test results as new reference" - ) - self.parser.set_defaults(debug = False, - accept = False, - update = False, + + self.parser.set_defaults(keep = self.keep, + accept = self.accept, + update = self.update, ) + def execute(self): """Run all variants and report first failure.""" - if self.options.debug: - for variant in xrange(len(self.variants)): - try: - self.postprocess(variant) - if not self.compare(variant): - return variant+1 # return culprit - except Exception as e : - logging.critical('\nWARNING:\n {}\n'.format(e)) - return variant+1 # return culprit - return 0 - else: + if not self.options.keep: if not self.feasible(): return -1 - self.clean() self.prepareAll() - for variant,name in enumerate(self.variants): - try: + for variant,name in enumerate(self.variants): + try: + if not self.options.keep: self.prepare(variant) self.run(variant) - self.postprocess(variant) - if self.options.update: # update requested - self.update(variant) - elif not (self.options.accept or self.compare(variant)): # no update, do comparison - return variant+1 # return culprit - except Exception as e : - logging.critical('\nWARNING:\n {}\n'.format(e)) - return variant+1 # return culprit - return 0 + + self.postprocess(variant) + + if self.options.update and not self.update(variant): + logging.critical('update for "{}" failed.'.format(name)) + elif not (self.options.accept or self.compare(variant)): # no update, do comparison + return variant+1 # return culprit + + except Exception as e : + logging.critical('exception during variant execution: {}'.format(e)) + return variant+1 # return culprit + return 0 def feasible(self): """Check whether test is possible or not (e.g. 
no license available).""" @@ -97,21 +96,18 @@ class Test(): def clean(self): """Delete directory tree containing current results.""" - status = True try: shutil.rmtree(self.dirCurrent()) except: logging.warning('removal of directory "{}" not possible...'.format(self.dirCurrent())) - status = status and False try: os.mkdir(self.dirCurrent()) + return True except: - logging.critical('creation of directory "{}" failed...'.format(self.dirCurrent())) - status = status and False - - return status + logging.critical('creation of directory "{}" failed.'.format(self.dirCurrent())) + return False def prepareAll(self): """Do all necessary preparations for the whole test""" @@ -139,8 +135,8 @@ class Test(): def update(self,variant): """Update reference with current results.""" - logging.debug('Update not necessary') - return True + logging.critical('update not supported.') + return False def dirReference(self): @@ -463,21 +459,17 @@ class Test(): def compare_Tables(self, - files = [None,None], # list of file names + files = [None,None], # list of file names columns = [None], # list of list of column labels (per file) - rtol = 1e-5, - atol = 1e-8, - preFilter = -1.0, - postFilter = -1.0, - debug = False): - """ - compare tables with np.allclose - - threshold can be used to ignore small values (a negative number disables this feature) - """ + rtol = 1e-5, + atol = 1e-8, + debug = False): + """compare multiple tables with np.allclose""" if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested files = [str(files)] + if len(files) < 2: return True # single table is always close to itself... + tables = [damask.ASCIItable(name = filename,readonly = True) for filename in files] for table in tables: table.head_read() @@ -486,7 +478,7 @@ class Test(): columns = columns[:len(files)] # truncate to same length as files for i,column in enumerate(columns): - if column is None: columns[i] = tables[i].labels(raw = True) # if no column is given, read all + if column is None: columns[i] = tables[i].labels(raw = False) # if no column is given, use all logging.info('comparing ASCIItables') for i in xrange(len(columns)): @@ -494,39 +486,42 @@ class Test(): ([columns[i]] if not (isinstance(columns[i], Iterable) and not isinstance(columns[i], str)) else \ columns[i] ) - logging.info(files[i]+':'+','.join(columns[i])) + logging.info(files[i]+': '+','.join(columns[i])) + + dimensions = tables[0].label_dimension(columns[0]) # width of each requested column + maximum = np.zeros_like(columns[0],dtype=float) # one magnitude per column entry + data = [] # list of feature table extracted from each file (ASCII table) + + for i,(table,labels) in enumerate(zip(tables,columns)): + if np.any(dimensions != table.label_dimension(labels)): # check data object consistency + logging.critical('Table {} differs in data layout.'.format(files[i])) + return False + table.data_readArray(labels) # read data, ... + data.append(table.data) # ... store, ... + table.close() # ... close + + for j,label in enumerate(labels): # iterate over object labels + maximum[j] = np.maximum(\ + maximum[j], + np.amax(np.linalg.norm(table.data[:,table.label_indexrange(label)], + axis=1)) + ) # find maximum Euclidean norm across rows + + maximum = np.where(maximum > 0.0, maximum, 1.0) # avoid div by zero for zero columns + maximum = np.repeat(maximum,dimensions) # spread maximum over columns of each object - if len(files) < 2: return True # single table is always close to itself... 
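The rewritten compare_Tables above normalizes every requested column block by the largest Euclidean row norm found across all tables and only then applies the combined absolute/relative criterion of np.allclose. A minimal self-contained numpy sketch of that idea, simplified to plain per-column maxima; the names are illustrative and not part of the ASCIItable API:

  import numpy as np

  def tables_allclose(dataA, dataB, rtol=1e-5, atol=1e-8):
      """Column-wise normalization followed by np.allclose, mimicking compare_Tables."""
      maximum = np.maximum(np.amax(np.abs(dataA), axis=0),
                           np.amax(np.abs(dataB), axis=0))        # strongest entry per column
      maximum = np.where(maximum > 0.0, maximum, 1.0)             # avoid div by zero for empty columns
      return np.allclose(dataA/maximum, dataB/maximum, rtol=rtol, atol=atol)

  A = np.random.rand(100, 4)
  print(tables_allclose(A, A*(1.0 + 1.0e-6)))                     # True: relative deviation below rtol
  print(tables_allclose(A, A + 1.0))                              # False: clearly different data

Normalizing first means rtol and atol act on dimensionless numbers of order one, so one pair of tolerances works for stresses in Pa and strains alike.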
- - maximum = np.zeros(len(columns[0]),dtype='f') - data = [] - for table,labels in zip(tables,columns): - table.data_readArray(labels) - data.append(np.where(np.abs(table.data)<preFilter,np.zeros_like(table.data),table.data)) - maximum += np.abs(table.data).max(axis=0) - table.close() - - maximum /= len(tables) - maximum = np.where(maximum >0.0, maximum, 1) # avoid div by zero for empty columns for i in xrange(len(data)): - data[i] /= maximum - - mask = np.zeros_like(table.data,dtype='bool') + for table in data: - mask |= np.where(np.abs(table)<postFilter,True,False) # mask out (all) tiny values From: Martin Diehl Date: Mon, 5 Sep 2016 14:26:54 +0200 Subject: [PATCH 56/88] not needed any more --- code/FEsolving.f90 | 2 -- 1 file changed, 2 deletions(-) diff --git a/code/FEsolving.f90 b/code/FEsolving.f90 index bc7588ff0..8e09a1524 100644 --- a/code/FEsolving.f90 +++ b/code/FEsolving.f90 @@ -60,8 +60,6 @@ subroutine FE_init IO_warning, & IO_timeStamp use DAMASK_interface - use numerics, only: & - worldrank implicit none #if defined(Marc4DAMASK) || defined(Abaqus) From 1fc653ca861b644637eed87566259f30e7637ef0 Mon Sep 17 00:00:00 2001 From: Test User Date: Wed, 7 Sep 2016 22:14:27 +0200 Subject: [PATCH 57/88] updated version information after successful test of v2.0.1-75-g6f843ce --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 8a765d7c8..e317ce9f1 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.1-59-g4b02a55 +v2.0.1-75-g6f843ce From c8f832e12f569f87d58d9c887ce91a9f0c31f7c5 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 8 Sep 2016 08:43:15 +0200 Subject: [PATCH 58/88] communication in code only run with a certain debug option on will not work debug only happens at rank 0! --- code/spectral_utilities.f90 | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/code/spectral_utilities.f90 b/code/spectral_utilities.f90 index 326d7eabb..7b116e264 100644 --- a/code/spectral_utilities.f90 +++ b/code/spectral_utilities.f90 @@ -957,13 +957,10 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, & age integer(pInt) :: & - j,k,ierr + j,k real(pReal), dimension(3,3,3,3) :: max_dPdF, min_dPdF real(pReal) :: max_dPdF_norm, min_dPdF_norm, defgradDetMin, defgradDetMax, defgradDet - external :: & - MPI_Allreduce - write(6,'(/,a)') ' ... evaluating constitutive response ......................................' flush(6) age = .False. 
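PATCH 50 earlier promoted MPI_Reduce to MPI_Allreduce; the hunk below removes the collective again, because the determinant bounds are only ever reported, and reporting happens on rank 0 alone. The reduce/allreduce distinction these commits juggle, sketched with mpi4py; mpi4py is an assumption for illustration only, since DAMASK itself calls the Fortran MPI bindings:

  # hypothetical mpi4py sketch; run with e.g. mpirun -n 4 python sketch.py
  from mpi4py import MPI

  comm = MPI.COMM_WORLD
  local_extreme = float(comm.rank)                              # stand-in for a per-rank value such as det(F)

  everywhere = comm.allreduce(local_extreme, op=MPI.MAX)        # result ends up on every rank
  rank0_only = comm.reduce(local_extreme, op=MPI.MAX, root=0)   # result on root only, None elsewhere

  if comm.rank == 0:
      print('max over all ranks:', everywhere, rank0_only)      # both hold the same maximum on rank 0

An allreduce only pays off when every rank needs the result; for rank-0 reporting a reduce suffices, and if the quantity is purely debug output it can be dropped altogether, as done here.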
@@ -990,10 +987,6 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, & defgradDetMin = min(defgradDetMin,defgradDet) end do - call MPI_Allreduce(MPI_IN_PLACE,defgradDetMax,1,MPI_DOUBLE,MPI_MAX,PETSC_COMM_WORLD,ierr) - if (ierr /= 0_pInt) call IO_error(894_pInt, ext_msg='MPI_Allreduce max') - call MPI_Allreduce(MPI_IN_PLACE,defgradDetMin,1,MPI_DOUBLE,MPI_MIN,PETSC_COMM_WORLD,ierr) - if (ierr /= 0_pInt) call IO_error(894_pInt, ext_msg='MPI_Allreduce min') write(6,'(a,1x,es11.4)') ' max determinant of deformation =', defgradDetMax write(6,'(a,1x,es11.4)') ' min determinant of deformation =', defgradDetMin flush(6) From 1e16ebe2efd81a277e63315cb8494ea3aba88dd6 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 8 Sep 2016 08:45:46 +0200 Subject: [PATCH 59/88] not used any more --- lib/damask/environment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/damask/environment.py b/lib/damask/environment.py index fedc22e36..040e705b3 100644 --- a/lib/damask/environment.py +++ b/lib/damask/environment.py @@ -1,6 +1,6 @@ # -*- coding: UTF-8 no BOM -*- -import os,subprocess,shlex,re,string +import os,subprocess,shlex,re class Environment(): __slots__ = [ \ From 59e8109ced4110960a8f21408b8c4df86235c3f8 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 8 Sep 2016 08:51:07 +0200 Subject: [PATCH 60/88] some comments, missing ierr --- code/spectral_utilities.f90 | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/code/spectral_utilities.f90 b/code/spectral_utilities.f90 index 7b116e264..1a86b7648 100644 --- a/code/spectral_utilities.f90 +++ b/code/spectral_utilities.f90 @@ -957,7 +957,7 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, & age integer(pInt) :: & - j,k + j,k,ierr real(pReal), dimension(3,3,3,3) :: max_dPdF, min_dPdF real(pReal) :: max_dPdF_norm, min_dPdF_norm, defgradDetMin, defgradDetMax, defgradDet @@ -969,12 +969,10 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, & age = .True. materialpoint_F0 = reshape(F_lastInc, [3,3,1,product(grid(1:2))*grid3]) endif - if (cutBack) then ! restore saved variables - age = .False. - endif + if (cutBack) age = .False. ! restore saved variables materialpoint_F = reshape(F,[3,3,1,product(grid(1:2))*grid3]) - call debug_reset() + call debug_reset() ! this has no effect on rank >0 !-------------------------------------------------------------------------------------------------- ! calculate bounds of det(F) and report @@ -1019,7 +1017,7 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, & call MPI_Allreduce(MPI_IN_PLACE,C_volAvg,81,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr) - call debug_info() + call debug_info() ! this has no effect on rank >0 restartWrite = .false. ! reset restartWrite status cutBack = .false. ! 
reset cutBack status From da538fbce9a9a72a55ca1e36f93ee14b1a6068d2 Mon Sep 17 00:00:00 2001 From: chen Date: Thu, 8 Sep 2016 18:05:49 -0400 Subject: [PATCH 61/88] speed up of VTK data transformation --- processing/post/vtk_addPointcloudData.py | 72 +++++++++---------- processing/post/vtk_addRectilinearGridData.py | 43 ++++++----- 2 files changed, 55 insertions(+), 60 deletions(-) diff --git a/processing/post/vtk_addPointcloudData.py b/processing/post/vtk_addPointcloudData.py index 9736e54d0..6a8324567 100755 --- a/processing/post/vtk_addPointcloudData.py +++ b/processing/post/vtk_addPointcloudData.py @@ -6,6 +6,7 @@ import damask import numpy as np from collections import defaultdict from optparse import OptionParser +from vtk.util import numpy_support scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptID = ' '.join([scriptName,damask.version]) @@ -14,10 +15,10 @@ scriptID = ' '.join([scriptName,damask.version]) # MAIN # -------------------------------------------------------------------- -parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """ -Add scalar and RGB tuples from ASCIItable to existing VTK point cloud (.vtp). - -""", version = scriptID) +parser = OptionParser(option_class=damask.extendableOption, + usage='%prog options [file[s]]', + description = """Add scalar and RGB tuples from ASCIItable to existing VTK point cloud (.vtp).""", + version = scriptID) parser.add_option( '--vtk', dest = 'vtk', @@ -104,60 +105,57 @@ for name in filenames: for datatype,dimension,label in [['scalar',1,options.scalar], ['vector',3,options.vector], ['tensor',9,options.tensor], - ['color',3,options.color], + ['color' ,3,options.color], ]: for i,dim in enumerate(table.label_dimension(label)): me = label[i] - if dim == -1: remarks.append('{} "{}" not found...'.format(datatype,me)) - elif dim > dimension: remarks.append('"{}" not of dimension {}...'.format(me,dimension)) + if dim == -1: remarks.append('{} "{}" not found...'.format(datatype,me)) + elif dim > dimension: remarks.append('"{}" not of dimension {}...'.format(me,dimension)) else: remarks.append('adding {} "{}"...'.format(datatype,me)) active[datatype].append(me) - if datatype in ['scalar','vector', 'tensor']: VTKarray[me] = vtk.vtkDoubleArray() - elif datatype == 'color': VTKarray[me] = vtk.vtkUnsignedCharArray() - - VTKarray[me].SetNumberOfComponents(dimension) - VTKarray[me].SetName(label[i]) - if remarks != []: damask.util.croak(remarks) if errors != []: damask.util.croak(errors) table.close(dismiss = True) continue -# ------------------------------------------ process data --------------------------------------- +# --------------------------------------- process and add data ----------------------------------- - while table.data_read(): # read next data line of ASCII table + table.data_readArray([item for sublist in active.values() for item in sublist]) # read all requested data - for datatype,labels in active.items(): # loop over scalar,color - for me in labels: # loop over all requested items - theData = [float(table.data[i]) for i in table.label_indexrange(me)] # read strings - if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*x),theData)) - elif datatype == 'scalar': VTKarray[me].InsertNextValue(theData[0]) - elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*theData) - elif datatype == 'tensor': VTKarray[me].InsertNextTuple9(*0.5*(np.array(theData)+ - np.array(theData) \ - .reshape(3,3).T \ - .reshape(9))) + for datatype,labels in 
active.items(): # loop over scalar,color + for me in labels: # loop over all requested items + VTKtype = vtk.VTK_DOUBLE + VTKdata = table.data[:, table.label_indexrange(me)].copy() # copy to force contiguous layout - table.input_close() # close input ASCII table + if datatype == 'color': + VTKtype = vtk.VTK_UNSIGNED_CHAR + VTKdata = (VTKdata*255).astype(int) # translate to 0..255 UCHAR + elif datatype == 'tensor': + VTKdata[:,1] = VTKdata[:,3] = 0.5*(VTKdata[:,1]+VTKdata[:,3]) + VTKdata[:,2] = VTKdata[:,6] = 0.5*(VTKdata[:,2]+VTKdata[:,6]) + VTKdata[:,5] = VTKdata[:,7] = 0.5*(VTKdata[:,5]+VTKdata[:,7]) -# ------------------------------------------ add data --------------------------------------- + VTKarray[me] = numpy_support.numpy_to_vtk(num_array=VTKdata,array_type=VTKtype) + VTKarray[me].SetName(me) - for datatype,labels in active.items(): # loop over scalar,color - if datatype == 'color': - Polydata.GetPointData().SetScalars(VTKarray[active['color'][0]]) - Polydata.GetCellData().SetScalars(VTKarray[active['color'][0]]) - for me in labels: # loop over all requested items - Polydata.GetPointData().AddArray(VTKarray[me]) - Polydata.GetCellData().AddArray(VTKarray[me]) + if datatype == 'color': + Polydata.GetPointData().SetScalars(VTKarray[me]) + Polydata.GetCellData().SetScalars(VTKarray[me]) + else: + Polydata.GetPointData().AddArray(VTKarray[me]) + Polydata.GetCellData().AddArray(VTKarray[me]) + + + table.input_close() # close input ASCII table + +# ------------------------------------------ output result --------------------------------------- Polydata.Modified() if vtk.VTK_MAJOR_VERSION <= 5: Polydata.Update() -# ------------------------------------------ output result --------------------------------------- - writer = vtk.vtkXMLPolyDataWriter() writer.SetDataModeToBinary() writer.SetCompressorTypeToZLib() @@ -175,7 +173,7 @@ if options.render: actor.SetMapper(mapper) # Create the graphics structure. The renderer renders into the -# render window. The render window interactor captures mouse events +# render window. The render window interactively captures mouse events # and will perform appropriate camera or actor manipulation # depending on the nature of the events. 
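The speed-up of this commit comes from converting a whole numpy array into a VTK data array with a single numpy_support.numpy_to_vtk call instead of looping over rows with InsertNextValue/InsertNextTuple*. A minimal stand-alone sketch of that conversion; array contents and name are made up, and deep=True is chosen here defensively rather than taken from the script above, which copies the numpy array beforehand instead:

  import numpy as np
  import vtk
  from vtk.util import numpy_support

  data = np.random.rand(1000, 3)                                 # e.g. one 3-vector per point
  array = numpy_support.numpy_to_vtk(num_array=np.ascontiguousarray(data),  # needs contiguous memory
                                     deep=True,                  # copy, so VTK owns its buffer
                                     array_type=vtk.VTK_DOUBLE)
  array.SetName('example')
  print(array.GetNumberOfTuples(), array.GetNumberOfComponents())  # 1000 3

The per-tuple Insert* calls cross the Python/C++ boundary once per row, whereas numpy_to_vtk wraps (or copies) the buffer in one step, which is why the change matters for large point clouds and grids.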
diff --git a/processing/post/vtk_addRectilinearGridData.py b/processing/post/vtk_addRectilinearGridData.py index df85288af..f8d483bf0 100755 --- a/processing/post/vtk_addRectilinearGridData.py +++ b/processing/post/vtk_addRectilinearGridData.py @@ -4,6 +4,7 @@ import os,vtk import damask import numpy as np +from vtk.util import numpy_support from collections import defaultdict from optparse import OptionParser @@ -102,22 +103,16 @@ for name in filenames: for datatype,dimension,label in [['scalar',1,options.scalar], ['vector',3,options.vector], ['tensor',9,options.tensor], - ['color',3,options.color], + ['color' ,3,options.color], ]: for i,dim in enumerate(table.label_dimension(label)): me = label[i] - if dim == -1: remarks.append('{} "{}" not found...'.format(datatype,me)) + if dim == -1: remarks.append('{} "{}" not found...'.format(datatype,me)) elif dim > dimension: remarks.append('"{}" not of dimension {}...'.format(me,dimension)) else: remarks.append('adding {} "{}"...'.format(datatype,me)) active[datatype].append(me) - if datatype in ['scalar','vector','tensor']: VTKarray[me] = vtk.vtkDoubleArray() - elif datatype == 'color': VTKarray[me] = vtk.vtkUnsignedCharArray() - - VTKarray[me].SetNumberOfComponents(dimension) - VTKarray[me].SetName(label[i]) - if remarks != []: damask.util.croak(remarks) if errors != []: damask.util.croak(errors) @@ -126,28 +121,30 @@ for name in filenames: # ------------------------------------------ process data --------------------------------------- - datacount = 0 + table.data_readArray([item for sublist in active.values() for item in sublist]) # read all requested data - while table.data_read(): # read next data line of ASCII table + for datatype,labels in active.items(): # loop over scalar,color + for me in labels: # loop over all requested items + VTKtype = vtk.VTK_DOUBLE + VTKdata = table.data[:, table.label_indexrange(me)].copy() # copy to force contiguous layout - datacount += 1 # count data lines - for datatype,labels in active.items(): # loop over scalar,color - for me in labels: # loop over all requested items - theData = [table.data[i] for i in table.label_indexrange(me)] # read strings - if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData)) - elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0])) - elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData)) - elif datatype == 'tensor': VTKarray[me].InsertNextTuple9(*0.5*(np.array(theData)+ - np.array(theData) \ - .reshape(3,3).T \ - .reshape(9))) + if datatype == 'color': + VTKtype = vtk.VTK_UNSIGNED_CHAR + VTKdata = (VTKdata*255).astype(int) # translate to 0..255 UCHAR + elif datatype == 'tensor': + VTKdata[:,1] = VTKdata[:,3] = 0.5*(VTKdata[:,1]+VTKdata[:,3]) + VTKdata[:,2] = VTKdata[:,6] = 0.5*(VTKdata[:,2]+VTKdata[:,6]) + VTKdata[:,5] = VTKdata[:,7] = 0.5*(VTKdata[:,5]+VTKdata[:,7]) + + VTKarray[me] = numpy_support.numpy_to_vtk(num_array=VTKdata,array_type=VTKtype) + VTKarray[me].SetName(me) table.close() # close input ASCII table # ------------------------------------------ add data --------------------------------------- - if datacount == Npoints: mode = 'point' - elif datacount == Ncells: mode = 'cell' + if len(table.data) == Npoints: mode = 'point' + elif len(table.data) == Ncells: mode = 'cell' else: damask.util.croak('Data count is incompatible with grid...') continue From da08ea70785d21ff316dfac7a870ecbc249d7eff Mon Sep 17 00:00:00 2001 From: Test User Date: Fri, 9 Sep 2016 04:41:06 +0200 Subject: [PATCH 
62/88] updated version information after successful test of v2.0.1-103-g59e8109 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index e317ce9f1..aa0e19262 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.1-75-g6f843ce +v2.0.1-103-g59e8109 From 017c08a7bb31512b492c6033de55e3f71b3d23cc Mon Sep 17 00:00:00 2001 From: Chen Date: Fri, 9 Sep 2016 07:28:31 -0400 Subject: [PATCH 63/88] syntax fix --- processing/post/vtk_addPointcloudData.py | 1 - 1 file changed, 1 deletion(-) diff --git a/processing/post/vtk_addPointcloudData.py b/processing/post/vtk_addPointcloudData.py index 6a8324567..b9f66e684 100755 --- a/processing/post/vtk_addPointcloudData.py +++ b/processing/post/vtk_addPointcloudData.py @@ -3,7 +3,6 @@ import os,vtk import damask -import numpy as np from collections import defaultdict from optparse import OptionParser from vtk.util import numpy_support From 8c4767d58cd31c074b52197ed0d905e3d36ce58a Mon Sep 17 00:00:00 2001 From: Chen Date: Fri, 9 Sep 2016 07:28:54 -0400 Subject: [PATCH 64/88] syntax fix --- processing/post/vtk_addRectilinearGridData.py | 1 - 1 file changed, 1 deletion(-) diff --git a/processing/post/vtk_addRectilinearGridData.py b/processing/post/vtk_addRectilinearGridData.py index f8d483bf0..7d7c39123 100755 --- a/processing/post/vtk_addRectilinearGridData.py +++ b/processing/post/vtk_addRectilinearGridData.py @@ -3,7 +3,6 @@ import os,vtk import damask -import numpy as np from vtk.util import numpy_support from collections import defaultdict from optparse import OptionParser From ee322be870726c89889e77a49ef0d500e23db79f Mon Sep 17 00:00:00 2001 From: chen Date: Fri, 9 Sep 2016 16:17:00 -0400 Subject: [PATCH 65/88] use np.histogram2d, fixed list.append bug when using weight column --- processing/post/binXY.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/processing/post/binXY.py b/processing/post/binXY.py index 4fd635120..2c02b8dcb 100755 --- a/processing/post/binXY.py +++ b/processing/post/binXY.py @@ -20,7 +20,7 @@ Produces a binned grid of two columns from an ASCIItable, i.e. 
a two-dimensional parser.add_option('-d','--data', dest = 'data', - type='string', nargs = 2, metavar = 'string string', + type = 'string', nargs = 2, metavar = 'string string', help = 'column labels containing x and y ') parser.add_option('-w','--weight', dest = 'weight', @@ -49,15 +49,15 @@ parser.add_option('-z','--zrange', parser.add_option('-i','--invert', dest = 'invert', action = 'store_true', - help = 'invert probability density [%default]') + help = 'invert probability density') parser.add_option('-r','--rownormalize', dest = 'normRow', action = 'store_true', - help = 'normalize probability density in each row [%default]') + help = 'normalize probability density in each row') parser.add_option('-c','--colnormalize', dest = 'normCol', action = 'store_true', - help = 'normalize probability density in each column [%default]') + help = 'normalize probability density in each column') parser.set_defaults(bins = (10,10), type = ('linear','linear','linear'), @@ -79,7 +79,8 @@ result = np.zeros((options.bins[0],options.bins[1],3),'f') if options.data is None: parser.error('no data columns specified.') -labels = options.data +labels = list(options.data) + if options.weight is not None: labels += [options.weight] # prevent character splitting of single string value @@ -106,7 +107,7 @@ for name in filenames: # ------------------------------------------ sanity checks ---------------------------------------- missing_labels = table.data_readArray(labels) - + if len(missing_labels) > 0: damask.util.croak('column{} {} not found.'.format('s' if len(missing_labels) > 1 else '',', '.join(missing_labels))) table.close(dismiss = True) @@ -119,12 +120,11 @@ for name in filenames: minmax[c] = np.log(minmax[c]) # change minmax to log, too delta = minmax[:,1]-minmax[:,0] - - for i in xrange(len(table.data)): - x = int(options.bins[0]*(table.data[i,0]-minmax[0,0])/delta[0]) - y = int(options.bins[1]*(table.data[i,1]-minmax[1,0])/delta[1]) - if x >= 0 and x < options.bins[0] and y >= 0 and y < options.bins[1]: - grid[x,y] += 1. if options.weight is None else table.data[i,2] # count (weighted) occurrences + + (grid,xedges,yedges) = np.histogram2d(table.data[:,0],table.data[:,1], + bins=options.bins, + range=minmax, + weights=None if options.weight is None else table.data[:,2]) if options.normCol: for x in xrange(options.bins[0]): @@ -136,7 +136,7 @@ for name in filenames: sum = np.sum(grid[:,y]) if sum > 0.0: grid[:,y] /= sum - + if (minmax[2] == 0.0).all(): minmax[2] = [grid.min(),grid.max()] # auto scale from data if minmax[2,0] == minmax[2,1]: minmax[2,0] -= 1. 
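Replacing the per-row binning loop with a single np.histogram2d call, as this commit does, also delegates the optional weighting to numpy. A small self-contained example of the call pattern; the data are random stand-ins:

  import numpy as np

  x, y = np.random.rand(2, 10000)                           # point coordinates in [0,1)
  w = np.random.rand(10000)                                 # optional per-point weights

  grid, xedges, yedges = np.histogram2d(x, y,
                                        bins = (10,10),
                                        range = [[0.0,1.0],[0.0,1.0]],
                                        weights = w)        # weights=None gives plain counts
  print(grid.shape, np.isclose(grid.sum(), w.sum()))        # (10, 10) True

Besides being faster, this fixes the weighted case: each point contributes its weight to exactly one bin, and points outside the given range are discarded by numpy instead of needing explicit bounds checks.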
@@ -147,7 +147,7 @@ for name in filenames: if options.type[2].lower() == 'log': grid = np.log(grid) minmax[2] = np.log(minmax[2]) - + delta[2] = minmax[2,1]-minmax[2,0] for x in xrange(options.bins[0]): From 0b4e75c20162c0cc4403cd783ab8a4c0e32ec709 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 10 Sep 2016 20:44:46 +0200 Subject: [PATCH 66/88] update function was not working due to name clash for self.update --- lib/damask/test/test.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/lib/damask/test/test.py b/lib/damask/test/test.py index 5cc6d1210..f2c05efd6 100644 --- a/lib/damask/test/test.py +++ b/lib/damask/test/test.py @@ -20,18 +20,18 @@ class Test(): def __init__(self, **kwargs): defaults = {'description': '', - 'keep': False, - 'accept': False, - 'update': False, + 'keep': False, + 'accept': False, + 'updateRequest': False, } for arg in defaults.keys(): setattr(self,arg,kwargs.get(arg) if kwargs.get(arg) else defaults[arg]) - fh = logging.FileHandler('test.log') # create file handler which logs even debug messages + fh = logging.FileHandler('test.log') # create file handler which logs even debug messages fh.setLevel(logging.DEBUG) fh.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s: \n%(message)s')) - ch = logging.StreamHandler(stream=sys.stdout) # create console handler with a higher log level + ch = logging.StreamHandler(stream=sys.stdout) # create console handler with a higher log level ch.setLevel(logging.INFO) ch.setFormatter(logging.Formatter('%(message)s')) @@ -61,7 +61,7 @@ class Test(): self.parser.set_defaults(keep = self.keep, accept = self.accept, - update = self.update, + update = self.updateRequest, ) @@ -80,10 +80,10 @@ class Test(): self.postprocess(variant) - if self.options.update and not self.update(variant): - logging.critical('update for "{}" failed.'.format(name)) - elif not (self.options.accept or self.compare(variant)): # no update, do comparison - return variant+1 # return culprit + if self.options.update: + if self.update(variant) != 0: logging.critical('update for "{}" failed.'.format(name)) + elif not (self.options.accept or self.compare(variant)): # no update, do comparison + return variant+1 # return culprit except Exception as e : logging.critical('exception during variant execution: {}'.format(e)) @@ -135,7 +135,7 @@ class Test(): def update(self,variant): """Update reference with current results.""" logging.critical('update not supported.') - return False + return 1 def dirReference(self): From 2daad7542eb20c5cba101a9a4a72a796f43c077a Mon Sep 17 00:00:00 2001 From: tiasmaiti Date: Sat, 10 Sep 2016 16:33:28 -0400 Subject: [PATCH 67/88] added option for periodic averaging --- processing/post/groupTable.py | 34 +++++++++++++++++++++++++++++++--- 1 file changed, 31 insertions(+), 3 deletions(-) diff --git a/processing/post/groupTable.py b/processing/post/groupTable.py index 924105fba..cb1359c78 100755 --- a/processing/post/groupTable.py +++ b/processing/post/groupTable.py @@ -4,9 +4,18 @@ import os,sys import math # noqa import numpy as np -from optparse import OptionParser +from optparse import OptionParser, OptionGroup import damask +#"https://en.wikipedia.org/wiki/Center_of_mass#Systems_with_periodic_boundary_conditions" +def periodicAverage(Points, Box): + theta = (Points/Box[1]) * (2.0*np.pi) + xi = np.cos(theta) + zeta = np.sin(theta) + theta_avg = np.arctan2(-1.0*zeta.mean(), -1.0*xi.mean()) + np.pi + Pmean = Box[1] * theta_avg/(2.0*np.pi) + return Pmean + scriptName = 
os.path.splitext(os.path.basename(__file__))[0] scriptID = ' '.join([scriptName,damask.version]) @@ -35,7 +44,23 @@ parser.add_option('-a','--all', action = 'store_true', help = 'apply mapping function also to grouping column') -parser.set_defaults(function = 'np.average') +group = OptionGroup(parser, "periodic averaging", "") + +group.add_option('-p','--periodic', + dest = 'periodic', + action = 'store_true', + help = 'calculate average in periodic space defined by periodic length [%default]') +group.add_option('--boundary', + dest = 'boundary', metavar = 'MIN MAX', + type = 'float', nargs = 2, + help = 'define periodic box end points %default') + +parser.add_option_group(group) + +parser.set_defaults(function = 'np.average', + all = False, + periodic = False, + boundary = [0.0, 1.0]) (options,filenames) = parser.parse_args() @@ -92,7 +117,10 @@ for name in filenames: grpTable = np.empty((len(values), cols)) # initialize output for i in xrange(len(values)): # iterate over groups (unique values in grpColumn) - grpTable[i] = np.apply_along_axis(mapFunction,0,table.data[index[i]:index[i+1]]) # apply mapping function + if options.periodic : + grpTable[i] = periodicAverage(table.data[index[i]:index[i+1]],options.boundary) # apply periodicAverage mapping function + else : + grpTable[i] = np.apply_along_axis(mapFunction,0,table.data[index[i]:index[i+1]]) # apply mapping function if not options.all: grpTable[i,grpColumn] = table.data[index[i],grpColumn] # restore grouping column value table.data = grpTable From 06405bc2514de01aa04b33e5ff4384e57baab54c Mon Sep 17 00:00:00 2001 From: tiasmaiti Date: Sat, 10 Sep 2016 16:35:50 -0400 Subject: [PATCH 68/88] fixed coordinate system convention to be right handed always instead of random left and right handed assignment --- processing/post/addSpectralDecomposition.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/processing/post/addSpectralDecomposition.py b/processing/post/addSpectralDecomposition.py index aa87e6f3c..ba039758a 100755 --- a/processing/post/addSpectralDecomposition.py +++ b/processing/post/addSpectralDecomposition.py @@ -78,6 +78,8 @@ for name in filenames: for type, data in items.iteritems(): for column in data['column']: (u,v) = np.linalg.eigh(np.array(map(float,table.data[column:column+data['dim']])).reshape(data['shape'])) + if np.dot(np.cross(v[:,0], v[:,1]), v[:,2]) < 0.0 : + v[:, 2] *= -1.0 table.data_append(list(u)) table.data_append(list(v.transpose().reshape(data['dim']))) outputAlive = table.data_write() # output processed line From 77729f39e32d6f282e1be2810a51e275890bee0a Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 11 Sep 2016 14:41:48 +0200 Subject: [PATCH 69/88] commenting last commit --- processing/post/addSpectralDecomposition.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/processing/post/addSpectralDecomposition.py b/processing/post/addSpectralDecomposition.py index ba039758a..f65e8ac76 100755 --- a/processing/post/addSpectralDecomposition.py +++ b/processing/post/addSpectralDecomposition.py @@ -78,8 +78,7 @@ for name in filenames: for type, data in items.iteritems(): for column in data['column']: (u,v) = np.linalg.eigh(np.array(map(float,table.data[column:column+data['dim']])).reshape(data['shape'])) - if np.dot(np.cross(v[:,0], v[:,1]), v[:,2]) < 0.0 : - v[:, 2] *= -1.0 + if np.dot(np.cross(v[:,0], v[:,1]), v[:,2]) < 0.0 : v[:, 2] *= -1.0 # ensure right-handed coordinate system table.data_append(list(u)) table.data_append(list(v.transpose().reshape(data['dim']))) outputAlive = 
table.data_write() # output processed line From 9d7cad1fa49193fb21dbfff078560f97abb7a337 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 11 Sep 2016 19:03:32 +0200 Subject: [PATCH 70/88] python3 compatible solutions that still work on python2 --- lib/damask/orientation.py | 17 +++++---- lib/damask/test/test.py | 72 +++++++++++++++++++-------------------- lib/damask/util.py | 45 +++++++++++------------- 3 files changed, 63 insertions(+), 71 deletions(-) diff --git a/lib/damask/orientation.py b/lib/damask/orientation.py index 886cd5a36..21a32ab17 100644 --- a/lib/damask/orientation.py +++ b/lib/damask/orientation.py @@ -154,7 +154,7 @@ class Quaternion: def __div__(self, other): """division""" - if isinstance(other, (int,float,long)): + if isinstance(other, (int,float)): w = self.w / other x = self.x / other y = self.y / other @@ -165,7 +165,7 @@ class Quaternion: def __idiv__(self, other): """in place division""" - if isinstance(other, (int,float,long)): + if isinstance(other, (int,float)): self.w /= other self.x /= other self.y /= other @@ -338,7 +338,7 @@ class Quaternion: type = "bunge", degrees = False, standardRange = False): - u""" + """ Orientation as Bunge-Euler angles conversion of ACTIVE rotation to Euler angles taken from: @@ -553,7 +553,7 @@ class Symmetry: def __init__(self, symmetry = None): """lattice with given symmetry, defaults to None""" - if isinstance(symmetry, basestring) and symmetry.lower() in Symmetry.lattices: + if isinstance(symmetry, str) and symmetry.lower() in Symmetry.lattices: self.lattice = symmetry.lower() else: self.lattice = None @@ -650,8 +650,8 @@ class Symmetry: [ 1.0,0.0,0.0,0.0 ], ] - return map(Quaternion, - np.array(symQuats)[np.atleast_1d(np.array(who)) if who != [] else xrange(len(symQuats))]) + return list(map(Quaternion, + np.array(symQuats)[np.atleast_1d(np.array(who)) if who != [] else range(len(symQuats))])) def equivalentQuaternions(self, @@ -887,8 +887,7 @@ class Orientation: def equivalentOrientations(self, who = []): - return map(lambda q: Orientation(quaternion = q, symmetry = self.symmetry.lattice), - self.equivalentQuaternions(who)) + return [Orientation(quaternion = q, symmetry = self.symmetry.lattice) for q in self.equivalentQuaternions(who)] def reduced(self): """Transform orientation to fall into fundamental zone according to symmetry""" @@ -917,7 +916,7 @@ class Orientation: for i,sA in enumerate(mySymQs): for j,sB in enumerate(otherSymQs): theQ = sA.conjugated()*misQ*sB - for k in xrange(2): + for k in range(2): theQ.conjugate() breaker = self.symmetry.inFZ(theQ) \ and (not SST or other.symmetry.inDisorientationSST(theQ)) diff --git a/lib/damask/test/test.py b/lib/damask/test/test.py index c05a6474d..726c8820c 100644 --- a/lib/damask/test/test.py +++ b/lib/damask/test/test.py @@ -55,7 +55,7 @@ class Test(): def execute(self): """Run all variants and report first failure.""" if self.options.debug: - for variant in xrange(len(self.variants)): + for variant in range(len(self.variants)): try: self.postprocess(variant) if not self.compare(variant): @@ -68,7 +68,7 @@ class Test(): if not self.testPossible(): return -1 self.clean() self.prepareAll() - for variant in xrange(len(self.variants)): + for variant in range(len(self.variants)): try: self.prepare(variant) self.run(variant) @@ -178,7 +178,7 @@ class Test(): """ if not B or len(B) == 0: B = A - for source,target in zip(map(mapA,A),map(mapB,B)): + for source,target in zip(list(map(mapA,A)),list(map(mapB,B))): try: shutil.copy2(source,target) except: @@ -269,9 +269,9 @@ 
class Test(): max_loc=np.argmax(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.)) refArrayNonZero = refArrayNonZero[curArray.nonzero()] curArray = curArray[curArray.nonzero()] - print(' ********\n * maximum relative error {} between {} and {}\n ********'.format(max_err, + print((' ********\n * maximum relative error {} between {} and {}\n ********'.format(max_err, refArrayNonZero[max_loc], - curArray[max_loc])) + curArray[max_loc]))) return max_err else: raise Exception('mismatch in array size to compare') @@ -301,26 +301,26 @@ class Test(): # check if comparison is possible and determine lenght of columns if len(headings0) == len(headings1) == len(normHeadings): dataLength = len(headings0) - length = [1 for i in xrange(dataLength)] - shape = [[] for i in xrange(dataLength)] - data = [[] for i in xrange(dataLength)] - maxError = [0.0 for i in xrange(dataLength)] - absTol = [absoluteTolerance for i in xrange(dataLength)] - column = [[1 for i in xrange(dataLength)] for j in xrange(2)] + length = [1 for i in range(dataLength)] + shape = [[] for i in range(dataLength)] + data = [[] for i in range(dataLength)] + maxError = [0.0 for i in range(dataLength)] + absTol = [absoluteTolerance for i in range(dataLength)] + column = [[1 for i in range(dataLength)] for j in range(2)] - norm = [[] for i in xrange(dataLength)] - normLength = [1 for i in xrange(dataLength)] - normShape = [[] for i in xrange(dataLength)] - normColumn = [1 for i in xrange(dataLength)] + norm = [[] for i in range(dataLength)] + normLength = [1 for i in range(dataLength)] + normShape = [[] for i in range(dataLength)] + normColumn = [1 for i in range(dataLength)] - for i in xrange(dataLength): + for i in range(dataLength): if headings0[i]['shape'] != headings1[i]['shape']: raise Exception('shape mismatch between {} and {} '.format(headings0[i]['label'],headings1[i]['label'])) shape[i] = headings0[i]['shape'] - for j in xrange(np.shape(shape[i])[0]): + for j in range(np.shape(shape[i])[0]): length[i] *= shape[i][j] normShape[i] = normHeadings[i]['shape'] - for j in xrange(np.shape(normShape[i])[0]): + for j in range(np.shape(normShape[i])[0]): normLength[i] *= normShape[i][j] else: raise Exception('trying to compare {} with {} normed by {} data sets'.format(len(headings0), @@ -332,7 +332,7 @@ class Test(): table1 = damask.ASCIItable(name=file1,readonly=True) table1.head_read() - for i in xrange(dataLength): + for i in range(dataLength): key0 = ('1_' if length[i]>1 else '') + headings0[i]['label'] key1 = ('1_' if length[i]>1 else '') + headings1[i]['label'] normKey = ('1_' if normLength[i]>1 else '') + normHeadings[i]['label'] @@ -350,11 +350,11 @@ class Test(): line0 = 0 while table0.data_read(): # read next data line of ASCII table if line0 not in skipLines: - for i in xrange(dataLength): - myData = np.array(map(float,table0.data[column[0][i]:\ - column[0][i]+length[i]]),'d') - normData = np.array(map(float,table0.data[normColumn[i]:\ - normColumn[i]+normLength[i]]),'d') + for i in range(dataLength): + myData = np.array(list(map(float,table0.data[column[0][i]:\ + column[0][i]+length[i]])),'d') + normData = np.array(list(map(float,table0.data[normColumn[i]:\ + normColumn[i]+normLength[i]])),'d') data[i] = np.append(data[i],np.reshape(myData,shape[i])) if normType == 'pInf': norm[i] = np.append(norm[i],np.max(np.abs(normData))) @@ -362,11 +362,11 @@ class Test(): norm[i] = np.append(norm[i],np.linalg.norm(np.reshape(normData,normShape[i]),normType)) line0 += 1 - for i in xrange(dataLength): - if not perLine: 
norm[i] = [np.max(norm[i]) for j in xrange(line0-len(skipLines))] + for i in range(dataLength): + if not perLine: norm[i] = [np.max(norm[i]) for j in range(line0-len(skipLines))] data[i] = np.reshape(data[i],[line0-len(skipLines),length[i]]) if any(norm[i]) == 0.0 or absTol[i]: - norm[i] = [1.0 for j in xrange(line0-len(skipLines))] + norm[i] = [1.0 for j in range(line0-len(skipLines))] absTol[i] = True if perLine: logging.warning('At least one norm of {} in 1. table is 0.0, using absolute tolerance'.format(headings0[i]['label'])) @@ -376,9 +376,9 @@ class Test(): line1 = 0 while table1.data_read(): # read next data line of ASCII table if line1 not in skipLines: - for i in xrange(dataLength): - myData = np.array(map(float,table1.data[column[1][i]:\ - column[1][i]+length[i]]),'d') + for i in range(dataLength): + myData = np.array(list(map(float,table1.data[column[1][i]:\ + column[1][i]+length[i]])),'d') maxError[i] = max(maxError[i],np.linalg.norm(np.reshape(myData-data[i][line1-len(skipLines),:],shape[i]))/ norm[i][line1-len(skipLines)]) line1 +=1 @@ -386,7 +386,7 @@ class Test(): if (line0 != line1): raise Exception('found {} lines in 1. table but {} in 2. table'.format(line0,line1)) logging.info(' ********') - for i in xrange(dataLength): + for i in range(dataLength): if absTol[i]: logging.info(' * maximum absolute error {} between {} and {}'.format(maxError[i], headings0[i]['label'], @@ -424,7 +424,7 @@ class Test(): if column is None: columns[i] = tables[i].labels(raw = True) # if no column is given, read all logging.info('comparing ASCIItables statistically') - for i in xrange(len(columns)): + for i in range(len(columns)): columns[i] = columns[0] if not columns[i] else \ ([columns[i]] if not (isinstance(columns[i], Iterable) and not isinstance(columns[i], str)) else \ columns[i] @@ -440,7 +440,7 @@ class Test(): table.close() - for i in xrange(1,len(data)): + for i in range(1,len(data)): delta = data[i]-data[i-1] normBy = (np.abs(data[i]) + np.abs(data[i-1]))*0.5 normedDelta = np.where(normBy>preFilter,delta/normBy,0.0) @@ -480,7 +480,7 @@ class Test(): if column is None: columns[i] = tables[i].labels(raw = True) # if no column is given, read all logging.info('comparing ASCIItables') - for i in xrange(len(columns)): + for i in range(len(columns)): columns[i] = columns[0] if not columns[i] else \ ([columns[i]] if not (isinstance(columns[i], Iterable) and not isinstance(columns[i], str)) else \ columns[i] @@ -499,7 +499,7 @@ class Test(): maximum /= len(tables) maximum = np.where(maximum >0.0, maximum, 1) # avoid div by zero for empty columns - for i in xrange(len(data)): + for i in range(len(data)): data[i] /= maximum mask = np.zeros_like(table.data,dtype='bool') @@ -509,7 +509,7 @@ class Test(): allclose = True # start optimistic - for i in xrange(1,len(data)): + for i in range(1,len(data)): if debug: t0 = np.where(mask,0.0,data[i-1]) t1 = np.where(mask,0.0,data[i ]) diff --git a/lib/damask/util.py b/lib/damask/util.py index ef2de48eb..087ffcef5 100644 --- a/lib/damask/util.py +++ b/lib/damask/util.py @@ -40,7 +40,7 @@ def srepr(arg,glue = '\n'): hasattr(arg, "__getitem__") or hasattr(arg, "__iter__")): return glue.join(srepr(x) for x in arg) - return arg if isinstance(arg,basestring) else repr(arg) + return arg if isinstance(arg,str) else repr(arg) # ----------------------------- def croak(what, newline = True): @@ -136,29 +136,22 @@ class extendableOption(Option): class backgroundMessage(threading.Thread): """reporting with animation to indicate progress""" - choices = {'bounce': 
['_', 'o', 'O', u'\u00B0', - u'\u203e',u'\u203e',u'\u00B0','O','o','_'], - 'spin': [u'\u25dc',u'\u25dd',u'\u25de',u'\u25df'], - 'circle': [u'\u25f4',u'\u25f5',u'\u25f6',u'\u25f7'], - 'hexagon': [u'\u2b22',u'\u2b23'], - 'square': [u'\u2596',u'\u2598',u'\u259d',u'\u2597'], - 'triangle': [u'\u140a',u'\u140a',u'\u1403',u'\u1405',u'\u1405',u'\u1403'], - 'amoeba': [u'\u2596',u'\u258f',u'\u2598',u'\u2594',u'\u259d',u'\u2595', - u'\u2597',u'\u2582'], - 'beat': [u'\u2581',u'\u2582',u'\u2583',u'\u2585',u'\u2586',u'\u2587', - u'\u2587',u'\u2586',u'\u2585',u'\u2583',u'\u2582',], - 'prison': [u'\u168b',u'\u168c',u'\u168d',u'\u168f',u'\u168e',u'\u168d', - u'\u168c',u'\u168b',], - 'breath': [u'\u1690',u'\u1691',u'\u1692',u'\u1693',u'\u1694',u'\u1693', - u'\u1692',u'\u1691',u'\u1690',], - 'pulse': [u'·',u'•',u'\u25cf',u'\u25cf',u'•',], - 'ant': [u'\u2801',u'\u2802',u'\u2810',u'\u2820',u'\u2804',u'\u2840', - u'\u2880',u'\u2820',u'\u2804',u'\u2802',u'\u2810',u'\u2808'], - 'juggle': [u'\ua708',u'\ua709',u'\ua70a',u'\ua70b',u'\ua70c',u'\ua711', - u'\ua710',u'\ua70f',u'\ua70d',], -# 'wobbler': [u'\u2581',u'\u25e3',u'\u258f',u'\u25e4',u'\u2594',u'\u25e5',u'\u2595',u'\u25e2',], - 'grout': [u'\u2581',u'\u258f',u'\u2594',u'\u2595',], - 'partner': [u'\u26ac',u'\u26ad',u'\u26ae',u'\u26af',u'\u26ae',u'\u26ad',], + choices = {'bounce': ['_', 'o', 'O', '°', '‾', '‾', '°', 'O', 'o', '_'], + 'spin': ['◜', '◝', '◞', '◟'], + 'circle': ['◴', '◵', '◶', '◷'], + 'hexagon': ['⬢', '⬣'], + 'square': ['▖', '▘', '▝', '▗'], + 'triangle': ['ᐊ', 'ᐊ', 'ᐃ', 'ᐅ', 'ᐅ', 'ᐃ'], + 'amoeba': ['▖', '▏', '▘', '▔', '▝', '▕', '▗', '▂'], + 'beat': ['▁', '▂', '▃', '▅', '▆', '▇', '▇', '▆', '▅', '▃', '▂'], + 'prison': ['ᚋ', 'ᚌ', 'ᚍ', 'ᚏ', 'ᚎ', 'ᚍ', 'ᚌ', 'ᚋ'], + 'breath': ['ᚐ', 'ᚑ', 'ᚒ', 'ᚓ', 'ᚔ', 'ᚓ', 'ᚒ', 'ᚑ', 'ᚐ'], + 'pulse': ['·', '•', '●', '●', '•'], + 'ant': ['⠁', '⠂', '⠐', '⠠', '⠄', '⡀', '⢀', '⠠', '⠄', '⠂', '⠐', '⠈'], + 'juggle': ['꜈', '꜉', '꜊', '꜋', '꜌', '꜑', '꜐', '꜏', '꜍'], +# 'wobbler': ['▁', '◣', '▏', '◤', '▔', '◥', '▕', '◢'], + 'grout': ['▁', '▏', '▔', '▕'], + 'partner': ['⚬', '⚭', '⚮', '⚯', '⚮', '⚭'], 'classic': ['-', '\\', '|', '/',], } @@ -170,7 +163,7 @@ class backgroundMessage(threading.Thread): self.new_message = '' self.counter = 0 self.gap = ' ' - self.symbols = self.choices[symbol if symbol in self.choices else random.choice(self.choices.keys())] + self.symbols = self.choices[symbol if symbol in self.choices else random.choice(list(self.choices.keys()))] self.waittime = wait def __quit__(self): @@ -199,7 +192,7 @@ class backgroundMessage(threading.Thread): def print_message(self): length = len(self.symbols[self.counter] + self.gap + self.message) sys.stderr.write(chr(8)*length + ' '*length + chr(8)*length + \ - self.symbols[self.counter].encode('utf-8') + self.gap + self.new_message) # delete former and print new message + self.symbols[self.counter] + self.gap + self.new_message) # delete former and print new message sys.stderr.flush() self.message = self.new_message From e76c44b58b3826a9fbb97fa058398b08017bb5c6 Mon Sep 17 00:00:00 2001 From: Test User Date: Mon, 12 Sep 2016 04:41:00 +0200 Subject: [PATCH 71/88] updated version information after successful test of v2.0.1-113-g77729f3 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index aa0e19262..e68f28719 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.1-103-g59e8109 +v2.0.1-113-g77729f3 From 4cb27b76dfef39dfdddb42d46ab7fb55310f672d Mon Sep 17 00:00:00 2001 From: Test User Date: Mon, 12 Sep 2016 16:40:50 +0200 Subject: [PATCH 
72/88] updated version information after successful test of v2.0.1-120-g6941004

---
 VERSION | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/VERSION b/VERSION
index e68f28719..62d5dc773 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-v2.0.1-113-g77729f3
+v2.0.1-120-g6941004

From 804931db9ecb20c171059e69bae1d72c5f86d5e7 Mon Sep 17 00:00:00 2001
From: chen
Date: Mon, 12 Sep 2016 19:09:48 -0400
Subject: [PATCH 73/88] force type cast indices to int for numpy arrays

Force type casting of indices into an int array to avoid the warning:
VisibleDeprecationWarning: using a non-integer number instead of an integer
will result in an error in the future
For more info, see
http://stackoverflow.com/questions/20084218/deprecation-warning-in-scikit-learn-svmlight-format-loader

---
 processing/pre/geom_addPrimitive.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/processing/pre/geom_addPrimitive.py b/processing/pre/geom_addPrimitive.py
index 952ba0c4b..8d934368f 100755
--- a/processing/pre/geom_addPrimitive.py
+++ b/processing/pre/geom_addPrimitive.py
@@ -82,7 +82,7 @@ for name in filenames:
   table.head_read()
   info,extra_header = table.head_getGeom()
- 
+
   damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
                      'size x y z: %s'%(' x '.join(map(str,info['size']))),
                      'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
@@ -126,9 +126,9 @@ for name in filenames:
           primPos = invRotation*gridpos                          # rotate back to primitive coordinate system
           if np.dot(mask*primPos/dim,mask*primPos/dim) <= 0.25 and \
              np.all(abs((1.-mask)*primPos/dim) <= 0.5):          # inside ellipsoid and inside box
-            microstructure[(gridpos[0]+options.center[0])%info['grid'][0],
-                           (gridpos[1]+options.center[1])%info['grid'][1],
-                           (gridpos[2]+options.center[2])%info['grid'][2]] = options.fill # assign microstructure index
+            microstructure[int((gridpos[0]+options.center[0])%info['grid'][0]),
+                           int((gridpos[1]+options.center[1])%info['grid'][1]),
+                           int((gridpos[2]+options.center[2])%info['grid'][2])] = options.fill # assign microstructure index
 
 newInfo['microstructures'] = microstructure.max()
 
@@ -153,7 +153,7 @@ for name in filenames:
   table.labels_clear()
   table.head_write()
   table.output_flush()
- 
+
 # --- write microstructure information ------------------------------------------------------------
 
 formatwidth = int(math.floor(math.log10(microstructure.max())+1))

From 67a5edc93b153ad285745b065538177c7b25c0e7 Mon Sep 17 00:00:00 2001
From: Test User
Date: Tue, 13 Sep 2016 16:41:17 +0200
Subject: [PATCH 74/88] updated version information after successful test of v2.0.1-122-g804931d

---
 VERSION | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/VERSION b/VERSION
index 62d5dc773..5923dbca4 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-v2.0.1-120-g6941004
+v2.0.1-122-g804931d

From 53d2df6058f6ce70ece5ca060eeca185632e6075 Mon Sep 17 00:00:00 2001
From: chen
Date: Wed, 14 Sep 2016 12:37:07 -0400
Subject: [PATCH 75/88] vector support: 1D vectors of arbitrary length

Simple hack to bypass the size check for vectors, since VTK does not care
about the vector size.
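A minimal sketch of the VTK behavior relied on here (placeholder data and
array name, not part of this patch; assumes only that numpy and vtk are
importable):

  import numpy as np
  from vtk.util import numpy_support

  # VTK data arrays take any number of components per tuple, so no fixed
  # per-vector dimension needs to be enforced on the ASCII-table side.
  data = np.random.rand(10,5)                                 # ten 5-component "vectors"
  VTKarray = numpy_support.numpy_to_vtk(num_array=data,deep=True)
  VTKarray.SetName('fiveComponents')
  print(VTKarray.GetNumberOfComponents())                     # --> 5, no complaint from VTK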
--- processing/post/vtk_addPointcloudData.py | 2 +- processing/post/vtk_addRectilinearGridData.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/processing/post/vtk_addPointcloudData.py b/processing/post/vtk_addPointcloudData.py index b9f66e684..808dd7262 100755 --- a/processing/post/vtk_addPointcloudData.py +++ b/processing/post/vtk_addPointcloudData.py @@ -102,7 +102,7 @@ for name in filenames: active = defaultdict(list) for datatype,dimension,label in [['scalar',1,options.scalar], - ['vector',3,options.vector], + ['vector',99,options.vector], ['tensor',9,options.tensor], ['color' ,3,options.color], ]: diff --git a/processing/post/vtk_addRectilinearGridData.py b/processing/post/vtk_addRectilinearGridData.py index 7d7c39123..7906aeabf 100755 --- a/processing/post/vtk_addRectilinearGridData.py +++ b/processing/post/vtk_addRectilinearGridData.py @@ -100,7 +100,7 @@ for name in filenames: active = defaultdict(list) for datatype,dimension,label in [['scalar',1,options.scalar], - ['vector',3,options.vector], + ['vector',99,options.vector], ['tensor',9,options.tensor], ['color' ,3,options.color], ]: From ae16ccae610e5c70d09b00d95f1c7b451352e8fa Mon Sep 17 00:00:00 2001 From: chen Date: Wed, 14 Sep 2016 15:06:00 -0400 Subject: [PATCH 76/88] need to use deep copy when converting np array to vtk array for batch array conversion, it is necessary to use the deep copy to ensure numpy_support.numpy_to_vtk works as it should. --- processing/post/vtk_addPointcloudData.py | 2 +- processing/post/vtk_addRectilinearGridData.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/processing/post/vtk_addPointcloudData.py b/processing/post/vtk_addPointcloudData.py index 808dd7262..fd1247f52 100755 --- a/processing/post/vtk_addPointcloudData.py +++ b/processing/post/vtk_addPointcloudData.py @@ -137,7 +137,7 @@ for name in filenames: VTKdata[:,2] = VTKdata[:,6] = 0.5*(VTKdata[:,2]+VTKdata[:,6]) VTKdata[:,5] = VTKdata[:,7] = 0.5*(VTKdata[:,5]+VTKdata[:,7]) - VTKarray[me] = numpy_support.numpy_to_vtk(num_array=VTKdata,array_type=VTKtype) + VTKarray[me] = numpy_support.numpy_to_vtk(num_array=VTKdata,deep=True,array_type=VTKtype) VTKarray[me].SetName(me) if datatype == 'color': diff --git a/processing/post/vtk_addRectilinearGridData.py b/processing/post/vtk_addRectilinearGridData.py index 7906aeabf..ba8f313ee 100755 --- a/processing/post/vtk_addRectilinearGridData.py +++ b/processing/post/vtk_addRectilinearGridData.py @@ -135,7 +135,7 @@ for name in filenames: VTKdata[:,2] = VTKdata[:,6] = 0.5*(VTKdata[:,2]+VTKdata[:,6]) VTKdata[:,5] = VTKdata[:,7] = 0.5*(VTKdata[:,5]+VTKdata[:,7]) - VTKarray[me] = numpy_support.numpy_to_vtk(num_array=VTKdata,array_type=VTKtype) + VTKarray[me] = numpy_support.numpy_to_vtk(num_array=VTKdata,deep=True,array_type=VTKtype) VTKarray[me].SetName(me) table.close() # close input ASCII table From 59c1eee16b5f742541d61de24ed44b3c03e884d7 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Wed, 14 Sep 2016 19:42:05 -0400 Subject: [PATCH 77/88] added possibility to restrict seed coordinates to fraction of unit box --- processing/pre/seeds_fromRandom.py | 37 +++++++++++++++++++++--------- 1 file changed, 26 insertions(+), 11 deletions(-) diff --git a/processing/pre/seeds_fromRandom.py b/processing/pre/seeds_fromRandom.py index 038690df4..f25f210e9 100755 --- a/processing/pre/seeds_fromRandom.py +++ b/processing/pre/seeds_fromRandom.py @@ -29,7 +29,7 @@ def kdtree_search(cloud, queryPoints): # 
-------------------------------------------------------------------- parser = OptionParser(option_class=damask.extendableOption, usage='%prog [options]', description = """ -Distribute given number of points randomly within the three-dimensional cube [0.0,0.0,0.0]--[1.0,1.0,1.0]. +Distribute given number of points randomly within (a fraction of) the three-dimensional cube [0.0,0.0,0.0]--[1.0,1.0,1.0]. Reports positions with random crystal orientations in seeds file format to STDOUT. """, version = scriptID) @@ -38,6 +38,11 @@ parser.add_option('-N', dest = 'N', type = 'int', metavar = 'int', help = 'number of seed points [%default]') +parser.add_option('-f', + '--fraction', + dest = 'fraction', + type = 'float', nargs = 3, metavar = 'float float float', + help='fractions along x,y,z of unit cube to fill %default') parser.add_option('-g', '--grid', dest = 'grid', @@ -86,8 +91,7 @@ group.add_option( '-s', action = 'store_true', dest = 'selective', help = 'selective picking of seed points from random seed points [%default]') -group.add_option( '-f', - '--force', +group.add_option( '--force', action = 'store_true', dest = 'force', help = 'try selective picking despite large seed point number [%default]') @@ -103,6 +107,7 @@ parser.add_option_group(group) parser.set_defaults(randomSeed = None, grid = (16,16,16), + fraction = (1.0,1.0,1.0), N = 20, weights = False, max = 0.0, @@ -118,6 +123,7 @@ parser.set_defaults(randomSeed = None, (options,filenames) = parser.parse_args() +options.fraction = np.array(options.fraction) options.grid = np.array(options.grid) gridSize = options.grid.prod() @@ -160,16 +166,25 @@ for name in filenames: grainEuler[2,:] *= 360.0 # phi_2 is uniformly distributed if not options.selective: + seeds = np.array([]) + + while len(seeds) < options.N: - seeds = np.zeros((3,options.N),dtype='d') # seed positions array - gridpoints = random.sample(range(gridSize),options.N) # choose first N from random permutation of grid positions + theSeeds = np.zeros((options.N,3),dtype=float) # seed positions array + gridpoints = random.sample(range(gridSize),options.N) # choose first N from random permutation of grid positions - seeds[0,:] = (np.mod(gridpoints ,options.grid[0])\ - +np.random.random(options.N)) /options.grid[0] - seeds[1,:] = (np.mod(gridpoints// options.grid[0] ,options.grid[1])\ - +np.random.random(options.N)) /options.grid[1] - seeds[2,:] = (np.mod(gridpoints//(options.grid[1]*options.grid[0]),options.grid[2])\ - +np.random.random(options.N)) /options.grid[2] + theSeeds[:,0] = (np.mod(gridpoints ,options.grid[0])\ + +np.random.random(options.N)) /options.grid[0] + theSeeds[:,1] = (np.mod(gridpoints// options.grid[0] ,options.grid[1])\ + +np.random.random(options.N)) /options.grid[1] + theSeeds[:,2] = (np.mod(gridpoints//(options.grid[1]*options.grid[0]),options.grid[2])\ + +np.random.random(options.N)) /options.grid[2] + + goodSeeds = theSeeds[np.all(theSeeds<=options.fraction,axis=1)] # pick seeds within threshold fraction + seeds = goodSeeds if len(seeds) == 0 else np.vstack((seeds,goodSeeds)) + if len(seeds) > options.N: seeds = seeds[:min(options.N,len(seeds))] + + seeds = seeds.T # switch layout to point index as last index else: From f3308dbef64a09f09677f066c266359ae3270522 Mon Sep 17 00:00:00 2001 From: chen Date: Wed, 14 Sep 2016 19:59:26 -0400 Subject: [PATCH 78/88] replace scalar/vector option with data vtk does not care 1D data array length --- processing/post/vtk_addPointcloudData.py | 15 +++++---------- processing/post/vtk_addRectilinearGridData.py | 16 
+++++-----------
 2 files changed, 10 insertions(+), 21 deletions(-)

diff --git a/processing/post/vtk_addPointcloudData.py b/processing/post/vtk_addPointcloudData.py
index fd1247f52..b96c9ce17 100755
--- a/processing/post/vtk_addPointcloudData.py
+++ b/processing/post/vtk_addPointcloudData.py
@@ -31,13 +31,10 @@ parser.add_option('-r', '--render',
                   dest = 'render',
                   action = 'store_true',
                   help = 'open output in VTK render window')
-parser.add_option('-s', '--scalar', dest='scalar', action='extend',
-                  metavar ='',
-                  help = 'scalar values')
-parser.add_option('-v', '--vector',
-                  dest = 'vector',
+parser.add_option('-d', '--data',
+                  dest = 'data',
                   action = 'extend', metavar = '',
-                  help = 'vector value label(s)')
+                  help = 'scalar/vector value(s) label(s)')
 parser.add_option('-t', '--tensor',
                   dest = 'tensor',
                   action = 'extend', metavar = '',
@@ -46,8 +43,7 @@ parser.add_option('-c', '--color', dest='color', action='extend',
                   metavar ='',
                   help = 'RGB color tuples')
 
-parser.set_defaults(scalar = [],
-                    vector = [],
+parser.set_defaults(data = [],
                     tensor = [],
                     color = [],
                     inplace = False,
@@ -101,8 +97,7 @@ for name in filenames:
   VTKarray = {}
   active = defaultdict(list)
 
-  for datatype,dimension,label in [['scalar',1,options.scalar],
-                                   ['vector',99,options.vector],
+  for datatype,dimension,label in [['data',99,options.data],
                                    ['tensor',9,options.tensor],
                                    ['color' ,3,options.color],
                                    ]:
diff --git a/processing/post/vtk_addRectilinearGridData.py b/processing/post/vtk_addRectilinearGridData.py
index ba8f313ee..78d599f0b 100755
--- a/processing/post/vtk_addRectilinearGridData.py
+++ b/processing/post/vtk_addRectilinearGridData.py
@@ -31,14 +31,10 @@ parser.add_option('-r', '--render',
                   dest = 'render',
                   action = 'store_true',
                   help = 'open output in VTK render window')
-parser.add_option('-s', '--scalar',
-                  dest = 'scalar',
+parser.add_option('-d', '--data',
+                  dest = 'data',
                   action = 'extend', metavar = '',
-                  help = 'scalar value label(s)')
-parser.add_option('-v', '--vector',
-                  dest = 'vector',
-                  action = 'extend', metavar = '',
-                  help = 'vector value label(s)')
+                  help = 'scalar/vector value(s) label(s)')
 parser.add_option('-t', '--tensor',
                   dest = 'tensor',
                   action = 'extend', metavar = '',
@@ -48,8 +44,7 @@ parser.add_option('-c', '--color',
                   action = 'extend', metavar = '',
                   help = 'RGB color tuple label')
 
-parser.set_defaults(scalar = [],
-                    vector = [],
+parser.set_defaults(data = [],
                     tensor = [],
                     color = [],
                     inplace = False,
@@ -99,8 +94,7 @@ for name in filenames:
   VTKarray = {}
   active = defaultdict(list)
 
-  for datatype,dimension,label in [['scalar',1,options.scalar],
-                                   ['vector',99,options.vector],
+  for datatype,dimension,label in [['data',99,options.data],
                                    ['tensor',9,options.tensor],
                                    ['color' ,3,options.color],
                                    ]:

From 3b0b677dc9bd0d72e79414d894e4609fd6d317b3 Mon Sep 17 00:00:00 2001
From: Test User
Date: Thu, 15 Sep 2016 16:41:14 +0200
Subject: [PATCH 79/88] updated version information after successful test of v2.0.1-130-gf3308db

---
 VERSION | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/VERSION b/VERSION
index 5923dbca4..2a89ebb42 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-v2.0.1-122-g804931d
+v2.0.1-130-gf3308db

From 1180c8bb88c75da8f0f5a3eb3943179fc6b81bef Mon Sep 17 00:00:00 2001
From: Brendan Robert Vande Kieft
Date: Thu, 15 Sep 2016 17:36:43 -0400
Subject: [PATCH 80/88] Fix calculation of interfaceEnergy

---
 processing/pre/geom_grainGrowth.py | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/processing/pre/geom_grainGrowth.py
b/processing/pre/geom_grainGrowth.py index 43e2e49d9..95c67fb28 100755 --- a/processing/pre/geom_grainGrowth.py +++ b/processing/pre/geom_grainGrowth.py @@ -81,17 +81,20 @@ for name in filenames: periodic_microstructure = np.tile(microstructure,(3,3,3))[grid[0]/2:-grid[0]/2, grid[1]/2:-grid[1]/2, grid[2]/2:-grid[2]/2] # periodically extend the microstructure + # store a copy the initial microstructure to find locations of immutable indices microstructure_original = np.copy(microstructure) X,Y,Z = np.mgrid[0:grid[0],0:grid[1],0:grid[2]] + + # Calculates gaussian weights for simulating 3d diffusion gauss = np.exp(-(X*X + Y*Y + Z*Z)/(2.0*options.d*options.d))/math.pow(2.0*np.pi*options.d*options.d,1.5) gauss[:,:,grid[2]/2::] = gauss[:,:,round(grid[2]/2.)-1::-1] # trying to cope with uneven (odd) grid size gauss[:,grid[1]/2::,:] = gauss[:,round(grid[1]/2.)-1::-1,:] gauss[grid[0]/2::,:,:] = gauss[round(grid[0]/2.)-1::-1,:,:] gauss = np.fft.rfftn(gauss) - interfacialEnergy = lambda A,B: (A*B != 0)*(A != B)*1.0 + interfacialEnergy = lambda A,B: (A*B != 0)*(A != B)*1.0 #1.0 if A & B are distinct & nonzero, 0.0 otherwise struc = ndimage.generate_binary_structure(3,1) # 3D von Neumann neighborhood @@ -101,9 +104,11 @@ for name in filenames: for j in (-1,0,1): for k in (-1,0,1): # assign interfacial energy to all voxels that have a differing neighbor (in Moore neighborhood) - interfaceEnergy = np.maximum(boundary, + boundary = np.maximum(boundary, interfacialEnergy(microstructure,np.roll(np.roll(np.roll( microstructure,i,axis=0), j,axis=1), k,axis=2))) + interfaceEnergy = boundary + # periodically extend interfacial energy array by half a grid size in positive and negative directions periodic_interfaceEnergy = np.tile(interfaceEnergy,(3,3,3))[grid[0]/2:-grid[0]/2, grid[1]/2:-grid[1]/2, From 87055f0e63037903e18847fbbbb76e3909906d71 Mon Sep 17 00:00:00 2001 From: Test User Date: Sat, 17 Sep 2016 10:59:57 +0200 Subject: [PATCH 81/88] updated version information after successful test of v2.0.1-132-g1180c8b --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 2a89ebb42..4dd2689d4 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.1-130-gf3308db +v2.0.1-132-g1180c8b From 7291c20cd46f3b518a0b37b9ca9210db39f04c68 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 21 Jul 2016 10:06:02 +0200 Subject: [PATCH 82/88] added reference --- lib/damask/orientation.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/damask/orientation.py b/lib/damask/orientation.py index f34f33a53..9d14d12c5 100644 --- a/lib/damask/orientation.py +++ b/lib/damask/orientation.py @@ -999,7 +999,8 @@ class Orientation: if relationModel not in ['KS','GT','GTdash','NW','Pitsch','Bain']: return None if int(direction) == 0: return None - # KS from S. Morito et al./Journal of Alloys and Compounds 5775 (2013) S587-S592 DOES THIS PAPER EXISTS? + # KS from S. Morito et al./Journal of Alloys and Compounds 5775 (2013) S587-S592 + # for KS rotation matrices also check K. Kitahara et al./Acta Materialia 54 (2006) 1279-1288 # GT from Y. He et al./Journal of Applied Crystallography (2006). 39, 72-81 # GT' from Y. He et al./Journal of Applied Crystallography (2006). 39, 72-81 # NW from H. 
Kitahara et al./Materials Characterization 54 (2005) 378-386 From 4ccc0cf5a1135db7c38b42948023d3a56abc8876 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 19 Aug 2016 18:53:48 +0200 Subject: [PATCH 83/88] tool to mirror geometries (enforce periodicity on measured data) --- processing/pre/geom_mirror.py | 106 ++++++++++++++++++++++++++++++++++ 1 file changed, 106 insertions(+) create mode 100755 processing/pre/geom_mirror.py diff --git a/processing/pre/geom_mirror.py b/processing/pre/geom_mirror.py new file mode 100755 index 000000000..61f212039 --- /dev/null +++ b/processing/pre/geom_mirror.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python2.7 +# -*- coding: UTF-8 no BOM -*- + +import os,sys,math +import numpy as np +import damask +from scipy import ndimage +from optparse import OptionParser + +scriptName = os.path.splitext(os.path.basename(__file__))[0] +scriptID = ' '.join([scriptName,damask.version]) + +#-------------------------------------------------------------------------------------------------- +# MAIN +#-------------------------------------------------------------------------------------------------- + +parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """ +Mirros spectral geometry description along given direction. + +""", version=scriptID) + + +(options, filenames) = parser.parse_args() + + + +# --- loop over input files ------------------------------------------------------------------------- + +if filenames == []: filenames = [None] + +for name in filenames: + try: + table = damask.ASCIItable(name = name, + buffered = False, labeled = False) + except: continue + damask.util.report(scriptName,name) + +# --- interpret header ---------------------------------------------------------------------------- + + table.head_read() + info,extra_header = table.head_getGeom() + + damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))), + 'size x y z: %s'%(' x '.join(map(str,info['size']))), + 'origin x y z: %s'%(' : '.join(map(str,info['origin']))), + 'homogenization: %i'%info['homogenization'], + 'microstructures: %i'%info['microstructures'], + ]) + + errors = [] + if np.any(info['grid'] < 1): errors.append('invalid grid a b c.') + if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.') + if errors != []: + damask.util.croak(errors) + table.close(dismiss = True) + continue + +# --- read data ------------------------------------------------------------------------------------ + + microstructure = table.microstructure_read(info['grid']).reshape(info['grid'],order='F') # read microstructure + + microstructure = np.concatenate([microstructure,microstructure[:,:,::-1]],2) + microstructure = np.concatenate([microstructure,microstructure[:,::-1,:]],1) + microstructure = np.concatenate([microstructure,microstructure[::-1,:,:]],0) + +# --- do work ------------------------------------------------------------------------------------ + + newInfo = { + 'size': microstructure.shape*info['size']/info['grid'], + 'grid': microstructure.shape, + } + + +# --- report --------------------------------------------------------------------------------------- + + remarks = [] + if (any(newInfo['grid'] != info['grid'])): + remarks.append('--> grid a b c: %s'%(' x '.join(map(str,newInfo['grid'])))) + if (any(newInfo['size'] != info['size'])): + remarks.append('--> size x y z: %s'%(' x '.join(map(str,newInfo['size'])))) + if remarks != []: damask.util.croak(remarks) + +# --- write header 
--------------------------------------------------------------------------------- + + table.labels_clear() + table.info_clear() + table.info_append([ + scriptID + ' ' + ' '.join(sys.argv[1:]), + "grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=newInfo['grid']), + "size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=newInfo['size']), + "origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']), + "homogenization\t{homog}".format(homog=info['homogenization']), + "microstructures\t{microstructures}".format(microstructures=info['microstructures']), + extra_header + ]) + table.head_write() + +# --- write microstructure information ------------------------------------------------------------ + + formatwidth = int(math.floor(math.log10(microstructure.max())+1)) + table.data = microstructure.reshape((newInfo['grid'][0],np.prod(newInfo['grid'][1:])),order='F').transpose() + table.data_writeArray('%%%ii'%(formatwidth),delimiter = ' ') + +# --- output finalization -------------------------------------------------------------------------- + + table.close() # close ASCII table From 9e3c54b0e337b660ec2305ca5f99a4e28c9c97de Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 2 Sep 2016 18:19:01 +0200 Subject: [PATCH 84/88] a few comments and adjusted calculation --- lib/damask/orientation.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/lib/damask/orientation.py b/lib/damask/orientation.py index 9d14d12c5..e28d30a7a 100644 --- a/lib/damask/orientation.py +++ b/lib/damask/orientation.py @@ -995,6 +995,12 @@ class Orientation: relationModel, direction, targetSymmetry = None): + """ + orientation relationship + + positive number: fcc --> bcc + negative number: bcc --> fcc + """ if relationModel not in ['KS','GT','GTdash','NW','Pitsch','Bain']: return None if int(direction) == 0: return None @@ -1227,14 +1233,14 @@ class Orientation: myPlane /= np.linalg.norm(myPlane) myNormal = [float(i) for i in normals[relationModel][variant,me]] # map(float, planes[...]) does not work in python 3 myNormal /= np.linalg.norm(myNormal) - myMatrix = np.array([myPlane,myNormal,np.cross(myPlane,myNormal)]) + myMatrix = np.array([myNormal,np.cross(myPlane,myNormal),myPlane]).T otherPlane = [float(i) for i in planes[relationModel][variant,other]] # map(float, planes[...]) does not work in python 3 otherPlane /= np.linalg.norm(otherPlane) otherNormal = [float(i) for i in normals[relationModel][variant,other]] # map(float, planes[...]) does not work in python 3 otherNormal /= np.linalg.norm(otherNormal) - otherMatrix = np.array([otherPlane,otherNormal,np.cross(otherPlane,otherNormal)]) + otherMatrix = np.array([otherNormal,np.cross(otherPlane,otherNormal),otherPlane]).T - rot=np.dot(otherMatrix.T,myMatrix) + rot=np.dot(otherMatrix,myMatrix.T) return Orientation(matrix=np.dot(rot,self.asMatrix())) # no symmetry information ?? 
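A self-contained sketch of the corrected construction above (the plane/direction
pairs are illustrative stand-ins for the variant tables in orientation.py; each
pair must be mutually perpendicular for the matrices to be rotations):

  import numpy as np

  def basis(plane,direction):
      """Columns: direction, plane x direction, plane (all normalized)."""
      plane     = np.array(plane,dtype=float);     plane     /= np.linalg.norm(plane)
      direction = np.array(direction,dtype=float); direction /= np.linalg.norm(direction)
      return np.array([direction,np.cross(plane,direction),plane]).T

  myMatrix    = basis([1,1,1],[ 0,1,-1])                      # e.g. fcc (111)[01-1]
  otherMatrix = basis([0,1,1],[-1,1,-1])                      # e.g. bcc (011)[-11-1]
  rot = np.dot(otherMatrix,myMatrix.T)                        # maps "my" frame onto the "other" frame
  print(np.allclose(np.dot(rot,rot.T),np.eye(3)))             # --> True: a proper rotation results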
From 43457dcfc52ae198b861dabf40a095500ad8155d Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Wed, 21 Sep 2016 17:43:07 +0200 Subject: [PATCH 85/88] flexible selection of axes --- processing/pre/geom_mirror.py | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/processing/pre/geom_mirror.py b/processing/pre/geom_mirror.py index 61f212039..da4c7f581 100755 --- a/processing/pre/geom_mirror.py +++ b/processing/pre/geom_mirror.py @@ -14,15 +14,24 @@ scriptID = ' '.join([scriptName,damask.version]) # MAIN #-------------------------------------------------------------------------------------------------- +validDirections = ['x','y','z'] parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """ -Mirros spectral geometry description along given direction. +Mirrors spectral geometry description along given directions. """, version=scriptID) +parser.add_option('-d','--direction', + dest = 'directions', + action = 'extend', metavar = '', + help = "directions in which to mirror {'x','y','z'}") (options, filenames) = parser.parse_args() - +if options.directions is None: + parser.error('no direction given.') +if not set(options.directions).issubset(validDirections): + invalidDirections = [str(e) for e in set(options.directions).difference(validDirections)] + parser.error('invalid directions {}. '.format(*invalidDirections)) # --- loop over input files ------------------------------------------------------------------------- @@ -57,11 +66,14 @@ for name in filenames: # --- read data ------------------------------------------------------------------------------------ - microstructure = table.microstructure_read(info['grid']).reshape(info['grid'],order='F') # read microstructure + microstructure = table.microstructure_read(info['grid']).reshape(info['grid'],order='F') # read microstructure - microstructure = np.concatenate([microstructure,microstructure[:,:,::-1]],2) - microstructure = np.concatenate([microstructure,microstructure[:,::-1,:]],1) - microstructure = np.concatenate([microstructure,microstructure[::-1,:,:]],0) + if 'z' in options.directions: + microstructure = np.concatenate([microstructure,microstructure[:,:,::-1]],2) + if 'y' in options.directions: + microstructure = np.concatenate([microstructure,microstructure[:,::-1,:]],1) + if 'x' in options.directions: + microstructure = np.concatenate([microstructure,microstructure[::-1,:,:]],0) # --- do work ------------------------------------------------------------------------------------ From 4cd8fe7f369c1a66e4d104069559e15219276550 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Wed, 21 Sep 2016 17:45:23 +0200 Subject: [PATCH 86/88] indices should be integers (visible warning) --- processing/pre/geom_grainGrowth.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/processing/pre/geom_grainGrowth.py b/processing/pre/geom_grainGrowth.py index 95c67fb28..2781f90d9 100755 --- a/processing/pre/geom_grainGrowth.py +++ b/processing/pre/geom_grainGrowth.py @@ -89,9 +89,9 @@ for name in filenames: # Calculates gaussian weights for simulating 3d diffusion gauss = np.exp(-(X*X + Y*Y + Z*Z)/(2.0*options.d*options.d))/math.pow(2.0*np.pi*options.d*options.d,1.5) - gauss[:,:,grid[2]/2::] = gauss[:,:,round(grid[2]/2.)-1::-1] # trying to cope with uneven (odd) grid size - gauss[:,grid[1]/2::,:] = gauss[:,round(grid[1]/2.)-1::-1,:] - gauss[grid[0]/2::,:,:] = gauss[round(grid[0]/2.)-1::-1,:,:] + gauss[:,:,grid[2]/2::] = 
gauss[:,:,int(round(grid[2]/2.))-1::-1] # trying to cope with uneven (odd) grid size + gauss[:,grid[1]/2::,:] = gauss[:,int(round(grid[1]/2.))-1::-1,:] + gauss[grid[0]/2::,:,:] = gauss[int(round(grid[0]/2.))-1::-1,:,:] gauss = np.fft.rfftn(gauss) interfacialEnergy = lambda A,B: (A*B != 0)*(A != B)*1.0 #1.0 if A & B are distinct & nonzero, 0.0 otherwise From 6fade1532335b2b14280533daa206850f891eae5 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Wed, 21 Sep 2016 17:46:20 +0200 Subject: [PATCH 87/88] no blank lines after docstring --- lib/damask/orientation.py | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/damask/orientation.py b/lib/damask/orientation.py index e28d30a7a..d5274571b 100644 --- a/lib/damask/orientation.py +++ b/lib/damask/orientation.py @@ -1001,7 +1001,6 @@ class Orientation: positive number: fcc --> bcc negative number: bcc --> fcc """ - if relationModel not in ['KS','GT','GTdash','NW','Pitsch','Bain']: return None if int(direction) == 0: return None From 0f636bf6c3c7cde1427a194d9b3f545f2a54ec8a Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Wed, 21 Sep 2016 17:47:37 +0200 Subject: [PATCH 88/88] not needed --- processing/pre/geom_mirror.py | 1 - 1 file changed, 1 deletion(-) diff --git a/processing/pre/geom_mirror.py b/processing/pre/geom_mirror.py index da4c7f581..cc51749d4 100755 --- a/processing/pre/geom_mirror.py +++ b/processing/pre/geom_mirror.py @@ -4,7 +4,6 @@ import os,sys,math import numpy as np import damask -from scipy import ndimage from optparse import OptionParser scriptName = os.path.splitext(os.path.basename(__file__))[0]
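To round off the geom_mirror patches, a toy demonstration of the per-direction
mirroring the script performs (placeholder array; the real script reads the
microstructure from a geom file and updates the header accordingly):

  import numpy as np

  microstructure = np.arange(8).reshape(2,2,2)                # stand-in 2 x 2 x 2 grid
  mirrored = np.concatenate([microstructure,
                             microstructure[:,:,::-1]],2)     # mirror along z
  print(mirrored.shape)                                       # --> (2, 2, 4): grid doubled in z
  print(mirrored[0,0])                                        # --> [0 1 1 0]: mirror-symmetric in z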