From f592881f67b97b8b5b9963c5dd039f9f525378b1 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 12 Jul 2018 07:12:10 +0200 Subject: [PATCH 01/29] structured --- DAMASK_prerequisites.sh | 101 +++++++++++++++++----------------------- 1 file changed, 43 insertions(+), 58 deletions(-) diff --git a/DAMASK_prerequisites.sh b/DAMASK_prerequisites.sh index 4877d4b22..90289a2b8 100755 --- a/DAMASK_prerequisites.sh +++ b/DAMASK_prerequisites.sh @@ -12,21 +12,38 @@ echo + Send to damask@mpie.de for support echo + view with \'cat $OUTFILE\' echo =========================================== +function firstLevel { +echo -e '\n\n==============================================================================================' +echo $1 +echo ============================================================================================== +} + +function secondLevel { +echo ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +echo $1 +echo ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +} + +function thirdLevel { +echo ---------------------------------------------------------------------------------------------- +echo $1 +echo ---------------------------------------------------------------------------------------------- +} + function getDetails { if which $1 &> /dev/null; then - echo ---------------------------------------------------------------------------------------------- - echo $1: - echo ---------------------------------------------------------------------------------------------- + secondLevel $1: echo + location: which $1 echo + $1 $2: $1 $2 - echo -e '\n' else echo $1 not found fi +echo } + # redirect STDOUT and STDERR to logfile # https://stackoverflow.com/questions/11229385/redirect-all-output-in-a-bash-script-when-using-set-x^ exec > $OUTFILE 2>&1 @@ -38,28 +55,18 @@ DAMASK_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" echo XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX echo System report for \'$(hostname)\' created on $(date '+%Y-%m-%d %H:%M:%S') by \'$(whoami)\' echo XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX -echo -echo ============================================================================================== -echo DAMASK settings -echo ============================================================================================== -echo ---------------------------------------------------------------------------------------------- -echo DAMASK_ROOT: -echo ---------------------------------------------------------------------------------------------- + +firstLevel "DAMASK settings" +secondLevel "DAMASK_ROOT" echo $DAMASK_ROOT echo -echo ---------------------------------------------------------------------------------------------- -echo Version: -echo ---------------------------------------------------------------------------------------------- +secondLevel "Version" cat VERSION echo -echo ---------------------------------------------------------------------------------------------- -echo Settings in CONFIG: -echo ---------------------------------------------------------------------------------------------- +secondLevel "Settings in CONFIG" cat CONFIG -echo -echo ============================================================================================== -echo System -echo ============================================================================================== + +firstLevel "System" uname 
-a echo echo PATH: $PATH @@ -69,74 +76,52 @@ echo SHELL: $SHELL echo PETSC_ARCH: $PETSC_ARCH echo PETSC_DIR: $PETSC_DIR ls $PETSC_DIR/lib -echo -echo ============================================================================================== -echo Python -echo ============================================================================================== +firstLevel "Python" DEFAULT_PYTHON=python2.7 for executable in python python2 python3 python2.7; do getDetails $executable '--version' done -echo ---------------------------------------------------------------------------------------------- -echo Details on $DEFAULT_PYTHON: -echo ---------------------------------------------------------------------------------------------- +secondLevel "Details on $DEFAULT_PYTHON:" echo $(ls -la $(which $DEFAULT_PYTHON)) for module in numpy scipy;do - echo -e '\n----------------------------------------------------------------------------------------------' - echo $module - echo ---------------------------------------------------------------------------------------------- + thirdLevel $module $DEFAULT_PYTHON -c "import $module; \ print('Version: {}'.format($module.__version__)); \ print('Location: {}'.format($module.__file__))" done -echo ---------------------------------------------------------------------------------------------- -echo vtk -echo ---------------------------------------------------------------------------------------------- +thirdLevel vtk $DEFAULT_PYTHON -c "import vtk; \ print('Version: {}'.format(vtk.vtkVersion.GetVTKVersion())); \ print('Location: {}'.format(vtk.__file__))" -echo ---------------------------------------------------------------------------------------------- -echo h5py -echo ---------------------------------------------------------------------------------------------- +thirdLevel h5py $DEFAULT_PYTHON -c "import h5py; \ print('Version: {}'.format(h5py.version.version)); \ print('Location: {}'.format(h5py.__file__))" -echo -echo ============================================================================================== -echo GCC -echo ============================================================================================== + +firstLevel "GNU Compiler Collection" for executable in gcc g++ gfortran ;do getDetails $executable '--version' done -echo -echo ============================================================================================== -echo Intel Compiler Suite -echo ============================================================================================== + +firstLevel "Intel Compiler Suite" for executable in icc icpc ifort ;do getDetails $executable '--version' done -echo -echo ============================================================================================== -echo MPI Wrappers -echo ============================================================================================== + +firstLevel "MPI Wrappers" for executable in mpicc mpiCC mpic++ mpicpc mpicxx mpifort mpif90 mpif77; do getDetails $executable '-show' done -echo -echo ============================================================================================== -echo MPI Launchers -echo ============================================================================================== + +firstLevel "MPI Launchers" for executable in mpirun mpiexec; do getDetails $executable '--version' done -echo -echo ============================================================================================== -echo Abaqus -echo 
============================================================================================== + +firstLevel "Abaqus" cd installation/mods_Abaqus # to have the right environment file for executable in abaqus abq2016 abq2017; do getDetails $executable 'information=all' done cd ../.. - From ef6ffc94e7f4cdcbea618b1b0a93f189effaf47a Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Wed, 18 Jul 2018 23:40:49 +0200 Subject: [PATCH 02/29] simplified system report --- DAMASK_prerequisites.sh | 4 ++-- PRIVATE | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/DAMASK_prerequisites.sh b/DAMASK_prerequisites.sh index 90289a2b8..b5acede32 100755 --- a/DAMASK_prerequisites.sh +++ b/DAMASK_prerequisites.sh @@ -25,7 +25,7 @@ echo +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ } function thirdLevel { -echo ---------------------------------------------------------------------------------------------- +echo -e '\n----------------------------------------------------------------------------------------------' echo $1 echo ---------------------------------------------------------------------------------------------- } @@ -121,7 +121,7 @@ done firstLevel "Abaqus" cd installation/mods_Abaqus # to have the right environment file -for executable in abaqus abq2016 abq2017; do +for executable in abaqus abq2017 abq2018; do getDetails $executable 'information=all' done cd ../.. diff --git a/PRIVATE b/PRIVATE index d1d465808..0c9db9b75 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit d1d46580823d2896059b9514ddc975f9fe5f6b1f +Subproject commit 0c9db9b7542e7e1c3cac96e4821be9d9a7505a9d From a3682d4876b9c3399ef36dfc6229390aca00cc1c Mon Sep 17 00:00:00 2001 From: Test User Date: Thu, 19 Jul 2018 06:58:39 +0200 Subject: [PATCH 03/29] [skip ci] updated version information after successful test of v2.0.2-232-gef6ffc94 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 8a75e99ff..1779757ff 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.2-228-ge08b7325 +v2.0.2-232-gef6ffc94 From beb418eb4499c15e312da70589c48e89d427b9ce Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 19 Jul 2018 15:20:34 +0200 Subject: [PATCH 04/29] python3 compatible way ugly, but works also for python2.7. 
Probably there is a better solution when 2.7 is deprecated --- lib/damask/util.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/lib/damask/util.py b/lib/damask/util.py index 413f955e9..fde9b35a3 100644 --- a/lib/damask/util.py +++ b/lib/damask/util.py @@ -59,9 +59,9 @@ def report_geom(info, what = ['grid','size','origin','homogenization','microstructures']): """Reports (selected) geometry information""" output = { - 'grid' : 'grid a b c: {}'.format(' x '.join(map(str,info['grid' ]))), - 'size' : 'size x y z: {}'.format(' x '.join(map(str,info['size' ]))), - 'origin' : 'origin x y z: {}'.format(' : '.join(map(str,info['origin']))), + 'grid' : 'grid a b c: {}'.format(' x '.join(list(map(str,info['grid' ])))), + 'size' : 'size x y z: {}'.format(' x '.join(list(map(str,info['size' ])))), + 'origin' : 'origin x y z: {}'.format(' : '.join(list(map(str,info['origin'])))), 'homogenization' : 'homogenization: {}'.format(info['homogenization']), 'microstructures' : 'microstructures: {}'.format(info['microstructures']), } @@ -103,9 +103,9 @@ def coordGridAndSize(coordinates): """Determines grid count and overall physical size along each dimension of an ordered array of coordinates""" dim = coordinates.shape[1] coords = [np.unique(coordinates[:,i]) for i in range(dim)] - mincorner = np.array(map(min,coords)) - maxcorner = np.array(map(max,coords)) - grid = np.array(map(len,coords),'i') + mincorner = np.array(list(map(min,coords))) + maxcorner = np.array(list(map(max,coords))) + grid = np.array(list(map(len,coords)),'i') size = grid/np.maximum(np.ones(dim,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1) size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other ones return grid,size From 6dd970dfe01b7b243f57456b113b65a876aeff56 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 19 Jul 2018 15:46:14 +0200 Subject: [PATCH 05/29] logic seemed to be broken (only filter out strings which have a 'strip' attribute string in python 3 have both attributes, '__iter__' and '__getitem'. 
Old syntax therefore split up strings into characters --- lib/damask/util.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/damask/util.py b/lib/damask/util.py index fde9b35a3..93387205e 100644 --- a/lib/damask/util.py +++ b/lib/damask/util.py @@ -36,8 +36,8 @@ class bcolors: def srepr(arg,glue = '\n'): """Joins arguments as individual lines""" if (not hasattr(arg, "strip") and - hasattr(arg, "__getitem__") or - hasattr(arg, "__iter__")): + (hasattr(arg, "__getitem__") or + hasattr(arg, "__iter__"))): return glue.join(str(x) for x in arg) return arg if isinstance(arg,str) else repr(arg) From d138993c1d8212608637b62a9f265a6df897d21d Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 19 Jul 2018 16:12:36 +0200 Subject: [PATCH 06/29] using util functionality to be compatible with python3 --- processing/post/addCompatibilityMismatch.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/processing/post/addCompatibilityMismatch.py b/processing/post/addCompatibilityMismatch.py index 51e5f5eab..b798acdbd 100755 --- a/processing/post/addCompatibilityMismatch.py +++ b/processing/post/addCompatibilityMismatch.py @@ -282,19 +282,12 @@ for name in filenames: table.data_readArray([options.defgrad,options.pos]) table.data_rewind() - if len(table.data.shape) < 2: table.data.shape += (1,) # expand to 2D shape if table.data[:,9:].shape[1] < 3: table.data = np.hstack((table.data, np.zeros((table.data.shape[0], 3-table.data[:,9:].shape[1]),dtype='f'))) # fill coords up to 3D with zeros - coords = [np.unique(table.data[:,9+i]) for i in range(3)] - mincorner = np.array(map(min,coords)) - maxcorner = np.array(map(max,coords)) - grid = np.array(map(len,coords),'i') - size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1) - size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 set to smallest among other spacings - + grid,size = damask.util.coordGridAndSize(table.data[:,9:12]) N = grid.prod() if N != len(table.data): errors.append('data count {} does not match grid {}x{}x{}.'.format(N,*grid)) From 9821654aae448686d1fe43785839b546b7a9b02c Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 19 Jul 2018 16:16:10 +0200 Subject: [PATCH 07/29] iteritems does not exist in python3 anymore items also works for python2 (might be slower though) --- processing/post/addDeterminant.py | 4 ++-- processing/post/addDeviator.py | 6 +++--- processing/post/addGaussian.py | 6 +++--- processing/post/addMises.py | 4 ++-- processing/post/addSpectralDecomposition.py | 4 ++-- processing/post/addStrainTensors.py | 2 +- processing/post/rotateData.py | 2 +- processing/pre/geom_translate.py | 2 +- processing/pre/patchFromReconstructedBoundaries.py | 2 +- 9 files changed, 16 insertions(+), 16 deletions(-) diff --git a/processing/post/addDeterminant.py b/processing/post/addDeterminant.py index 1f721c27e..7196051e5 100755 --- a/processing/post/addDeterminant.py +++ b/processing/post/addDeterminant.py @@ -58,7 +58,7 @@ for name in filenames: errors = [] remarks = [] - for type, data in items.iteritems(): + for type, data in items.items(): for what in data['labels']: dim = table.label_dimension(what) if dim != data['dim']: remarks.append('column {} is not a {}...'.format(what,type)) @@ -81,7 +81,7 @@ for name in filenames: outputAlive = True while outputAlive and table.data_read(): # read next data line of ASCII table - for type, data in items.iteritems(): + for type, data in items.items(): for 
column in data['column']: table.data_append(determinant(map(float,table.data[column: column+data['dim']]))) diff --git a/processing/post/addDeviator.py b/processing/post/addDeviator.py index 471c2635f..4df8a6803 100755 --- a/processing/post/addDeviator.py +++ b/processing/post/addDeviator.py @@ -66,7 +66,7 @@ for name in filenames: remarks = [] column = {} - for type, data in items.iteritems(): + for type, data in items.items(): for what in data['labels']: dim = table.label_dimension(what) if dim != data['dim']: remarks.append('column {} is not a {}.'.format(what,type)) @@ -83,7 +83,7 @@ for name in filenames: # ------------------------------------------ assemble header -------------------------------------- table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:])) - for type, data in items.iteritems(): + for type, data in items.items(): for label in data['active']: table.labels_append(['{}_dev({})'.format(i+1,label) for i in range(data['dim'])] + \ (['sph({})'.format(label)] if options.spherical else [])) # extend ASCII header with new labels @@ -93,7 +93,7 @@ for name in filenames: outputAlive = True while outputAlive and table.data_read(): # read next data line of ASCII table - for type, data in items.iteritems(): + for type, data in items.items(): for column in data['column']: table.data_append(deviator(map(float,table.data[column: column+data['dim']]),options.spherical)) diff --git a/processing/post/addGaussian.py b/processing/post/addGaussian.py index c198ef62f..bc0100f56 100755 --- a/processing/post/addGaussian.py +++ b/processing/post/addGaussian.py @@ -83,7 +83,7 @@ for name in filenames: if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos)) else: colCoord = table.label_index(options.pos) - for type, data in items.iteritems(): + for type, data in items.items(): for what in (data['labels'] if data['labels'] is not None else []): dim = table.label_dimension(what) if dim != data['dim']: remarks.append('column {} is not a {}.'.format(what,type)) @@ -100,7 +100,7 @@ for name in filenames: # ------------------------------------------ assemble header -------------------------------------- table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:])) - for type, data in items.iteritems(): + for type, data in items.items(): for label in data['active']: table.labels_append(['Gauss{}({})'.format(options.sigma,label)]) # extend ASCII header with new labels table.head_write() @@ -114,7 +114,7 @@ for name in filenames: # ------------------------------------------ process value field ----------------------------------- stack = [table.data] - for type, data in items.iteritems(): + for type, data in items.items(): for i,label in enumerate(data['active']): stack.append(ndimage.filters.gaussian_filter(table.data[:,data['column'][i]], options.sigma,options.order, diff --git a/processing/post/addMises.py b/processing/post/addMises.py index 2ce350dbd..4719c2e35 100755 --- a/processing/post/addMises.py +++ b/processing/post/addMises.py @@ -70,7 +70,7 @@ for name in filenames: errors = [] remarks = [] - for type, data in items.iteritems(): + for type, data in items.items(): for what in data['labels']: dim = table.label_dimension(what) if dim != data['dim']: remarks.append('column {} is not a {}...'.format(what,type)) @@ -94,7 +94,7 @@ for name in filenames: outputAlive = True while outputAlive and table.data_read(): # read next data line of ASCII table - for type, data in items.iteritems(): + for type, data in items.items(): for column in 
data['column']: table.data_append(Mises(type, np.array(table.data[column:column+data['dim']],'d').reshape(data['shape']))) diff --git a/processing/post/addSpectralDecomposition.py b/processing/post/addSpectralDecomposition.py index 76bf2e875..b21900c0c 100755 --- a/processing/post/addSpectralDecomposition.py +++ b/processing/post/addSpectralDecomposition.py @@ -58,7 +58,7 @@ for name in filenames: errors = [] remarks = [] - for type, data in items.iteritems(): + for type, data in items.items(): for what in data['labels']: dim = table.label_dimension(what) if dim != data['dim']: remarks.append('column {} is not a {}...'.format(what,type)) @@ -84,7 +84,7 @@ for name in filenames: outputAlive = True while outputAlive and table.data_read(): # read next data line of ASCII table - for type, data in items.iteritems(): + for type, data in items.items(): for column in data['column']: (u,v) = np.linalg.eigh(np.array(map(float,table.data[column:column+data['dim']])).reshape(data['shape'])) if options.rh and np.dot(np.cross(v[:,0], v[:,1]), v[:,2]) < 0.0 : v[:, 2] *= -1.0 # ensure right-handed eigenvector basis diff --git a/processing/post/addStrainTensors.py b/processing/post/addStrainTensors.py index 447ae03ba..7cb9f3079 100755 --- a/processing/post/addStrainTensors.py +++ b/processing/post/addStrainTensors.py @@ -101,7 +101,7 @@ for name in filenames: errors = [] remarks = [] - for type, data in items.iteritems(): + for type, data in items.items(): for what in data['labels']: dim = table.label_dimension(what) if dim != data['dim']: remarks.append('column {} is not a {}...'.format(what,type)) diff --git a/processing/post/rotateData.py b/processing/post/rotateData.py index 08958cc86..c5e4882b9 100755 --- a/processing/post/rotateData.py +++ b/processing/post/rotateData.py @@ -73,7 +73,7 @@ for name in filenames: remarks = [] column = {} - for type, data in items.iteritems(): + for type, data in items.items(): for what in data['labels']: dim = table.label_dimension(what) if dim != data['dim']: remarks.append('column {} is not a {}.'.format(what,type)) diff --git a/processing/pre/geom_translate.py b/processing/pre/geom_translate.py index f8f6e4169..2f4918632 100755 --- a/processing/pre/geom_translate.py +++ b/processing/pre/geom_translate.py @@ -92,7 +92,7 @@ for name in filenames: } substituted = np.copy(microstructure) - for k, v in sub.iteritems(): substituted[microstructure==k] = v # substitute microstructure indices + for k, v in sub.items(): substituted[microstructure==k] = v # substitute microstructure indices substituted += options.microstructure # shift microstructure indices diff --git a/processing/pre/patchFromReconstructedBoundaries.py b/processing/pre/patchFromReconstructedBoundaries.py index a43ccc236..fabec0fdf 100755 --- a/processing/pre/patchFromReconstructedBoundaries.py +++ b/processing/pre/patchFromReconstructedBoundaries.py @@ -344,7 +344,7 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn): else: myNeighbors[grainNeighbors[leg][side]] = 1 if myNeighbors: # do I have any neighbors (i.e., non-bounding box segment) - candidateGrains = sorted(myNeighbors.iteritems(), key=lambda p: (p[1],p[0]), reverse=True) # sort grain counting + candidateGrains = sorted(myNeighbors.items(), key=lambda p: (p[1],p[0]), reverse=True) # sort grain counting # most frequent one not yet seen? rcData['grainMapping'].append(candidateGrains[0 if candidateGrains[0][0] not in rcData['grainMapping'] else 1][0]) # must be me then # special case of bi-crystal situation... 
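The dict-iteration change above is the core of this patch: dict.iteritems() was removed in Python 3, while dict.items() exists in both versions (a list copy in Python 2, a view in Python 3, so the Python 2 path may allocate a little more). A minimal sketch of the before/after, using an illustrative dictionary rather than the actual DAMASK data structures:

    # Python 2 only -- .iteritems() no longer exists in Python 3:
    #   for datatype, data in items.iteritems(): ...
    # Portable to both Python 2 and 3 -- .items():
    items = {'tensor': {'dim': 9}, 'vector': {'dim': 3}}  # illustrative data, not from DAMASK
    for datatype, data in items.items():
        print(datatype, data['dim'])  # one label class and its dimension per line
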
From 35e470ff4d500a81587c764f94f5146e5547b00d Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 19 Jul 2018 16:19:05 +0200 Subject: [PATCH 08/29] needed for python3 --- processing/post/addCauchy.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/processing/post/addCauchy.py b/processing/post/addCauchy.py index a21d91064..43717c975 100755 --- a/processing/post/addCauchy.py +++ b/processing/post/addCauchy.py @@ -75,8 +75,8 @@ for name in filenames: outputAlive = True while outputAlive and table.data_read(): # read next data line of ASCII table - F = np.array(map(float,table.data[column[options.defgrad]:column[options.defgrad]+9]),'d').reshape(3,3) - P = np.array(map(float,table.data[column[options.stress ]:column[options.stress ]+9]),'d').reshape(3,3) + F = np.array(list(map(float,table.data[column[options.defgrad]:column[options.defgrad]+9])),'d').reshape(3,3) + P = np.array(list(map(float,table.data[column[options.stress ]:column[options.stress ]+9])),'d').reshape(3,3) table.data_append(list(1.0/np.linalg.det(F)*np.dot(P,F.T).reshape(9))) # [Cauchy] = (1/det(F)) * [P].[F_transpose] outputAlive = table.data_write() # output processed line From f928bd5e5b3d958b885891555978000e4e413345 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 19 Jul 2018 16:26:30 +0200 Subject: [PATCH 09/29] more python 3 compatibility --- processing/post/addDeterminant.py | 5 ++--- processing/post/addDeviator.py | 4 ++-- processing/post/addDisplacement.py | 8 +------- processing/post/addEhkl.py | 4 ++-- processing/post/addEuclideanDistance.py | 10 ++-------- 5 files changed, 9 insertions(+), 22 deletions(-) diff --git a/processing/post/addDeterminant.py b/processing/post/addDeterminant.py index 7196051e5..6d992b6f5 100755 --- a/processing/post/addDeterminant.py +++ b/processing/post/addDeterminant.py @@ -83,10 +83,9 @@ for name in filenames: while outputAlive and table.data_read(): # read next data line of ASCII table for type, data in items.items(): for column in data['column']: - table.data_append(determinant(map(float,table.data[column: - column+data['dim']]))) + table.data_append(determinant(list(map(float,table.data[column: column+data['dim']])))) outputAlive = table.data_write() # output processed line # ------------------------------------------ output finalization ----------------------------------- - table.close() # close input ASCII table (works for stdin) \ No newline at end of file + table.close() # close input ASCII table (works for stdin) diff --git a/processing/post/addDeviator.py b/processing/post/addDeviator.py index 4df8a6803..86fcac509 100755 --- a/processing/post/addDeviator.py +++ b/processing/post/addDeviator.py @@ -95,8 +95,8 @@ for name in filenames: while outputAlive and table.data_read(): # read next data line of ASCII table for type, data in items.items(): for column in data['column']: - table.data_append(deviator(map(float,table.data[column: - column+data['dim']]),options.spherical)) + table.data_append(deviator(list(map(float,table.data[column: + column+data['dim']])),options.spherical)) outputAlive = table.data_write() # output processed line # ------------------------------------------ output finalization ----------------------------------- diff --git a/processing/post/addDisplacement.py b/processing/post/addDisplacement.py index bc1d7377b..00132d7c6 100755 --- a/processing/post/addDisplacement.py +++ b/processing/post/addDisplacement.py @@ -168,13 +168,7 @@ for name in filenames: np.zeros((table.data.shape[0], 3-table.data[:,9:].shape[1]),dtype='f'))) # 
fill coords up to 3D with zeros - coords = [np.unique(table.data[:,9+i]) for i in range(3)] - mincorner = np.array(map(min,coords)) - maxcorner = np.array(map(max,coords)) - grid = np.array(map(len,coords),'i') - size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1) - size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 set to smallest among other spacings - + grid,size = damask.util.coordGridAndSize(table.data[:,9:12]) N = grid.prod() if N != len(table.data): errors.append('data count {} does not match grid {}x{}x{}.'.format(N,*grid)) diff --git a/processing/post/addEhkl.py b/processing/post/addEhkl.py index f7a143466..59f678118 100755 --- a/processing/post/addEhkl.py +++ b/processing/post/addEhkl.py @@ -88,9 +88,9 @@ for name in filenames: outputAlive = True while outputAlive and table.data_read(): # read next data line of ASCII table for column in columns: - table.data_append(E_hkl(map(float,table.data[column:column+3]),options.hkl)) + table.data_append(E_hkl(list(map(float,table.data[column:column+3])),options.hkl)) outputAlive = table.data_write() # output processed line # ------------------------------------------ output finalization ----------------------------------- - table.close() # close ASCII tables \ No newline at end of file + table.close() # close ASCII tables diff --git a/processing/post/addEuclideanDistance.py b/processing/post/addEuclideanDistance.py index b83c36b6c..b3f972fc7 100755 --- a/processing/post/addEuclideanDistance.py +++ b/processing/post/addEuclideanDistance.py @@ -151,10 +151,8 @@ for name in filenames: remarks = [] column = {} - coordDim = table.label_dimension(options.pos) - if not 3 >= coordDim >= 1: + if not 3 >= table.label_dimension(options.pos) >= 1: errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos)) - else: coordCol = table.label_index(options.pos) if table.label_dimension(options.id) != 1: errors.append('grain identifier {} not found.'.format(options.id)) else: idCol = table.label_index(options.id) @@ -178,11 +176,7 @@ for name in filenames: table.data_readArray() - coords = [np.unique(table.data[:,coordCol+i]) for i in range(coordDim)] - mincorner = np.array(map(min,coords)) - maxcorner = np.array(map(max,coords)) - grid = np.array(map(len,coords)+[1]*(3-len(coords)),'i') - + grid,size = damask.util.coordGridAndSize(table.data[:,table.label_indexrange(options.pos)]) N = grid.prod() if N != len(table.data): errors.append('data count {} does not match grid {}.'.format(N,'x'.join(map(str,grid)))) From 0438b7113a791dc9b08154da11bdda9893a7feb4 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 19 Jul 2018 16:34:04 +0200 Subject: [PATCH 10/29] using util function where possible, ensuring python3 compatibility --- processing/post/addPole.py | 12 ++++++------ processing/post/averageDown.py | 25 ++++++++----------------- processing/post/blowUp.py | 10 ++-------- 3 files changed, 16 insertions(+), 31 deletions(-) diff --git a/processing/post/addPole.py b/processing/post/addPole.py index 10c5cce67..95bc87637 100755 --- a/processing/post/addPole.py +++ b/processing/post/addPole.py @@ -120,15 +120,15 @@ for name in filenames: outputAlive = True while outputAlive and table.data_read(): # read next data line of ASCII table if inputtype == 'eulers': - o = damask.Orientation(Eulers = np.array(map(float,table.data[column:column+3]))*toRadians) + o = damask.Orientation(Eulers = 
np.array(list(map(float,table.data[column:column+3])))*toRadians) elif inputtype == 'matrix': - o = damask.Orientation(matrix = np.array(map(float,table.data[column:column+9])).reshape(3,3).transpose()) + o = damask.Orientation(matrix = np.array(list(map(float,table.data[column:column+9]))).reshape(3,3).transpose()) elif inputtype == 'frame': - o = damask.Orientation(matrix = np.array(map(float,table.data[column[0]:column[0]+3] + \ - table.data[column[1]:column[1]+3] + \ - table.data[column[2]:column[2]+3])).reshape(3,3)) + o = damask.Orientation(matrix = np.array(list(map(float,table.data[column[0]:column[0]+3] + \ + table.data[column[1]:column[1]+3] + \ + table.data[column[2]:column[2]+3]))).reshape(3,3)) elif inputtype == 'quaternion': - o = damask.Orientation(quaternion = np.array(map(float,table.data[column:column+4]))) + o = damask.Orientation(quaternion = np.array(list(map(float,table.data[column:column+4])))) rotatedPole = o.quaternion*pole # rotate pole according to crystal orientation (x,y) = rotatedPole[0:2]/(1.+abs(pole[2])) # stereographic projection diff --git a/processing/post/averageDown.py b/processing/post/averageDown.py index 886083428..501ca3b3c 100755 --- a/processing/post/averageDown.py +++ b/processing/post/averageDown.py @@ -76,7 +76,6 @@ for name in filenames: remarks = [] if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos)) - else: colCoord = table.label_index(options.pos) if remarks != []: damask.util.croak(remarks) if errors != []: @@ -94,14 +93,7 @@ for name in filenames: table.data_readArray() if (any(options.grid) == 0 or any(options.size) == 0.0): - coords = [np.unique(table.data[:,colCoord+i]) for i in range(3)] - mincorner = np.array(map(min,coords)) - maxcorner = np.array(map(max,coords)) - grid = np.array(map(len,coords),'i') - size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1) - size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 set to smallest among other spacings - delta = size/np.maximum(np.ones(3,'d'), grid) - origin = mincorner - 0.5*delta # shift from cell center to corner + grid,size = damask.util.coordGridAndSize(table.data[:,table.label_indexrange(options.pos)]) else: grid = np.array(options.grid,'i') @@ -129,16 +121,15 @@ for name in filenames: #--- generate grid -------------------------------------------------------------------------------- - if colCoord: - x = (0.5 + shift[0] + np.arange(packedGrid[0],dtype=float))/packedGrid[0]*size[0] + origin[0] - y = (0.5 + shift[1] + np.arange(packedGrid[1],dtype=float))/packedGrid[1]*size[1] + origin[1] - z = (0.5 + shift[2] + np.arange(packedGrid[2],dtype=float))/packedGrid[2]*size[2] + origin[2] + x = (0.5 + shift[0] + np.arange(packedGrid[0],dtype=float))/packedGrid[0]*size[0] + origin[0] + y = (0.5 + shift[1] + np.arange(packedGrid[1],dtype=float))/packedGrid[1]*size[1] + origin[1] + z = (0.5 + shift[2] + np.arange(packedGrid[2],dtype=float))/packedGrid[2]*size[2] + origin[2] - xx = np.tile( x, packedGrid[1]* packedGrid[2]) - yy = np.tile(np.repeat(y,packedGrid[0] ),packedGrid[2]) - zz = np.repeat(z,packedGrid[0]*packedGrid[1]) + xx = np.tile( x, packedGrid[1]* packedGrid[2]) + yy = np.tile(np.repeat(y,packedGrid[0] ),packedGrid[2]) + zz = np.repeat(z,packedGrid[0]*packedGrid[1]) - table.data[:,colCoord:colCoord+3] = np.squeeze(np.dstack((xx,yy,zz))) + table.data[:,table.label_indexragen(options.pos)] = np.squeeze(np.dstack((xx,yy,zz))) 
# ------------------------------------------ output result ----------------------------------------- diff --git a/processing/post/blowUp.py b/processing/post/blowUp.py index 0642deab1..5a0d631e0 100755 --- a/processing/post/blowUp.py +++ b/processing/post/blowUp.py @@ -64,7 +64,6 @@ for name in filenames: remarks = [] if table.label_dimension(options.pos) != 3: errors.append('coordinates "{}" are not a vector.'.format(options.pos)) - else: colCoord = table.label_index(options.pos) colElem = table.label_index('elem') @@ -79,12 +78,7 @@ for name in filenames: table.data_readArray(options.pos) table.data_rewind() - coords = [np.unique(table.data[:,i]) for i in range(3)] - mincorner = np.array(map(min,coords)) - maxcorner = np.array(map(max,coords)) - grid = np.array(map(len,coords),'i') - size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1) - size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 set to smallest among other spacings + grid,size = damask.util.coordGridAndSize(table.data) packing = np.array(options.packing,'i') outSize = grid*packing @@ -113,7 +107,7 @@ for name in filenames: for c in range(outSize[2]): for b in range(outSize[1]): for a in range(outSize[0]): - data[a,b,c,colCoord:colCoord+3] = [a+0.5,b+0.5,c+0.5]*elementSize + data[a,b,c,table.label_indexrange(options.pos)] = [a+0.5,b+0.5,c+0.5]*elementSize if colElem != -1: data[a,b,c,colElem] = elem table.data = data[a,b,c,:].tolist() outputAlive = table.data_write() # output processed line From 1f637a0c49bf3cb8022b27bb7ac6cf6412eb6e99 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 19 Jul 2018 16:53:48 +0200 Subject: [PATCH 11/29] python3 compatible map requires conversion to list --- processing/post/addEuclideanDistance.py | 2 +- processing/post/addIPFcolor.py | 10 +++++----- processing/post/addOrientations.py | 16 ++++++++-------- processing/post/addPK2.py | 4 ++-- processing/post/addSchmidfactors.py | 12 ++++++------ processing/post/addSpectralDecomposition.py | 2 +- processing/post/addStrainTensors.py | 2 +- processing/post/rotateData.py | 4 ++-- processing/post/vtk2ang.py | 2 -- processing/post/vtk_rectilinearGrid.py | 2 +- 10 files changed, 27 insertions(+), 29 deletions(-) diff --git a/processing/post/addEuclideanDistance.py b/processing/post/addEuclideanDistance.py index b3f972fc7..d99eaaa8c 100755 --- a/processing/post/addEuclideanDistance.py +++ b/processing/post/addEuclideanDistance.py @@ -102,7 +102,7 @@ parser.add_option('-t', help = 'feature type {{{}}} '.format(', '.join(map(lambda x:'/'.join(x['names']),features))) ) parser.add_option('-n', '--neighborhood', - dest = 'neighborhood', choices = neighborhoods.keys(), metavar = 'string', + dest = 'neighborhood', choices = list(neighborhoods.keys()), metavar = 'string', help = 'neighborhood type [neumann] {{{}}}'.format(', '.join(neighborhoods.keys()))) parser.add_option('-s', '--scale', diff --git a/processing/post/addIPFcolor.py b/processing/post/addIPFcolor.py index 2fcc000e1..fd93b45a0 100755 --- a/processing/post/addIPFcolor.py +++ b/processing/post/addIPFcolor.py @@ -116,18 +116,18 @@ for name in filenames: outputAlive = True while outputAlive and table.data_read(): # read next data line of ASCII table if inputtype == 'eulers': - o = damask.Orientation(Eulers = np.array(map(float,table.data[column:column+3]))*toRadians, + o = damask.Orientation(Eulers = np.array(list(map(float,table.data[column:column+3])))*toRadians, symmetry = options.symmetry).reduced() 
elif inputtype == 'matrix': - o = damask.Orientation(matrix = np.array(map(float,table.data[column:column+9])).reshape(3,3).transpose(), + o = damask.Orientation(matrix = np.array(list(map(float,table.data[column:column+9]))).reshape(3,3).transpose(), symmetry = options.symmetry).reduced() elif inputtype == 'frame': - o = damask.Orientation(matrix = np.array(map(float,table.data[column[0]:column[0]+3] + \ + o = damask.Orientation(matrix = np.array(list(map(float,table.data[column[0]:column[0]+3] + \ table.data[column[1]:column[1]+3] + \ - table.data[column[2]:column[2]+3])).reshape(3,3), + table.data[column[2]:column[2]+3]))).reshape(3,3), symmetry = options.symmetry).reduced() elif inputtype == 'quaternion': - o = damask.Orientation(quaternion = np.array(map(float,table.data[column:column+4])), + o = damask.Orientation(quaternion = np.array(list(map(float,table.data[column:column+4]))), symmetry = options.symmetry).reduced() table.data_append(o.IPFcolor(pole)) diff --git a/processing/post/addOrientations.py b/processing/post/addOrientations.py index dc23b351e..e7948c842 100755 --- a/processing/post/addOrientations.py +++ b/processing/post/addOrientations.py @@ -80,7 +80,7 @@ parser.set_defaults(output = [], (options, filenames) = parser.parse_args() -options.output = map(lambda x: x.lower(), options.output) +options.output = list(map(lambda x: x.lower(), options.output)) if options.output == [] or (not set(options.output).issubset(set(outputChoices))): parser.error('output must be chosen from {}.'.format(', '.join(outputChoices))) @@ -147,21 +147,21 @@ for name in filenames: outputAlive = True while outputAlive and table.data_read(): # read next data line of ASCII table if inputtype == 'eulers': - o = damask.Orientation(Eulers = np.array(map(float,table.data[column:column+3]))*toRadians, + o = damask.Orientation(Eulers = np.array(list(map(float,table.data[column:column+3])))*toRadians, symmetry = options.symmetry).reduced() elif inputtype == 'rodrigues': - o = damask.Orientation(Rodrigues= np.array(map(float,table.data[column:column+3])), + o = damask.Orientation(Rodrigues= np.array(list(map(float,table.data[column:column+3]))), symmetry = options.symmetry).reduced() elif inputtype == 'matrix': - o = damask.Orientation(matrix = np.array(map(float,table.data[column:column+9])).reshape(3,3).transpose(), + o = damask.Orientation(matrix = np.array(list(map(float,table.data[column:column+9]))).reshape(3,3).transpose(), symmetry = options.symmetry).reduced() elif inputtype == 'frame': - o = damask.Orientation(matrix = np.array(map(float,table.data[column[0]:column[0]+3] + \ - table.data[column[1]:column[1]+3] + \ - table.data[column[2]:column[2]+3])).reshape(3,3), + o = damask.Orientation(matrix = np.array(list(map(float,table.data[column[0]:column[0]+3] + \ + table.data[column[1]:column[1]+3] + \ + table.data[column[2]:column[2]+3]))).reshape(3,3), symmetry = options.symmetry).reduced() elif inputtype == 'quaternion': - o = damask.Orientation(quaternion = np.array(map(float,table.data[column:column+4])), + o = damask.Orientation(quaternion = np.array(list(map(float,table.data[column:column+4]))), symmetry = options.symmetry).reduced() o.quaternion = r*o.quaternion*R # apply additional lab and crystal frame rotations diff --git a/processing/post/addPK2.py b/processing/post/addPK2.py index 9e6308c39..82898efde 100755 --- a/processing/post/addPK2.py +++ b/processing/post/addPK2.py @@ -75,8 +75,8 @@ for name in filenames: # ------------------------------------------ process data 
------------------------------------------ outputAlive = True while outputAlive and table.data_read(): # read next data line of ASCII table - F = np.array(map(float,table.data[column[options.defgrad]:column[options.defgrad]+9]),'d').reshape(3,3) - P = np.array(map(float,table.data[column[options.stress ]:column[options.stress ]+9]),'d').reshape(3,3) + F = np.array(list(map(float,table.data[column[options.defgrad]:column[options.defgrad]+9])),'d').reshape(3,3) + P = np.array(list(map(float,table.data[column[options.stress ]:column[options.stress ]+9])),'d').reshape(3,3) table.data_append(list(np.dot(np.linalg.inv(F),P).reshape(9))) # [S] =[P].[F-1] outputAlive = table.data_write() # output processed line diff --git a/processing/post/addSchmidfactors.py b/processing/post/addSchmidfactors.py index 4f34621b7..81f240ac1 100755 --- a/processing/post/addSchmidfactors.py +++ b/processing/post/addSchmidfactors.py @@ -252,15 +252,15 @@ for name in filenames: outputAlive = True while outputAlive and table.data_read(): # read next data line of ASCII table if inputtype == 'eulers': - o = damask.Orientation(Eulers = np.array(map(float,table.data[column:column+3]))*toRadians,) + o = damask.Orientation(Eulers = np.array(list(map(float,table.data[column:column+3])))*toRadians,) elif inputtype == 'matrix': - o = damask.Orientation(matrix = np.array(map(float,table.data[column:column+9])).reshape(3,3).transpose(),) + o = damask.Orientation(matrix = np.array(list(map(float,table.data[column:column+9]))).reshape(3,3).transpose(),) elif inputtype == 'frame': - o = damask.Orientation(matrix = np.array(map(float,table.data[column[0]:column[0]+3] + \ - table.data[column[1]:column[1]+3] + \ - table.data[column[2]:column[2]+3])).reshape(3,3),) + o = damask.Orientation(matrix = np.array(list(map(float,table.data[column[0]:column[0]+3] + \ + table.data[column[1]:column[1]+3] + \ + table.data[column[2]:column[2]+3]))).reshape(3,3),) elif inputtype == 'quaternion': - o = damask.Orientation(quaternion = np.array(map(float,table.data[column:column+4])),) + o = damask.Orientation(quaternion = np.array(list(map(float,table.data[column:column+4]))),) rotForce = o.quaternion.conjugated() * force rotNormal = o.quaternion.conjugated() * normal diff --git a/processing/post/addSpectralDecomposition.py b/processing/post/addSpectralDecomposition.py index b21900c0c..6eea8bee2 100755 --- a/processing/post/addSpectralDecomposition.py +++ b/processing/post/addSpectralDecomposition.py @@ -86,7 +86,7 @@ for name in filenames: while outputAlive and table.data_read(): # read next data line of ASCII table for type, data in items.items(): for column in data['column']: - (u,v) = np.linalg.eigh(np.array(map(float,table.data[column:column+data['dim']])).reshape(data['shape'])) + (u,v) = np.linalg.eigh(np.array(list(map(float,table.data[column:column+data['dim']]))).reshape(data['shape'])) if options.rh and np.dot(np.cross(v[:,0], v[:,1]), v[:,2]) < 0.0 : v[:, 2] *= -1.0 # ensure right-handed eigenvector basis table.data_append(list(u)) # vector of max,mid,min eigval table.data_append(list(v.transpose().reshape(data['dim']))) # 3x3=9 combo vector of max,mid,min eigvec coordinates diff --git a/processing/post/addStrainTensors.py b/processing/post/addStrainTensors.py index 7cb9f3079..14d66d5f6 100755 --- a/processing/post/addStrainTensors.py +++ b/processing/post/addStrainTensors.py @@ -132,7 +132,7 @@ for name in filenames: while outputAlive and table.data_read(): # read next data line of ASCII table for column in items['tensor']['column']: # 
loop over all requested defgrads - F = np.array(map(float,table.data[column:column+items['tensor']['dim']]),'d').reshape(items['tensor']['shape']) + F = np.array(list(map(float,table.data[column:column+items['tensor']['dim']])),'d').reshape(items['tensor']['shape']) (U,S,Vh) = np.linalg.svd(F) # singular value decomposition R = np.dot(U,Vh) # rotation of polar decomposition stretch['U'] = np.dot(np.linalg.inv(R),F) # F = RU diff --git a/processing/post/rotateData.py b/processing/post/rotateData.py index c5e4882b9..ce8156038 100755 --- a/processing/post/rotateData.py +++ b/processing/post/rotateData.py @@ -100,13 +100,13 @@ for name in filenames: for column in items[datatype]['column']: # loop over all requested labels table.data[column:column+items[datatype]['dim']] = \ - q * np.array(map(float,table.data[column:column+items[datatype]['dim']])) + q * np.array(list(map(float,table.data[column:column+items[datatype]['dim']]))) datatype = 'tensor' for column in items[datatype]['column']: # loop over all requested labels table.data[column:column+items[datatype]['dim']] = \ - np.dot(R,np.dot(np.array(map(float,table.data[column:column+items[datatype]['dim']])).\ + np.dot(R,np.dot(np.array(list(map(float,table.data[column:column+items[datatype]['dim']]))).\ reshape(items[datatype]['shape']),R.transpose())).reshape(items[datatype]['dim']) outputAlive = table.data_write() # output processed line diff --git a/processing/post/vtk2ang.py b/processing/post/vtk2ang.py index 6da07bc02..123dc5b98 100755 --- a/processing/post/vtk2ang.py +++ b/processing/post/vtk2ang.py @@ -421,8 +421,6 @@ for filename in filenames: meshActor.GetProperty().SetOpacity(0.2) meshActor.GetProperty().SetColor(1.0,1.0,0) meshActor.GetProperty().BackfaceCullingOn() - # meshActor.GetProperty().SetEdgeColor(1,1,0.5) - # meshActor.GetProperty().EdgeVisibilityOn() boxpoints = vtk.vtkPoints() for n in range(8): diff --git a/processing/post/vtk_rectilinearGrid.py b/processing/post/vtk_rectilinearGrid.py index 326f26046..d01d118cb 100755 --- a/processing/post/vtk_rectilinearGrid.py +++ b/processing/post/vtk_rectilinearGrid.py @@ -82,7 +82,7 @@ for name in filenames: [coords[i][j-1] + coords[i][j] for j in range(1,len(coords[i]))] + \ [3.0 * coords[i][-1] - coords[i][-1 - int(len(coords[i]) > 1)]]) for i in range(3)] - grid = np.array(map(len,coords),'i') + grid = np.array(list(map(len,coords)),'i') N = grid.prod() if options.mode == 'point' else (grid-1).prod() if N != len(table.data): From 1384fdead1fb139c16d1a91743a5ed68a8569359 Mon Sep 17 00:00:00 2001 From: Test User Date: Thu, 19 Jul 2018 23:56:47 +0200 Subject: [PATCH 12/29] [skip ci] updated version information after successful test of v2.0.2-241-g800f86e4 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 1779757ff..8f6be5bd3 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.2-232-gef6ffc94 +v2.0.2-241-g800f86e4 From 784ae28dbb6265b10ac4a84d922d2084c9f68125 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 20 Jul 2018 00:39:50 +0200 Subject: [PATCH 13/29] now working with python 3 --- processing/pre/geom_fromOsteonGeometry.py | 12 ++++-------- processing/pre/geom_fromTable.py | 8 ++++---- processing/pre/geom_fromVoronoiTessellation.py | 3 +-- 3 files changed, 9 insertions(+), 14 deletions(-) diff --git a/processing/pre/geom_fromOsteonGeometry.py b/processing/pre/geom_fromOsteonGeometry.py index 716a43615..807e5200e 100755 --- a/processing/pre/geom_fromOsteonGeometry.py +++ b/processing/pre/geom_fromOsteonGeometry.py @@ 
-55,9 +55,9 @@ parser.set_defaults(canal = 25e-6, (options,filename) = parser.parse_args() -if np.any(options.grid < 2): +if np.any(np.array(options.grid) < 2): parser('invalid grid a b c.') -if np.any(options.size <= 0.0): +if np.any(np.array(options.size) <= 0.0): parser('invalid size x y z.') # --- open input files ---------------------------------------------------------------------------- @@ -114,12 +114,8 @@ for y in range(info['grid'][1]): info['microstructures'] += 1 #--- report --------------------------------------------------------------------------------------- -damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))), - 'size x y z: %s'%(' x '.join(map(str,info['size']))), - 'origin x y z: %s'%(' : '.join(map(str,info['origin']))), - 'homogenization: %i'%info['homogenization'], - 'microstructures: %i'%info['microstructures']]) -# -------------------------------------- switch according to task ---------------------------------- +damask.util.report_geom(info,['grid','size','origin','homogenization','microstructures']) + formatwidth = 1+int(math.floor(math.log10(info['microstructures']-1))) header = [scriptID + ' ' + ' '.join(sys.argv[1:])] header.append('') diff --git a/processing/pre/geom_fromTable.py b/processing/pre/geom_fromTable.py index b10bc9f88..6cdf4b76e 100755 --- a/processing/pre/geom_fromTable.py +++ b/processing/pre/geom_fromTable.py @@ -152,7 +152,7 @@ for name in filenames: continue table.data_readArray([options.pos] \ - + ([label] if isinstance(label, types.StringTypes) else label) \ + + (label if isinstance(label, list) else [label]) \ + ([options.phase] if options.phase else [])) if coordDim == 2: @@ -165,9 +165,9 @@ for name in filenames: # --------------- figure out size and grid --------------------------------------------------------- coords = [np.unique(table.data[:,i]) for i in range(3)] - mincorner = np.array(map(min,coords)) - maxcorner = np.array(map(max,coords)) - grid = np.array(map(len,coords),'i') + mincorner = np.array(list(map(min,coords))) + maxcorner = np.array(list(map(max,coords))) + grid = np.array(list(map(len,coords)),'i') size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1) size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 set to smallest among other spacings delta = size/np.maximum(np.ones(3,'d'), grid) diff --git a/processing/pre/geom_fromVoronoiTessellation.py b/processing/pre/geom_fromVoronoiTessellation.py index 4dcb5b40f..f57f1d35e 100755 --- a/processing/pre/geom_fromVoronoiTessellation.py +++ b/processing/pre/geom_fromVoronoiTessellation.py @@ -15,8 +15,7 @@ scriptID = ' '.join([scriptName,damask.version]) def meshgrid2(*arrs): """Code inspired by http://stackoverflow.com/questions/1827489/numpy-meshgrid-in-3d""" arrs = tuple(reversed(arrs)) - arrs = tuple(arrs) - lens = np.array(map(len, arrs)) + lens = np.array(list(map(len, arrs))) dim = len(arrs) ans = [] for i, arr in enumerate(arrs): From 2632be2a7634b87ddecd29a1eb4ddaa6588ce812 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 20 Jul 2018 03:09:53 +0200 Subject: [PATCH 14/29] polishing --- lib/damask/util.py | 6 ++++-- processing/post/addCurl.py | 1 - 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/lib/damask/util.py b/lib/damask/util.py index 93387205e..8727a1473 100644 --- a/lib/damask/util.py +++ b/lib/damask/util.py @@ -93,8 +93,10 @@ def execute(cmd, stdout = subprocess.PIPE, stderr = subprocess.PIPE, stdin = subprocess.PIPE) - 
out,error = [i.replace(b"\x08",b"") for i in (process.communicate() if streamIn is None - else process.communicate(streamIn.read()))] + out,error = [i for i in (process.communicate() if streamIn is None + else process.communicate(streamIn.read().encode('utf-8')))] + out = out.decode('utf-8').replace('\x08','') + error = error.decode('utf-8').replace('\x08','') os.chdir(initialPath) if process.returncode != 0: raise RuntimeError('{} failed with returncode {}'.format(cmd,process.returncode)) return out,error diff --git a/processing/post/addCurl.py b/processing/post/addCurl.py index 5ca851b22..52a4ae438 100755 --- a/processing/post/addCurl.py +++ b/processing/post/addCurl.py @@ -138,7 +138,6 @@ for name in filenames: # --------------- figure out size and grid --------------------------------------------------------- table.data_readArray() - grid,size = damask.util.coordGridAndSize(table.data[:,table.label_indexrange(options.pos)]) # ------------------------------------------ process value field ----------------------------------- From b59145fca5761b627e086eb642ed9c78ba4fe3fa Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 20 Jul 2018 03:11:10 +0200 Subject: [PATCH 15/29] also using python 3 compatible tests --- PRIVATE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/PRIVATE b/PRIVATE index 0c9db9b75..4cbe7024b 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit 0c9db9b7542e7e1c3cac96e4821be9d9a7505a9d +Subproject commit 4cbe7024b4ebd1ef3ee35fbf8b9676f1c377f462 From 24d1528e04c77d3a22e50a7a76314978f2f5e3dc Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 20 Jul 2018 03:31:46 +0200 Subject: [PATCH 16/29] on the way to full python 3 compatibility --- PRIVATE | 2 +- processing/pre/hybridIA_linODFsampling.py | 2 +- processing/pre/mentat_spectralBox.py | 13 ++++++++++--- 3 files changed, 12 insertions(+), 5 deletions(-) diff --git a/PRIVATE b/PRIVATE index 4cbe7024b..12ecac5ad 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit 4cbe7024b4ebd1ef3ee35fbf8b9676f1c377f462 +Subproject commit 12ecac5ad75a160d5ee8e2b18b752fdab11dfa0d diff --git a/processing/pre/hybridIA_linODFsampling.py b/processing/pre/hybridIA_linODFsampling.py index e4735674a..d1b0efd57 100755 --- a/processing/pre/hybridIA_linODFsampling.py +++ b/processing/pre/hybridIA_linODFsampling.py @@ -270,7 +270,7 @@ for name in filenames: ODF['limit'] = np.radians(limits[1,:]) # right hand limits in radians ODF['center'] = 0.0 if all(limits[0,:]<1e-8) else 0.5 # vertex or cell centered - ODF['interval'] = np.array(map(len,[np.unique(table.data[:,i]) for i in range(3)]),'i') # steps are number of distict values + ODF['interval'] = np.array(list(map(len,[np.unique(table.data[:,i]) for i in range(3)])),'i') # steps are number of distict values ODF['nBins'] = ODF['interval'].prod() ODF['delta'] = np.radians(np.array(limits[1,0:3]-limits[0,0:3])/(ODF['interval']-1)) # step size diff --git a/processing/pre/mentat_spectralBox.py b/processing/pre/mentat_spectralBox.py index 16c982f82..0299b35dc 100755 --- a/processing/pre/mentat_spectralBox.py +++ b/processing/pre/mentat_spectralBox.py @@ -77,7 +77,14 @@ def mesh(r,d): "%f %f %f"%(-d[0],d[1],d[2]), "%f %f %f"%(-d[0],d[1],0.0), "*add_elements", - range(1,9), + "1", + "2", + "3", + "4", + "5", + "6", + "7", + "8", "*sub_divisions", "%i %i %i"%(r[2],r[1],r[0]), "*subdivide_elements", @@ -201,7 +208,7 @@ if options.port: except: parser.error('no valid Mentat release found.') -# --- loop over input files 
------------------------------------------------------------------------- +# --- loop over input files ------------------------------------------------------------------------ if filenames == []: filenames = [None] @@ -236,7 +243,7 @@ for name in filenames: # --- read data ------------------------------------------------------------------------------------ - microstructure = table.microstructure_read(info['grid']).reshape(info['grid'].prod(),order='F') # read microstructure + microstructure = table.microstructure_read(info['grid']).reshape(info['grid'].prod(),order='F') # read microstructure cmds = [\ init(), From 0c67f28178450fd7dad741d5bea2577b3f29ce93 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 20 Jul 2018 03:46:00 +0200 Subject: [PATCH 17/29] python3 compatible tests --- PRIVATE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/PRIVATE b/PRIVATE index 12ecac5ad..55609e107 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit 12ecac5ad75a160d5ee8e2b18b752fdab11dfa0d +Subproject commit 55609e1079d6ffde6dffdd584ee22a527ff00a34 From 8616a923096d37640c9fda1e791268ad8d28c735 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 20 Jul 2018 03:50:07 +0200 Subject: [PATCH 18/29] unused module --- processing/pre/geom_fromTable.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/processing/pre/geom_fromTable.py b/processing/pre/geom_fromTable.py index 6cdf4b76e..33b75b307 100755 --- a/processing/pre/geom_fromTable.py +++ b/processing/pre/geom_fromTable.py @@ -1,7 +1,7 @@ #!/usr/bin/env python2.7 # -*- coding: UTF-8 no BOM -*- -import os,sys,math,types,time +import os,sys,math,time import scipy.spatial, numpy as np from optparse import OptionParser import damask From 2d1e933c3d3b2e0946620ad1ee3c723cfd99a7f3 Mon Sep 17 00:00:00 2001 From: Test User Date: Fri, 20 Jul 2018 11:03:47 +0200 Subject: [PATCH 19/29] [skip ci] updated version information after successful test of v2.0.2-250-g8616a923 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 8f6be5bd3..0acf5a908 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.2-241-g800f86e4 +v2.0.2-250-g8616a923 From 76cf126566b9505e83a52453df5d6a85dbf1bacd Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 20 Jul 2018 15:58:21 +0200 Subject: [PATCH 20/29] [skip sc] not really a patch but enables python (3) skipping syntax check as executable files normally only exist in processing and installation --- installation/patch/python2to3.sh | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100755 installation/patch/python2to3.sh diff --git a/installation/patch/python2to3.sh b/installation/patch/python2to3.sh new file mode 100755 index 000000000..1d86b0ce7 --- /dev/null +++ b/installation/patch/python2to3.sh @@ -0,0 +1,8 @@ +#! /usr/bin/env bash +if [ $1x != 3to2x ]; then + echo 'python2.7 to python' + find . -name '*.py' | xargs sed -i 's/usr\/bin\/env python2.7/usr\/bin\/env python/g' +else + echo 'python to python2.7' + find . 
-name '*.py' | xargs sed -i 's/usr\/bin\/env python/usr\/bin\/env python2.7/g' +fi From 5eff624d3f9ee978cba67c6e6063ea809488a0a2 Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 22 Jul 2018 06:39:27 +0200 Subject: [PATCH 21/29] [skip ci] updated version information after successful test of v2.0.2-253-gce203ca7 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 0acf5a908..d340e860c 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.2-250-g8616a923 +v2.0.2-253-gce203ca7 From 449449b5007b31cb42fdaa02236f8157c36e2136 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 28 Jul 2018 01:31:02 +0200 Subject: [PATCH 22/29] does the same as numpy.clip --- src/math.f90 | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/src/math.f90 b/src/math.f90 index 39adcbba4..edf2ff5a6 100644 --- a/src/math.f90 +++ b/src/math.f90 @@ -160,7 +160,7 @@ module math math_rotate_forward33, & math_rotate_backward33, & math_rotate_forward3333, & - math_limit + math_clip private :: & math_check, & halton @@ -1363,16 +1363,16 @@ pure function math_RtoEuler(R) sqhk =sqrt(R(1,3)*R(1,3)+R(2,3)*R(2,3)) ! calculate PHI - math_RtoEuler(2) = acos(math_limit(R(3,3)/sqhkl,-1.0_pReal, 1.0_pReal)) + math_RtoEuler(2) = acos(math_clip(R(3,3)/sqhkl,-1.0_pReal, 1.0_pReal)) if((math_RtoEuler(2) < 1.0e-8_pReal) .or. (pi-math_RtoEuler(2) < 1.0e-8_pReal)) then math_RtoEuler(3) = 0.0_pReal - math_RtoEuler(1) = acos(math_limit(R(1,1)/squvw, -1.0_pReal, 1.0_pReal)) + math_RtoEuler(1) = acos(math_clip(R(1,1)/squvw, -1.0_pReal, 1.0_pReal)) if(R(2,1) > 0.0_pReal) math_RtoEuler(1) = 2.0_pReal*pi-math_RtoEuler(1) else - math_RtoEuler(3) = acos(math_limit(R(2,3)/sqhk, -1.0_pReal, 1.0_pReal)) + math_RtoEuler(3) = acos(math_clip(R(2,3)/sqhk, -1.0_pReal, 1.0_pReal)) if(R(1,3) < 0.0) math_RtoEuler(3) = 2.0_pReal*pi-math_RtoEuler(3) - math_RtoEuler(1) = acos(math_limit(-R(3,2)/sin(math_RtoEuler(2)), -1.0_pReal, 1.0_pReal)) + math_RtoEuler(1) = acos(math_clip(-R(3,2)/sin(math_RtoEuler(2)), -1.0_pReal, 1.0_pReal)) if(R(3,1) < 0.0) math_RtoEuler(1) = 2.0_pReal*pi-math_RtoEuler(1) end if @@ -1654,7 +1654,7 @@ pure function math_qToEuler(qPassive) math_qToEuler(2) = acos(1.0_pReal-2.0_pReal*(q(2)**2+q(3)**2)) if (abs(math_qToEuler(2)) < 1.0e-6_pReal) then - math_qToEuler(1) = sign(2.0_pReal*acos(math_limit(q(1),-1.0_pReal, 1.0_pReal)),q(4)) + math_qToEuler(1) = sign(2.0_pReal*acos(math_clip(q(1),-1.0_pReal, 1.0_pReal)),q(4)) math_qToEuler(3) = 0.0_pReal else math_qToEuler(1) = atan2(+q(1)*q(3)+q(2)*q(4), q(1)*q(2)-q(3)*q(4)) @@ -1681,7 +1681,7 @@ pure function math_qToAxisAngle(Q) real(pReal) :: halfAngle, sinHalfAngle real(pReal), dimension(4) :: math_qToAxisAngle - halfAngle = acos(math_limit(Q(1),-1.0_pReal,1.0_pReal)) + halfAngle = acos(math_clip(Q(1),-1.0_pReal,1.0_pReal)) sinHalfAngle = sin(halfAngle) smallRotation: if (sinHalfAngle <= 1.0e-4_pReal) then @@ -1741,7 +1741,7 @@ real(pReal) pure function math_EulerMisorientation(EulerA,EulerB) cosTheta = (math_trace33(math_mul33x33(math_EulerToR(EulerB), & transpose(math_EulerToR(EulerA)))) - 1.0_pReal) * 0.5_pReal - math_EulerMisorientation = acos(math_limit(cosTheta,-1.0_pReal,1.0_pReal)) + math_EulerMisorientation = acos(math_clip(cosTheta,-1.0_pReal,1.0_pReal)) end function math_EulerMisorientation @@ -2052,7 +2052,7 @@ function math_eigenvectorBasisSym33(m) EB(3,3,3)=1.0_pReal else threeSimilarEigenvalues rho=sqrt(-3.0_pReal*P**3.0_pReal)/9.0_pReal - 
phi=acos(math_limit(-Q/rho*0.5_pReal,-1.0_pReal,1.0_pReal)) + phi=acos(math_clip(-Q/rho*0.5_pReal,-1.0_pReal,1.0_pReal)) values = 2.0_pReal*rho**(1.0_pReal/3.0_pReal)* & [cos(phi/3.0_pReal), & cos((phi+2.0_pReal*PI)/3.0_pReal), & @@ -2117,7 +2117,7 @@ function math_eigenvectorBasisSym33_log(m) EB(3,3,3)=1.0_pReal else threeSimilarEigenvalues rho=sqrt(-3.0_pReal*P**3.0_pReal)/9.0_pReal - phi=acos(math_limit(-Q/rho*0.5_pReal,-1.0_pReal,1.0_pReal)) + phi=acos(math_clip(-Q/rho*0.5_pReal,-1.0_pReal,1.0_pReal)) values = 2.0_pReal*rho**(1.0_pReal/3.0_pReal)* & [cos(phi/3.0_pReal), & cos((phi+2.0_pReal*PI)/3.0_pReal), & @@ -2229,7 +2229,7 @@ function math_eigenvaluesSym33(m) math_eigenvaluesSym33 = math_eigenvaluesSym(m) else rho=sqrt(-3.0_pReal*P**3.0_pReal)/9.0_pReal - phi=acos(math_limit(-Q/rho*0.5_pReal,-1.0_pReal,1.0_pReal)) + phi=acos(math_clip(-Q/rho*0.5_pReal,-1.0_pReal,1.0_pReal)) math_eigenvaluesSym33 = 2.0_pReal*rho**(1.0_pReal/3.0_pReal)* & [cos(phi/3.0_pReal), & cos((phi+2.0_pReal*PI)/3.0_pReal), & @@ -2614,7 +2614,7 @@ end function math_rotate_forward3333 !> @brief limits a scalar value to a certain range (either one or two sided) ! Will return NaN if left > right !-------------------------------------------------------------------------------------------------- -real(pReal) pure function math_limit(a, left, right) +real(pReal) pure function math_clip(a, left, right) use, intrinsic :: & IEEE_arithmetic @@ -2623,14 +2623,14 @@ real(pReal) pure function math_limit(a, left, right) real(pReal), intent(in), optional :: left, right - math_limit = min ( & + math_clip = min ( & max (merge(left, -huge(a), present(left)), a), & merge(right, huge(a), present(right)) & ) if (present(left) .and. present(right)) & - math_limit = merge (IEEE_value(1.0_pReal,IEEE_quiet_NaN),math_limit, left>right) + math_clip = merge (IEEE_value(1.0_pReal,IEEE_quiet_NaN),math_clip, left>right) -end function math_limit +end function math_clip end module math From 708e2e0ac580ce75409185f8d679d9b31837f5ba Mon Sep 17 00:00:00 2001 From: Test User Date: Sat, 28 Jul 2018 16:16:31 +0200 Subject: [PATCH 23/29] [skip ci] updated version information after successful test of v2.0.2-255-g449449b5 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index d340e860c..75fa254a5 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.2-253-gce203ca7 +v2.0.2-255-g449449b5 From f9d8278ca66e33a04975db266914124d081eeb01 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 29 Jul 2018 22:41:18 +0200 Subject: [PATCH 24/29] using new test variant --- PRIVATE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/PRIVATE b/PRIVATE index 55609e107..be1d25c20 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit 55609e1079d6ffde6dffdd584ee22a527ff00a34 +Subproject commit be1d25c20233b148cb99cdedf202c685eb048ab1 From c96081c99a51817dd3b2a551ee6357ee9916505f Mon Sep 17 00:00:00 2001 From: Test User Date: Mon, 30 Jul 2018 11:36:05 +0200 Subject: [PATCH 25/29] [skip ci] updated version information after successful test of v2.0.2-257-gf9d8278c --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 75fa254a5..f7f80b1c8 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.2-255-g449449b5 +v2.0.2-257-gf9d8278c From 06a0128d91b9b2a725b5a08c366e6b3bae549086 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Mon, 30 Jul 2018 12:40:31 +0200 Subject: [PATCH 26/29] no more aliases for c/a --- PRIVATE | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/PRIVATE b/PRIVATE index be1d25c20..50eb21714 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit be1d25c20233b148cb99cdedf202c685eb048ab1 +Subproject commit 50eb21714e2f501b111bb62096ebb6a5bfc6708a From 2419deea8f43cc2b1aa2740ab9a1f1236a6dcbb1 Mon Sep 17 00:00:00 2001 From: Test User Date: Mon, 30 Jul 2018 22:31:50 +0200 Subject: [PATCH 27/29] [skip ci] updated version information after successful test of v2.0.2-259-g06a0128d --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index f7f80b1c8..8bfea9185 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.2-257-gf9d8278c +v2.0.2-259-g06a0128d From bc3f6ae97c13cfcdd2f19cb084c9195e9b3be606 Mon Sep 17 00:00:00 2001 From: Pratheek Shanthraj Date: Tue, 31 Jul 2018 23:15:44 +0200 Subject: [PATCH 28/29] missing bits in getStrings function --- src/config.f90 | 1 + 1 file changed, 1 insertion(+) diff --git a/src/config.f90 b/src/config.f90 index 9d2ddde4c..d26b72c80 100644 --- a/src/config.f90 +++ b/src/config.f90 @@ -670,6 +670,7 @@ function getStrings(this,key,defaultVal,requiredShape,raw) endif else notAllocated if (whole) then + str = item%string%val(item%string%pos(4):) getStrings = [getStrings,str] else do i=2_pInt,item%string%pos(1) From e0a39d202c62cdd67f9b7c26eef4e2a6792bbfd1 Mon Sep 17 00:00:00 2001 From: Test User Date: Wed, 1 Aug 2018 07:12:12 +0200 Subject: [PATCH 29/29] [skip ci] updated version information after successful test of v2.0.2-261-gbc3f6ae9 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 8bfea9185..fca0385fb 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.2-259-g06a0128d +v2.0.2-261-gbc3f6ae9
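The Python 3 fixes in this patch range share one pattern: subprocess I/O is bytes, so input gets encoded and output decoded, and lazy iterators (map, range) are materialized with list() before they reach NumPy or the Mentat command list. A minimal sketch of the subprocess part, closely following the hunk for damask/util.py above — the function name, the wd default, and the shlex/Popen plumbing are illustrative assumptions, not the DAMASK API:

    import os
    import shlex
    import subprocess

    def execute(cmd, streamIn=None, wd='./'):
        """Sketch of a Python-3-safe command wrapper (hypothetical helper,
        mirroring the bytes handling patched into damask/util.py)."""
        initialPath = os.getcwd()
        os.chdir(wd)
        process = subprocess.Popen(shlex.split(cmd),
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   stdin=subprocess.PIPE)
        # communicate() deals in bytes under Python 3: encode what goes in ...
        out, error = process.communicate(streamIn.read().encode('utf-8')
                                         if streamIn is not None else None)
        # ... and decode what comes out, stripping backspace characters (\x08)
        out = out.decode('utf-8').replace('\x08', '')
        error = error.decode('utf-8').replace('\x08', '')
        os.chdir(initialPath)
        if process.returncode != 0:
            raise RuntimeError('{} failed with returncode {}'.format(cmd, process.returncode))
        return out, error

The same discipline explains the list(map(...)) wrapper in hybridIA_linODFsampling.py and the replacement of range(1,9) by explicit strings in mentat_spectralBox.py: under Python 3 both map and range return lazy iterators rather than lists, so any consumer that needs a concrete sequence has to materialize one first.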