From 4dfc8d0132e8155da1fa531a6c28e12b8c8017de Mon Sep 17 00:00:00 2001 From: zhangc43 Date: Mon, 22 Aug 2016 10:27:49 -0400 Subject: [PATCH 01/26] add MPI_finalize() following Martin & Philip suggestions; --- code/DAMASK_spectral.f90 | 118 ++++++++++++++++++++------------------- 1 file changed, 62 insertions(+), 56 deletions(-) diff --git a/code/DAMASK_spectral.f90 b/code/DAMASK_spectral.f90 index c363393d8..96b3c1451 100644 --- a/code/DAMASK_spectral.f90 +++ b/code/DAMASK_spectral.f90 @@ -81,7 +81,7 @@ program DAMASK_spectral use spectral_mech_Polarisation use spectral_damage use spectral_thermal - + implicit none @@ -93,9 +93,9 @@ program DAMASK_spectral logical, dimension(9) :: temp_maskVector = .false. !< temporarily from loadcase file when reading in tensors integer(pInt), parameter :: FILEUNIT = 234_pInt !< file unit, DAMASK IO does not support newunit feature integer(pInt), allocatable, dimension(:) :: chunkPos - + integer(pInt) :: & - N_t = 0_pInt, & !< # of time indicators found in load case file + N_t = 0_pInt, & !< # of time indicators found in load case file N_n = 0_pInt, & !< # of increment specifiers found in load case file N_def = 0_pInt !< # of rate of deformation specifiers found in load case file character(len=65536) :: & @@ -105,7 +105,7 @@ program DAMASK_spectral ! loop variables, convergence etc. real(pReal), dimension(3,3), parameter :: & ones = 1.0_pReal, & - zeros = 0.0_pReal + zeros = 0.0_pReal integer(pInt), parameter :: & subStepFactor = 2_pInt !< for each substep, divide the last time increment by 2.0 real(pReal) :: & @@ -150,6 +150,7 @@ program DAMASK_spectral MPI_file_get_position, & MPI_file_write, & MPI_abort, & + MPI_finalize, & MPI_allreduce, & PETScFinalize @@ -159,7 +160,7 @@ program DAMASK_spectral write(6,'(/,a)') ' <<<+- DAMASK_spectral init -+>>>' write(6,'(a15,a)') ' Current time: ',IO_timeStamp() #include "compilation_info.f90" - + !-------------------------------------------------------------------------------------------------- ! initialize field solver information nActiveFields = 1 @@ -192,14 +193,14 @@ program DAMASK_spectral call IO_error(error_ID=837_pInt,ext_msg = trim(loadCaseFile)) ! error message for incomplete loadcase allocate (loadCases(N_n)) ! array of load cases loadCases%P%myType='p' - + do i = 1, size(loadCases) allocate(loadCases(i)%ID(nActiveFields)) field = 1 loadCases(i)%ID(field) = FIELD_MECH_ID ! mechanical active by default thermalActive: if (any(thermal_type == THERMAL_conduction_ID)) then field = field + 1 - loadCases(i)%ID(field) = FIELD_THERMAL_ID + loadCases(i)%ID(field) = FIELD_THERMAL_ID endif thermalActive damageActive: if (any(damage_type == DAMAGE_nonlocal_ID)) then field = field + 1 @@ -231,11 +232,11 @@ program DAMASK_spectral do j = 1_pInt, 9_pInt temp_maskVector(j) = IO_stringValue(line,chunkPos,i+j) /= '*' ! true if not a * enddo - do j = 1_pInt,9_pInt + do j = 1_pInt,9_pInt if (temp_maskVector(j)) temp_valueVector(j) = IO_floatValue(line,chunkPos,i+j) ! read value where applicable enddo loadCases(currentLoadCase)%deformation%maskLogical = & ! logical mask in 3x3 notation - transpose(reshape(temp_maskVector,[ 3,3])) + transpose(reshape(temp_maskVector,[ 3,3])) loadCases(currentLoadCase)%deformation%maskFloat = & ! float (1.0/0.0) mask in 3x3 notation merge(ones,zeros,loadCases(currentLoadCase)%deformation%maskLogical) loadCases(currentLoadCase)%deformation%values = math_plain9to33(temp_valueVector) ! 
values in 3x3 notation @@ -259,10 +260,10 @@ program DAMASK_spectral loadCases(currentLoadCase)%incs = IO_intValue(line,chunkPos,i+1_pInt) loadCases(currentLoadCase)%logscale = 1_pInt case('freq','frequency','outputfreq') ! frequency of result writings - loadCases(currentLoadCase)%outputfrequency = IO_intValue(line,chunkPos,i+1_pInt) + loadCases(currentLoadCase)%outputfrequency = IO_intValue(line,chunkPos,i+1_pInt) case('r','restart','restartwrite') ! frequency of writing restart information loadCases(currentLoadCase)%restartfrequency = & - max(0_pInt,IO_intValue(line,chunkPos,i+1_pInt)) + max(0_pInt,IO_intValue(line,chunkPos,i+1_pInt)) case('guessreset','dropguessing') loadCases(currentLoadCase)%followFormerTrajectory = .false. ! do not continue to predict deformation along former trajectory case('euler') ! rotation of currentLoadCase given in euler angles @@ -271,10 +272,10 @@ program DAMASK_spectral k = 1_pInt ! assuming keyword indicating degree/radians present select case (IO_lc(IO_stringValue(line,chunkPos,i+1_pInt))) case('deg','degree') - case('rad','radian') ! don't convert from degree to radian + case('rad','radian') ! don't convert from degree to radian l = 0_pInt - case default - k = 0_pInt + case default + k = 0_pInt end select do j = 1_pInt, 3_pInt temp_valueVector(j) = IO_floatValue(line,chunkPos,i+k+j) @@ -289,7 +290,7 @@ program DAMASK_spectral loadCases(currentLoadCase)%rotation = math_plain9to33(temp_valueVector) end select enddo; enddo - close(FILEUNIT) + close(FILEUNIT) !-------------------------------------------------------------------------------------------------- ! consistency checks and output of load case @@ -323,7 +324,7 @@ program DAMASK_spectral enddo if (any(loadCases(currentLoadCase)%P%maskLogical .eqv. & loadCases(currentLoadCase)%deformation%maskLogical)) errorID = 831_pInt ! exclusive or masking only - if (any(loadCases(currentLoadCase)%P%maskLogical .and. & + if (any(loadCases(currentLoadCase)%P%maskLogical .and. & transpose(loadCases(currentLoadCase)%P%maskLogical) .and. & reshape([ .false.,.true.,.true.,.true.,.false.,.true.,.true.,.true.,.false.],[ 3,3]))) & errorID = 838_pInt ! no rotation is allowed by stress BC @@ -358,7 +359,7 @@ program DAMASK_spectral endif !-------------------------------------------------------------------------------------------------- -! doing initialization depending on selected solver +! doing initialization depending on selected solver call Utilities_init() do field = 1, nActiveFields select case (loadCases(1)%ID(field)) @@ -370,26 +371,26 @@ program DAMASK_spectral if(iand(debug_level(debug_spectral),debug_levelBasic)/= 0 .and. worldrank == 0_pInt) & call IO_warning(42_pInt, ext_msg='debug Divergence') call AL_init - + case (DAMASK_spectral_SolverPolarisation_label) if(iand(debug_level(debug_spectral),debug_levelBasic)/= 0 .and. worldrank == 0_pInt) & call IO_warning(42_pInt, ext_msg='debug Divergence') call Polarisation_init - + case default call IO_error(error_ID = 891, ext_msg = trim(spectral_solver)) - - end select - + + end select + case(FIELD_THERMAL_ID) call spectral_thermal_init - + case(FIELD_DAMAGE_ID) call spectral_damage_init() end select enddo - + !-------------------------------------------------------------------------------------------------- ! write header of output file if (worldrank == 0) then @@ -408,7 +409,7 @@ program DAMASK_spectral write(resUnit) 'logscales:', loadCases%logscale write(resUnit) 'increments:', loadCases%incs ! 
one entry per LoadCase write(resUnit) 'startingIncrement:', restartInc - 1_pInt ! start with writing out the previous inc - write(resUnit) 'eoh' + write(resUnit) 'eoh' close(resUnit) ! end of header open(newunit=statUnit,file=trim(getSolverWorkingDirectoryName())//trim(getSolverJobName())//& '.sta',form='FORMATTED',status='REPLACE') @@ -458,7 +459,7 @@ program DAMASK_spectral !-------------------------------------------------------------------------------------------------- ! loopping over loadcases loadCaseLooping: do currentLoadCase = 1_pInt, size(loadCases) - time0 = time ! currentLoadCase start time + time0 = time ! currentLoadCase start time guess = loadCases(currentLoadCase)%followFormerTrajectory ! change of load case? homogeneous guess for the first inc !-------------------------------------------------------------------------------------------------- @@ -472,9 +473,9 @@ program DAMASK_spectral if (loadCases(currentLoadCase)%logscale == 0_pInt) then ! linear scale timeinc = loadCases(currentLoadCase)%time/real(loadCases(currentLoadCase)%incs,pReal) ! only valid for given linear time scale. will be overwritten later in case loglinear scale is used else - if (currentLoadCase == 1_pInt) then ! 1st currentLoadCase of logarithmic scale + if (currentLoadCase == 1_pInt) then ! 1st currentLoadCase of logarithmic scale if (inc == 1_pInt) then ! 1st inc of 1st currentLoadCase of logarithmic scale - timeinc = loadCases(1)%time*(2.0_pReal**real( 1_pInt-loadCases(1)%incs ,pReal)) ! assume 1st inc is equal to 2nd + timeinc = loadCases(1)%time*(2.0_pReal**real( 1_pInt-loadCases(1)%incs ,pReal)) ! assume 1st inc is equal to 2nd else ! not-1st inc of 1st currentLoadCase of logarithmic scale timeinc = loadCases(1)%time*(2.0_pReal**real(inc-1_pInt-loadCases(1)%incs ,pReal)) endif @@ -492,12 +493,12 @@ program DAMASK_spectral stepFraction = 0_pInt !-------------------------------------------------------------------------------------------------- -! loop over sub incs +! loop over sub incs subIncLooping: do while (stepFraction/subStepFactor**cutBackLevel <1_pInt) time = time + timeinc ! forward time - stepFraction = stepFraction + 1_pInt + stepFraction = stepFraction + 1_pInt remainingLoadCaseTime = time0 - time + loadCases(currentLoadCase)%time + timeInc - + !-------------------------------------------------------------------------------------------------- ! report begin of new increment if (worldrank == 0) then @@ -515,7 +516,7 @@ program DAMASK_spectral ',a,'//IO_intOut(stepFraction)//',a,'//IO_intOut(subStepFactor**cutBackLevel)//')') & 'Increment ',totalIncsCounter,'/',sum(loadCases%incs),& '-',stepFraction, '/', subStepFactor**cutBackLevel - endif + endif !-------------------------------------------------------------------------------------------------- ! forward fields @@ -541,18 +542,18 @@ program DAMASK_spectral F_BC = loadCases(currentLoadCase)%deformation, & P_BC = loadCases(currentLoadCase)%P, & rotation_BC = loadCases(currentLoadCase)%rotation) - end select - + end select + case(FIELD_THERMAL_ID) call spectral_thermal_forward (& guess,timeinc,timeIncOld,remainingLoadCaseTime) - + case(FIELD_DAMAGE_ID) call spectral_damage_forward (& guess,timeinc,timeIncOld,remainingLoadCaseTime) end select - enddo - + enddo + !-------------------------------------------------------------------------------------------------- ! 
solve fields stagIter = 0_pInt @@ -568,27 +569,27 @@ program DAMASK_spectral P_BC = loadCases(currentLoadCase)%P, & F_BC = loadCases(currentLoadCase)%deformation, & rotation_BC = loadCases(currentLoadCase)%rotation) - + case (DAMASK_spectral_SolverAL_label) solres(field) = AL_solution (& incInfo,guess,timeinc,timeIncOld,remainingLoadCaseTime, & P_BC = loadCases(currentLoadCase)%P, & F_BC = loadCases(currentLoadCase)%deformation, & rotation_BC = loadCases(currentLoadCase)%rotation) - + case (DAMASK_spectral_SolverPolarisation_label) solres(field) = Polarisation_solution (& incInfo,guess,timeinc,timeIncOld,remainingLoadCaseTime, & P_BC = loadCases(currentLoadCase)%P, & F_BC = loadCases(currentLoadCase)%deformation, & rotation_BC = loadCases(currentLoadCase)%rotation) - - end select - + + end select + case(FIELD_THERMAL_ID) solres(field) = spectral_thermal_solution (& guess,timeinc,timeIncOld,remainingLoadCaseTime) - + case(FIELD_DAMAGE_ID) solres(field) = spectral_damage_solution (& guess,timeinc,timeIncOld,remainingLoadCaseTime) @@ -600,11 +601,11 @@ program DAMASK_spectral stagIterate = stagIter < stagItMax .and. & all(solres(:)%converged) .and. & .not. all(solres(:)%stagConverged) - enddo + enddo !-------------------------------------------------------------------------------------------------- -! check solution - cutBack = .False. +! check solution + cutBack = .False. if(solres(1)%termIll .or. .not. all(solres(:)%converged .and. solres(:)%stagConverged)) then ! no solution found if (cutBackLevel < maxCutBack) then ! do cut back if (worldrank == 0) write(6,'(/,a)') ' cut back detected' @@ -617,8 +618,8 @@ program DAMASK_spectral call IO_warning(850_pInt) call quit(-1_pInt*(lastRestartWritten+1_pInt)) ! quit and provide information about last restart inc written elseif (continueCalculation == 1_pInt) then - guess = .true. ! accept non converged BVP solution - else ! default behavior, exit if spectral solver does not converge + guess = .true. ! accept non converged BVP solution + else ! default behavior, exit if spectral solver does not converge call IO_warning(850_pInt) call quit(-1_pInt*(lastRestartWritten+1_pInt)) ! quit and provide information about last restart inc written endif @@ -630,8 +631,8 @@ program DAMASK_spectral write(statUnit,*) totalIncsCounter, time, cutBackLevel, & solres%converged, solres%iterationsNeeded ! write statistics about accepted solution flush(statUnit) - endif - endif + endif + endif enddo subIncLooping cutBackLevel = max(0_pInt, cutBackLevel - 1_pInt) ! try half number of subincs next inc if(all(solres(:)%converged)) then ! report converged inc @@ -662,11 +663,11 @@ program DAMASK_spectral enddo fileOffset = fileOffset + sum(outputSize) ! forward to current file position endif - if( loadCases(currentLoadCase)%restartFrequency > 0_pInt .and. & ! at frequency of writing restart information set restart parameter for FEsolving - mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0_pInt) then ! first call to CPFEM_general will write? + if( loadCases(currentLoadCase)%restartFrequency > 0_pInt .and. & ! at frequency of writing restart information set restart parameter for FEsolving + mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0_pInt) then ! first call to CPFEM_general will write? restartWrite = .true. lastRestartWritten = inc - endif + endif else forwarding time = time + timeinc guess = .true. 
@@ -698,7 +699,7 @@ program DAMASK_spectral call AL_destroy() case (DAMASK_spectral_SolverPolarisation_label) call Polarisation_destroy() - end select + end select case(FIELD_THERMAL_ID) call spectral_thermal_destroy() case(FIELD_DAMAGE_ID) @@ -709,6 +710,11 @@ program DAMASK_spectral call PETScFinalize(ierr); CHKERRQ(ierr) + #ifdef _OPENMP + call MPI_finalize(i) + if (i /= 0_pInt) call IO_error(error_ID=894, el=i, ext_msg="Finalize()") + #endif + if (notConvergedCounter > 0_pInt) call quit(3_pInt) ! error if some are not converged call quit(0_pInt) ! no complains ;) @@ -719,7 +725,7 @@ end program DAMASK_spectral !> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH !> @brief quit subroutine to mimic behavior of FEM solvers !> @details exits the Spectral solver and reports time and duration. Exit code 0 signals -!> everything went fine. Exit code 1 signals an error, message according to IO_error. Exit code +!> everything went fine. Exit code 1 signals an error, message according to IO_error. Exit code !> 2 signals no converged solution and increment of last saved restart information is written to !> stderr. Exit code 3 signals no severe problems, but some increments did not converge !-------------------------------------------------------------------------------------------------- @@ -739,7 +745,7 @@ subroutine quit(stop_id) write(6,'(a,2(i2.2,a),i2.2)') 'Time: ',dateAndTime(5),':',& dateAndTime(6),':',& dateAndTime(7) - + if (stop_id == 0_pInt) stop 0 ! normal termination if (stop_id < 0_pInt) then ! terminally ill, restart might help write(0,'(a,i6)') 'restart information available at ', stop_id*(-1_pInt) From 3d0e19de0a3632daf1908652913abf02868466de Mon Sep 17 00:00:00 2001 From: zhangc43 Date: Mon, 22 Aug 2016 17:45:05 -0400 Subject: [PATCH 02/26] syntax polish for ifdef --- code/DAMASK_spectral.f90 | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/code/DAMASK_spectral.f90 b/code/DAMASK_spectral.f90 index 96b3c1451..1d3a0a0cb 100644 --- a/code/DAMASK_spectral.f90 +++ b/code/DAMASK_spectral.f90 @@ -711,8 +711,10 @@ program DAMASK_spectral call PETScFinalize(ierr); CHKERRQ(ierr) #ifdef _OPENMP - call MPI_finalize(i) - if (i /= 0_pInt) call IO_error(error_ID=894, el=i, ext_msg="Finalize()") + call MPI_finalize(i) + if (i /= 0_pInt) then + call IO_error(error_ID=894, el=i, ext_msg="Finalize()") + endif #endif if (notConvergedCounter > 0_pInt) call quit(3_pInt) ! error if some are not converged From af10f920419506b97f7abf6d3c1119bfb5e8723b Mon Sep 17 00:00:00 2001 From: zhangc43 Date: Tue, 23 Aug 2016 08:29:42 -0400 Subject: [PATCH 03/26] remove space before preprocessor --- code/DAMASK_spectral.f90 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/code/DAMASK_spectral.f90 b/code/DAMASK_spectral.f90 index 1d3a0a0cb..2793b502c 100644 --- a/code/DAMASK_spectral.f90 +++ b/code/DAMASK_spectral.f90 @@ -710,12 +710,12 @@ program DAMASK_spectral call PETScFinalize(ierr); CHKERRQ(ierr) - #ifdef _OPENMP +#ifdef _OPENMP call MPI_finalize(i) if (i /= 0_pInt) then call IO_error(error_ID=894, el=i, ext_msg="Finalize()") endif - #endif +#endif if (notConvergedCounter > 0_pInt) call quit(3_pInt) ! error if some are not converged call quit(0_pInt) ! 
no complains ;) From 5b6f41cb6b7c73976c5cf74922a17e31ed7833c1 Mon Sep 17 00:00:00 2001 From: Test User Date: Wed, 24 Aug 2016 04:27:41 +0200 Subject: [PATCH 04/26] updated version information after successful test of v2.0.1-47-gaf10f92 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 921b19cbd..a4ea1f3b9 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.1-43-g64ac05f +v2.0.1-47-gaf10f92 From f984f1ebea85ff8dc502330684b628945aa95b62 Mon Sep 17 00:00:00 2001 From: Zhuowen Zhao Date: Wed, 24 Aug 2016 16:05:50 -0400 Subject: [PATCH 05/26] enable floating point data and corrected counting of microstructures --- processing/pre/geom_translate.py | 39 +++++++++++++++++++------------- 1 file changed, 23 insertions(+), 16 deletions(-) diff --git a/processing/pre/geom_translate.py b/processing/pre/geom_translate.py index 83c71aa8d..f0d39e66f 100755 --- a/processing/pre/geom_translate.py +++ b/processing/pre/geom_translate.py @@ -30,30 +30,37 @@ parser.add_option('-s', '--substitute', dest = 'substitute', action = 'extend', metavar = '', help = 'substitutions of microstructure indices from,to,from,to,...') +parser.add_option('--float', + dest = 'real', + action = 'store_true', + help = 'use float input') parser.set_defaults(origin = (0.0,0.0,0.0), microstructure = 0, substitute = [], + real = False, ) (options, filenames) = parser.parse_args() +datatype = 'f' if options.real else 'i' + sub = {} -for i in xrange(len(options.substitute)/2): # split substitution list into "from" -> "to" +for i in xrange(len(options.substitute)/2): # split substitution list into "from" -> "to" sub[int(options.substitute[i*2])] = int(options.substitute[i*2+1]) -# --- loop over input files ------------------------------------------------------------------------- +# --- loop over input files ---------------------------------------------------------------------- if filenames == []: filenames = [None] for name in filenames: - try: - table = damask.ASCIItable(name = name, - buffered = False, labeled = False) + try: table = damask.ASCIItable(name = name, + buffered = False, + labeled = False) except: continue damask.util.report(scriptName,name) -# --- interpret header ---------------------------------------------------------------------------- +# --- interpret header --------------------------------------------------------------------------- table.head_read() info,extra_header = table.head_getGeom() @@ -73,9 +80,9 @@ for name in filenames: table.close(dismiss = True) continue -# --- read data ------------------------------------------------------------------------------------ +# --- read data ---------------------------------------------------------------------------------- - microstructure = table.microstructure_read(info['grid']) # read microstructure + microstructure = table.microstructure_read(info['grid'],datatype) # read microstructure # --- do work ------------------------------------------------------------------------------------ @@ -90,9 +97,9 @@ for name in filenames: substituted += options.microstructure # shift microstructure indices newInfo['origin'] = info['origin'] + options.origin - newInfo['microstructures'] = substituted.max() + newInfo['microstructures'] = len(np.unique(substituted)) -# --- report --------------------------------------------------------------------------------------- +# --- report ------------------------------------------------------------------------------------- remarks = [] if (any(newInfo['origin'] != info['origin'])): @@ -101,7 
+108,7 @@ for name in filenames: remarks.append('--> microstructures: %i'%newInfo['microstructures']) if remarks != []: damask.util.croak(remarks) -# --- write header --------------------------------------------------------------------------------- +# --- write header ------------------------------------------------------------------------------- table.labels_clear() table.info_clear() @@ -116,12 +123,12 @@ for name in filenames: ]) table.head_write() -# --- write microstructure information ------------------------------------------------------------ +# --- write microstructure information ----------------------------------------------------------- - formatwidth = int(math.floor(math.log10(microstructure.max())+1)) + format = '%g' if options.real else '%{}i'.format(int(math.floor(math.log10(microstructure.max())+1))) table.data = substituted.reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose() - table.data_writeArray('%%%ii'%(formatwidth),delimiter = ' ') + table.data_writeArray(format,delimiter = ' ') -# --- output finalization -------------------------------------------------------------------------- +# --- output finalization ------------------------------------------------------------------------ - table.close() # close ASCII table + table.close() # close ASCII table From 64db098e2a0701589926278c81b7c998f87eadea Mon Sep 17 00:00:00 2001 From: Test User Date: Thu, 25 Aug 2016 04:27:12 +0200 Subject: [PATCH 06/26] updated version information after successful test of v2.0.1-49-gf984f1e --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index a4ea1f3b9..a0ee30953 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.1-47-gaf10f92 +v2.0.1-49-gf984f1e From 60765067389f77f21bbedcbfac0655fc71899c4f Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Thu, 25 Aug 2016 11:57:19 -0400 Subject: [PATCH 07/26] general polishing and removal of redundant do-loop --- code/DAMASK_spectral.f90 | 53 +++++++++++++++++++--------------------- 1 file changed, 25 insertions(+), 28 deletions(-) diff --git a/code/DAMASK_spectral.f90 b/code/DAMASK_spectral.f90 index 2793b502c..67e3f4042 100644 --- a/code/DAMASK_spectral.f90 +++ b/code/DAMASK_spectral.f90 @@ -231,8 +231,6 @@ program DAMASK_spectral endif do j = 1_pInt, 9_pInt temp_maskVector(j) = IO_stringValue(line,chunkPos,i+j) /= '*' ! true if not a * - enddo - do j = 1_pInt,9_pInt if (temp_maskVector(j)) temp_valueVector(j) = IO_floatValue(line,chunkPos,i+j) ! read value where applicable enddo loadCases(currentLoadCase)%deformation%maskLogical = & ! logical mask in 3x3 notation @@ -244,8 +242,6 @@ program DAMASK_spectral temp_valueVector = 0.0_pReal do j = 1_pInt, 9_pInt temp_maskVector(j) = IO_stringValue(line,chunkPos,i+j) /= '*' ! true if not an asterisk - enddo - do j = 1_pInt,9_pInt if (temp_maskVector(j)) temp_valueVector(j) = IO_floatValue(line,chunkPos,i+j) ! read value where applicable enddo loadCases(currentLoadCase)%P%maskLogical = transpose(reshape(temp_maskVector,[ 3,3])) @@ -302,14 +298,14 @@ program DAMASK_spectral write(6,'(1x,a,i6)') 'load case: ', currentLoadCase if (.not. loadCases(currentLoadCase)%followFormerTrajectory) & write(6,'(2x,a)') 'drop guessing along trajectory' - if (loadCases(currentLoadCase)%deformation%myType=='l') then + if (loadCases(currentLoadCase)%deformation%myType == 'l') then do j = 1_pInt, 3_pInt if (any(loadCases(currentLoadCase)%deformation%maskLogical(j,1:3) .eqv. .true.) .and. 
& any(loadCases(currentLoadCase)%deformation%maskLogical(j,1:3) .eqv. .false.)) & errorID = 832_pInt ! each row should be either fully or not at all defined enddo write(6,'(2x,a)') 'velocity gradient:' - else if (loadCases(currentLoadCase)%deformation%myType=='f') then + else if (loadCases(currentLoadCase)%deformation%myType == 'f') then write(6,'(2x,a)') 'deformation gradient at end of load case:' else write(6,'(2x,a)') 'deformation gradient rate:' @@ -318,13 +314,13 @@ program DAMASK_spectral if(loadCases(currentLoadCase)%deformation%maskLogical(i,j)) then write(6,'(2x,f12.7)',advance='no') loadCases(currentLoadCase)%deformation%values(i,j) else - write(6,'(2x,12a)',advance='no') ' * ' + write(6,'(2x,12a)',advance='no') ' * ' endif enddo; write(6,'(/)',advance='no') enddo if (any(loadCases(currentLoadCase)%P%maskLogical .eqv. & - loadCases(currentLoadCase)%deformation%maskLogical)) errorID = 831_pInt ! exclusive or masking only - if (any(loadCases(currentLoadCase)%P%maskLogical .and. & + loadCases(currentLoadCase)%deformation%maskLogical)) errorID = 831_pInt ! exclusive or masking only + if (any(loadCases(currentLoadCase)%P%maskLogical .and. & transpose(loadCases(currentLoadCase)%P%maskLogical) .and. & reshape([ .false.,.true.,.true.,.true.,.false.,.true.,.true.,.true.,.false.],[ 3,3]))) & errorID = 838_pInt ! no rotation is allowed by stress BC @@ -333,12 +329,12 @@ program DAMASK_spectral if(loadCases(currentLoadCase)%P%maskLogical(i,j)) then write(6,'(2x,f12.7)',advance='no') loadCases(currentLoadCase)%P%values(i,j)*1e-9_pReal else - write(6,'(2x,12a)',advance='no') ' * ' + write(6,'(2x,12a)',advance='no') ' * ' endif enddo; write(6,'(/)',advance='no') enddo if (any(abs(math_mul33x33(loadCases(currentLoadCase)%rotation, & - math_transpose33(loadCases(currentLoadCase)%rotation))-math_I3) >& + math_transpose33(loadCases(currentLoadCase)%rotation))-math_I3) > & reshape(spread(tol_math_check,1,9),[ 3,3]))& .or. abs(math_det33(loadCases(currentLoadCase)%rotation)) > & 1.0_pReal + tol_math_check) errorID = 846_pInt ! given rotation matrix contains strain @@ -378,10 +374,10 @@ program DAMASK_spectral call Polarisation_init case default - call IO_error(error_ID = 891, ext_msg = trim(spectral_solver)) - - end select - + call IO_error(error_ID = 891_pInt, ext_msg = trim(spectral_solver)) + + end select + case(FIELD_THERMAL_ID) call spectral_thermal_init @@ -428,29 +424,30 @@ program DAMASK_spectral allocate(outputSize(worldsize), source = 0_MPI_OFFSET_KIND) outputSize(worldrank+1) = size(materialpoint_results,kind=MPI_OFFSET_KIND)*int(pReal,MPI_OFFSET_KIND) call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_allreduce') + if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_allreduce') call MPI_file_open(PETSC_COMM_WORLD, & trim(getSolverWorkingDirectoryName())//trim(getSolverJobName())//'.spectralOut', & MPI_MODE_WRONLY + MPI_MODE_APPEND, & MPI_INFO_NULL, & resUnit, & ierr) - if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_open') + if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_open') call MPI_file_get_position(resUnit,fileOffset,ierr) ! get offset from header - if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_get_position') + if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_get_position') fileOffset = fileOffset + sum(outputSize(1:worldrank)) ! 
offset of my process in file (header + processes before me) call MPI_file_seek (resUnit,fileOffset,MPI_SEEK_SET,ierr) - if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_seek') + if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_seek') if (.not. appendToOutFile) then ! if not restarting, write 0th increment - do i=1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output - outputIndex=int([(i-1_pInt)*((maxRealOut)/materialpoint_sizeResults)+1_pInt, & - min(i*((maxRealOut)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt) - call MPI_file_write(resUnit,reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)),& - [(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), & - (outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults,& + do i = 1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output + outputIndex = int([(i-1_pInt)*((maxRealOut)/materialpoint_sizeResults)+1_pInt, & + min(i*((maxRealOut)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt) + call MPI_file_write(resUnit, & + reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)), & + [(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), & + (outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults, & MPI_DOUBLE, MPI_STATUS_IGNORE, ierr) - if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_write') + if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_write') enddo fileOffset = fileOffset + sum(outputSize) ! forward to current file position if (worldrank == 0) & @@ -489,7 +486,7 @@ program DAMASK_spectral endif timeinc = timeinc / 2.0_pReal**real(cutBackLevel,pReal) ! depending on cut back level, decrease time step - forwarding: if(totalIncsCounter >= restartInc) then + forwarding: if (totalIncsCounter >= restartInc) then stepFraction = 0_pInt !-------------------------------------------------------------------------------------------------- @@ -595,7 +592,7 @@ program DAMASK_spectral guess,timeinc,timeIncOld,remainingLoadCaseTime) end select - if(.not. solres(field)%converged) exit ! no solution found + if (.not. solres(field)%converged) exit ! no solution found enddo stagIter = stagIter + 1_pInt stagIterate = stagIter < stagItMax .and. 
& From f9c3d335fcf60a1bba377215191a0ff98668e902 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Thu, 25 Aug 2016 11:59:04 -0400 Subject: [PATCH 08/26] two new methods to convey deemphasis and deletion --- lib/damask/util.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/lib/damask/util.py b/lib/damask/util.py index cfc44f26c..4aec363c5 100644 --- a/lib/damask/util.py +++ b/lib/damask/util.py @@ -20,6 +20,7 @@ class bcolors: FAIL = '\033[91m' ENDC = '\033[0m' BOLD = '\033[1m' + DIM = '\033[2m' UNDERLINE = '\033[4m' def disable(self): @@ -70,9 +71,19 @@ def report_geom(info, # ----------------------------- def emph(what): - """emphasizes string on screen""" + """boldens string""" return bcolors.BOLD+srepr(what)+bcolors.ENDC +# ----------------------------- +def deemph(what): + """dims string""" + return bcolors.DIM+srepr(what)+bcolors.ENDC + +# ----------------------------- +def delete(what): + """dims string""" + return bcolors.DIM+srepr(what)+bcolors.ENDC + # ----------------------------- def execute(cmd, streamIn = None, From d3eb3451e3c9bca2e97fb75537737c7670a2029d Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Thu, 25 Aug 2016 12:00:10 -0400 Subject: [PATCH 09/26] improved readability by splitting to one file per line --- code/Makefile | 113 +++++++++++++++++++++++++++++++++++++------------- 1 file changed, 85 insertions(+), 28 deletions(-) diff --git a/code/Makefile b/code/Makefile index 7e5a130b1..331feec27 100644 --- a/code/Makefile +++ b/code/Makefile @@ -257,10 +257,10 @@ COMPILE_OPTIONS_gfortran :=-DDAMASKVERSION=\"${DAMASKVERSION}\"\ #-Wunsafe-loop-optimizations: warn if the loop cannot be optimized due to nontrivial assumptions. #-Wstrict-overflow: -DEBUG_OPTIONS_gfortran :=-g\ - -fbacktrace\ - -fdump-core\ - -fcheck=all\ +DEBUG_OPTIONS_gfortran :=-g \ + -fbacktrace \ + -fdump-core \ + -fcheck=all \ -ffpe-trap=invalid,zero,overflow ################################################################################################### @@ -300,37 +300,60 @@ COMPILE =$(OPENMP_FLAG_$(F90)) $(STANDARD_CHECK_$(F90)) $(OPTIMIZATION_$ COMPILE_MAXOPTI =$(OPENMP_FLAG_$(F90)) $(STANDARD_CHECK_$(F90)) $(OPTIMIZATION_$(MAXOPTI)_$(F90)) $(COMPILE_OPTIONS_$(F90)) $(INCLUDE_DIRS) $(PRECISION_$(F90)) ################################################################################################### SOURCE_FILES = \ - source_thermal_dissipation.o source_thermal_externalheat.o \ - source_damage_isoBrittle.o source_damage_isoDuctile.o source_damage_anisoBrittle.o source_damage_anisoDuctile.o \ - source_vacancy_phenoplasticity.o source_vacancy_irradiation.o source_vacancy_thermalfluc.o + source_thermal_dissipation.o \ + source_thermal_externalheat.o \ + source_damage_isoBrittle.o \ + source_damage_isoDuctile.o \ + source_damage_anisoBrittle.o \ + source_damage_anisoDuctile.o \ + source_vacancy_phenoplasticity.o \ + source_vacancy_irradiation.o \ + source_vacancy_thermalfluc.o KINEMATICS_FILES = \ - kinematics_cleavage_opening.o kinematics_slipplane_opening.o \ + kinematics_cleavage_opening.o \ + kinematics_slipplane_opening.o \ kinematics_thermal_expansion.o \ - kinematics_vacancy_strain.o kinematics_hydrogen_strain.o + kinematics_vacancy_strain.o \ + kinematics_hydrogen_strain.o PLASTIC_FILES = \ - plastic_dislotwin.o plastic_disloUCLA.o plastic_isotropic.o \ - plastic_phenopowerlaw.o plastic_titanmod.o plastic_nonlocal.o plastic_none.o \ + plastic_dislotwin.o \ + plastic_disloUCLA.o \ + plastic_isotropic.o \ + plastic_phenopowerlaw.o \ + plastic_titanmod.o 
\ + plastic_nonlocal.o \ + plastic_none.o \ plastic_phenoplus.o THERMAL_FILES = \ - thermal_isothermal.o thermal_adiabatic.o thermal_conduction.o + thermal_isothermal.o \ + thermal_adiabatic.o \ + thermal_conduction.o DAMAGE_FILES = \ - damage_none.o damage_local.o damage_nonlocal.o + damage_none.o \ + damage_local.o \ + damage_nonlocal.o VACANCYFLUX_FILES = \ - vacancyflux_isoconc.o vacancyflux_isochempot.o vacancyflux_cahnhilliard.o + vacancyflux_isoconc.o \ + vacancyflux_isochempot.o \ + vacancyflux_cahnhilliard.o POROSITY_FILES = \ - porosity_none.o porosity_phasefield.o + porosity_none.o \ + porosity_phasefield.o HYDROGENFLUX_FILES = \ - hydrogenflux_isoconc.o hydrogenflux_cahnhilliard.o + hydrogenflux_isoconc.o \ + hydrogenflux_cahnhilliard.o HOMOGENIZATION_FILES = \ - homogenization_RGC.o homogenization_isostrain.o homogenization_none.o + homogenization_RGC.o \ + homogenization_isostrain.o \ + homogenization_none.o ##################### # Spectral Solver @@ -351,11 +374,28 @@ DAMASK_spectral.o: INTERFACENAME := spectral_interface.f90 SPECTRAL_SOLVER_FILES = spectral_mech_AL.o spectral_mech_Basic.o spectral_mech_Polarisation.o \ spectral_thermal.o spectral_damage.o -SPECTRAL_FILES = C_routines.o system_routines.o prec.o DAMASK_interface.o IO.o numerics.o debug.o math.o \ - FEsolving.o mesh.o material.o lattice.o \ - $(SOURCE_FILES) $(KINEMATICS_FILES) $(PLASTIC_FILES) constitutive.o \ +SPECTRAL_FILES = C_routines.o \ + system_routines.o \ + prec.o \ + DAMASK_interface.o \ + IO.o \ + numerics.o \ + debug.o \ + math.o \ + FEsolving.o \ + mesh.o \ + material.o \ + lattice.o \ + $(SOURCE_FILES) \ + $(KINEMATICS_FILES) \ + $(PLASTIC_FILES) \ + constitutive.o \ crystallite.o \ - $(THERMAL_FILES) $(DAMAGE_FILES) $(VACANCYFLUX_FILES) $(HYDROGENFLUX_FILES) $(POROSITY_FILES) \ + $(THERMAL_FILES) \ + $(DAMAGE_FILES) \ + $(VACANCYFLUX_FILES) \ + $(HYDROGENFLUX_FILES) \ + $(POROSITY_FILES) \ $(HOMOGENIZATION_FILES) homogenization.o \ CPFEM2.o \ spectral_utilities.o \ @@ -401,14 +441,31 @@ DAMASK_FEM.exe: INCLUDE_DIRS += -I./ FEM_SOLVER_FILES = FEM_mech.o FEM_thermal.o FEM_damage.o FEM_vacancyflux.o FEM_porosity.o FEM_hydrogenflux.o -FEM_FILES = prec.o DAMASK_interface.o FEZoo.o IO.o numerics.o debug.o math.o \ - FEsolving.o mesh.o material.o lattice.o \ - $(SOURCE_FILES) $(KINEMATICS_FILES) $(PLASTIC_FILES) constitutive.o \ +FEM_FILES = prec.o \ + DAMASK_interface.o \ + FEZoo.o \ + IO.o \ + numerics.o \ + debug.o \ + math.o \ + FEsolving.o \ + mesh.o \ + material.o \ + lattice.o \ + $(SOURCE_FILES) \ + $(KINEMATICS_FILES) \ + $(PLASTIC_FILES) \ + constitutive.o \ crystallite.o \ - $(THERMAL_FILES) $(DAMAGE_FILES) $(VACANCYFLUX_FILES) $(HYDROGENFLUX_FILES) $(POROSITY_FILES) \ + $(THERMAL_FILES) \ + $(DAMAGE_FILES) \ + $(VACANCYFLUX_FILES) \ + $(HYDROGENFLUX_FILES) \ + $(POROSITY_FILES) \ $(HOMOGENIZATION_FILES) homogenization.o \ CPFEM.o \ - FEM_utilities.o $(FEM_SOLVER_FILES) + FEM_utilities.o \ + $(FEM_SOLVER_FILES) DAMASK_FEM.exe: DAMASK_FEM_driver.o $(PREFIX) $(LINKERNAME) $(OPENMP_FLAG_$(F90)) $(LINK_OPTIONS_$(F90)) $(STANDARD_CHECK_$(F90)) $(OPTIMIZATION_$(MAXOPTI)_$(F90)) \ @@ -658,8 +715,8 @@ tidy: @rm -rf *.inst.f90 # for instrumentation @rm -rf *.pomp.f90 # for instrumentation @rm -rf *.pp.f90 # for instrumentation - @rm -rf *.pdb # for instrumnentation - @rm -rf *.opari.inc # for instrumnentation + @rm -rf *.pdb # for instrumentation + @rm -rf *.opari.inc # for instrumentation .PHONY: cleanDAMASK cleanDAMASK: From eb9f6c939c4b83e746a4a4293937e6255f32cdf3 Mon Sep 17 00:00:00 2001 From: 
Philip Eisenlohr Date: Thu, 25 Aug 2016 12:08:19 -0400 Subject: [PATCH 10/26] condensed output, DAMASK_marc symlinks are relative, add symlink pruning --- installation/symlink_Code.py | 56 +++++++++++++++++++++--------- installation/symlink_Processing.py | 40 ++++++++++++++++----- 2 files changed, 71 insertions(+), 25 deletions(-) diff --git a/installation/symlink_Code.py b/installation/symlink_Code.py index 1e67c8a7b..933f31c8e 100755 --- a/installation/symlink_Code.py +++ b/installation/symlink_Code.py @@ -11,7 +11,15 @@ bin_link = { \ ], } -MarcReleases =[2011,2012,2013,2013.1,2014,2014.2,2015] +MarcReleases =[ \ + '2011', + '2012', + '2013', + '2013.1', + '2014', + '2014.2', + '2015', + ] damaskEnv = damask.Environment() baseDir = damaskEnv.relPath('code/') @@ -20,27 +28,41 @@ binDir = damaskEnv.options['DAMASK_BIN'] if not os.path.isdir(binDir): os.mkdir(binDir) -for dir in bin_link: - for file in bin_link[dir]: - src = os.path.abspath(os.path.join(baseDir,dir,file)) - if os.path.exists(src): - sym_link = os.path.abspath(os.path.join(binDir,\ - {True: dir, - False:os.path.splitext(file)[0]}[file == ''])) - if os.path.lexists(sym_link): os.remove(sym_link) - os.symlink(src,sym_link) - sys.stdout.write(sym_link+' -> '+src+'\n') +sys.stdout.write('\nsymbolic linking...\n') +for subDir in bin_link: + theDir = os.path.abspath(os.path.join(baseDir,subDir)) + sys.stdout.write('\n'+binDir+' ->\n'+theDir+damask.util.deemph(' ...')+'\n') + + for theFile in bin_link[subDir]: + theName,theExt = os.path.splitext(theFile) + src = os.path.abspath(os.path.join(theDir,theFile)) + + if os.path.exists(src): + sym_link = os.path.abspath(os.path.join(binDir,subDir if theFile == '' else theName)) + + if os.path.lexists(sym_link): + os.remove(sym_link) + output = theName+damask.util.deemph(theExt) + else: + output = damask.util.emph(theName)+damask.util.deemph(theExt) + + sys.stdout.write(damask.util.deemph('... 
')+output+'\n') + os.symlink(src,sym_link) + + +sys.stdout.write('\nMSC.Marc versioning...\n\n') +theMaster = 'DAMASK_marc.f90' for version in MarcReleases: - src = os.path.abspath(os.path.join(baseDir,'DAMASK_marc.f90')) + src = os.path.abspath(os.path.join(baseDir,theMaster)) if os.path.exists(src): - sym_link = os.path.abspath(os.path.join(baseDir,'DAMASK_marc'+str(version)+'.f90')) + sym_link = os.path.abspath(os.path.join(baseDir,'DAMASK_marc{}.f90'.format(version))) if os.path.lexists(sym_link): os.remove(sym_link) - sys.stdout.write(sym_link) + output = version else: - sys.stdout.write(damask.util.emph(sym_link)) + output = damask.util.emph(version) - os.symlink(src,sym_link) - sys.stdout.write(' -> '+src+'\n') + sys.stdout.write(' '+output+'\n') + os.symlink(theMaster,sym_link) diff --git a/installation/symlink_Processing.py b/installation/symlink_Processing.py index 6cb8f9135..d10b5af55 100755 --- a/installation/symlink_Processing.py +++ b/installation/symlink_Processing.py @@ -13,23 +13,47 @@ if not os.path.isdir(binDir): os.mkdir(binDir) #define ToDo list -processing_subDirs = ['pre','post','misc',] -processing_extensions = ['.py','.sh',] - +processing_subDirs = ['pre', + 'post', + 'misc', + ] +processing_extensions = ['.py', + '.sh', + ] + +sys.stdout.write('\nsymbolic linking...\n') + for subDir in processing_subDirs: theDir = os.path.abspath(os.path.join(baseDir,subDir)) + sys.stdout.write('\n'+binDir+' ->\n'+theDir+damask.util.deemph(' ...')+'\n') + for theFile in os.listdir(theDir): - if os.path.splitext(theFile)[1] in processing_extensions: # only consider files with proper extensions + theName,theExt = os.path.splitext(theFile) + if theExt in processing_extensions: # only consider files with proper extensions src = os.path.abspath(os.path.join(theDir,theFile)) - sym_link = os.path.abspath(os.path.join(binDir,os.path.splitext(theFile)[0])) + sym_link = os.path.abspath(os.path.join(binDir,theName)) if os.path.lexists(sym_link): os.remove(sym_link) - sys.stdout.write(sym_link) + output = theName+damask.util.deemph(theExt) else: - sys.stdout.write(damask.util.emph(sym_link)) + output = damask.util.emph(theName)+damask.util.deemph(theExt) + sys.stdout.write(damask.util.deemph('... ')+output+'\n') os.symlink(src,sym_link) - sys.stdout.write(' -> '+src+'\n') + + +sys.stdout.write('\npruning broken links...\n') + +brokenLinks = 0 + +for filename in os.listdir(binDir): + path = os.path.join(binDir,filename) + if os.path.islink(path) and not os.path.exists(path): + sys.stdout.write(' '+damask.util.delete(path)+'\n') + os.remove(path) + brokenLinks += 1 + +sys.stdout.write(('none.' 
if brokenLinks == 0 else '')+'\n') From 85abf84186cd89c9d2e8fd2444b1117fa12b14b7 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Thu, 25 Aug 2016 12:15:03 -0400 Subject: [PATCH 11/26] generalized to user-specified mapping function instead of hardwired avg --- processing/post/averageTable.py | 47 ++++++++++++++++++++++++--------- 1 file changed, 34 insertions(+), 13 deletions(-) diff --git a/processing/post/averageTable.py b/processing/post/averageTable.py index 25c09625c..f9c6693ed 100755 --- a/processing/post/averageTable.py +++ b/processing/post/averageTable.py @@ -2,6 +2,7 @@ # -*- coding: UTF-8 no BOM -*- import os,sys +import math # noqa import numpy as np from optparse import OptionParser import damask @@ -14,7 +15,7 @@ scriptID = ' '.join([scriptName,damask.version]) # -------------------------------------------------------------------- parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """ -Replace all rows for which column 'label' has identical values by a single row containing their average. +Apply a user-specified function to condense all rows for which column 'label' has identical values into a single row. Output table will contain as many rows as there are different (unique) values in the grouping column. Examples: @@ -25,11 +26,33 @@ parser.add_option('-l','--label', dest = 'label', type = 'string', metavar = 'string', help = 'column label for grouping rows') +parser.add_option('-f','--function', + dest = 'function', + type = 'string', metavar = 'string', + help = 'mapping function [%default]') +parser.add_option('-a','--all', + dest = 'all', + action = 'store_true' + help = 'apply mapping function also to grouping column') + +parser.set_defaults(function = 'np.average') (options,filenames) = parser.parse_args() +funcModule,funcName = options.function.split('.') + +try: + mapFunction = getattr(locals().get(funcModule) or + globals().get(funcModule) or + __import__(funcModule), + funcName) +except: + mapFunction = None + if options.label is None: parser.error('no grouping column specified.') +if not hasattr(mapFunction,'__call__'): + parser.error('function "{}" is not callable.'.format(options.function)) # --- loop over input files ------------------------------------------------------------------------- @@ -38,10 +61,6 @@ if filenames == []: filenames = [None] for name in filenames: try: table = damask.ASCIItable(name = name, - outname = os.path.join( - os.path.split(name)[0], - options.label+'_averaged_'+os.path.split(name)[1] - ) if name else name, buffered = False) except: continue damask.util.report(scriptName,name) @@ -53,6 +72,8 @@ for name in filenames: damask.util.croak('column {} is not of scalar dimension.'.format(options.label)) table.close(dismiss = True) # close ASCIItable and remove empty file continue + else: + grpColumn = table.label_index(options.label) # ------------------------------------------ assemble info --------------------------------------- @@ -64,17 +85,17 @@ for name in filenames: table.data_readArray() rows,cols = table.data.shape - table.data = table.data[np.lexsort([table.data[:,table.label_index(options.label)]])] + table.data = table.data[np.lexsort([table.data[:,grpColumn]])] # sort data by grpColumn - values,index = np.unique(table.data[:,table.label_index(options.label)], return_index = True) - index = np.append(index,rows) - avgTable = np.empty((len(values), cols)) + values,index = np.unique(table.data[:,grpColumn], return_index = True) # unique grpColumn values and their 
positions + index = np.append(index,rows) # add termination position + grpTable = np.empty((len(values), cols)) # initialize output - for j in xrange(cols) : - for i in xrange(len(values)) : - avgTable[i,j] = np.average(table.data[index[i]:index[i+1],j]) + for i in xrange(len(values)): # iterate over groups (unique values in grpColumn) + grpTable[i] = np.apply_along_axis(mapFunction,0,table.data[index[i]:index[i+1]]) # apply mapping function + if not options.all: grpTable[i,grpColumn] = table.data[index[i],grpColumn] # restore grouping column value - table.data = avgTable + table.data = grpTable # ------------------------------------------ output result ------------------------------- From 55d6adf1b84e5164143e08b0f7f5ad252ef9b9dc Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Thu, 25 Aug 2016 12:15:41 -0400 Subject: [PATCH 12/26] renamed to reflect more general nature of script --- processing/post/{averageTable.py => groupTable.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename processing/post/{averageTable.py => groupTable.py} (100%) diff --git a/processing/post/averageTable.py b/processing/post/groupTable.py similarity index 100% rename from processing/post/averageTable.py rename to processing/post/groupTable.py From 891ac4d5855800d40ff5bac96dc0e8598eddfe34 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Thu, 25 Aug 2016 12:17:27 -0400 Subject: [PATCH 13/26] fixed comma syntax error --- processing/post/groupTable.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/processing/post/groupTable.py b/processing/post/groupTable.py index f9c6693ed..924105fba 100755 --- a/processing/post/groupTable.py +++ b/processing/post/groupTable.py @@ -32,7 +32,7 @@ parser.add_option('-f','--function', help = 'mapping function [%default]') parser.add_option('-a','--all', dest = 'all', - action = 'store_true' + action = 'store_true', help = 'apply mapping function also to grouping column') parser.set_defaults(function = 'np.average') From 2d43dbc88156cbde1751ca91a065cec533aab09e Mon Sep 17 00:00:00 2001 From: Test User Date: Fri, 26 Aug 2016 04:27:18 +0200 Subject: [PATCH 14/26] updated version information after successful test of v2.0.1-57-g891ac4d --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index a0ee30953..ea7f6e12c 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.1-49-gf984f1e +v2.0.1-57-g891ac4d From 4b02a55f4d34f45986ccf9e3e9312eb6e9d86b1f Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Tue, 30 Aug 2016 16:08:47 -0400 Subject: [PATCH 15/26] explicit type casting from boolean to int --- processing/post/vtk_rectilinearGrid.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/processing/post/vtk_rectilinearGrid.py b/processing/post/vtk_rectilinearGrid.py index 73a64c4aa..dfe376b3f 100755 --- a/processing/post/vtk_rectilinearGrid.py +++ b/processing/post/vtk_rectilinearGrid.py @@ -79,9 +79,9 @@ for name in filenames: coords = [np.unique(table.data[:,i]) for i in xrange(3)] if options.mode == 'cell': - coords = [0.5 * np.array([3.0 * coords[i][0] - coords[i][0 + len(coords[i]) > 1]] + \ + coords = [0.5 * np.array([3.0 * coords[i][0] - coords[i][0 + int(len(coords[i]) > 1)]] + \ [coords[i][j-1] + coords[i][j] for j in xrange(1,len(coords[i]))] + \ - [3.0 * coords[i][-1] - coords[i][-1 - (len(coords[i]) > 1)]]) for i in xrange(3)] + [3.0 * coords[i][-1] - coords[i][-1 - int(len(coords[i]) > 1)]]) for i in xrange(3)] grid = np.array(map(len,coords),'i') N = grid.prod() if options.mode == 
'point' else (grid-1).prod() From 22e7d6d4e129a9dd46a9c9c9c8b02cb7c81d029d Mon Sep 17 00:00:00 2001 From: Test User Date: Wed, 31 Aug 2016 04:26:22 +0200 Subject: [PATCH 16/26] updated version information after successful test of v2.0.1-59-g4b02a55 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index ea7f6e12c..8a765d7c8 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.1-57-g891ac4d +v2.0.1-59-g4b02a55 From d529eae4a44598626badb64d5d9af9dedb8a5e8c Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Wed, 31 Aug 2016 22:54:00 -0400 Subject: [PATCH 17/26] fixed MPI_reduce hiccup; see http://stackoverflow.com/questions/17741574/in-place-mpi-reduce-crashes-with-openmpi --- code/spectral_utilities.f90 | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/code/spectral_utilities.f90 b/code/spectral_utilities.f90 index b9ed5245d..bb11ff294 100644 --- a/code/spectral_utilities.f90 +++ b/code/spectral_utilities.f90 @@ -237,7 +237,7 @@ subroutine utilities_init() grid1Red = grid(1)/2_pInt + 1_pInt wgt = 1.0/real(product(grid),pReal) - if (worldrank == 0) then + if (worldrank == 0_pInt) then write(6,'(a,3(i12 ))') ' grid a b c: ', grid write(6,'(a,3(es12.5))') ' size x y z: ', geomSize endif @@ -1015,10 +1015,19 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, & defgradDetMax = max(defgradDetMax,defgradDet) defgradDetMin = min(defgradDetMin,defgradDet) end do - call MPI_reduce(MPI_IN_PLACE,defgradDetMax,1,MPI_DOUBLE,MPI_MAX,0,PETSC_COMM_WORLD,ierr) - if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_Allreduce max') - call MPI_reduce(MPI_IN_PLACE,defgradDetMin,1,MPI_DOUBLE,MPI_MIN,0,PETSC_COMM_WORLD,ierr) - if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_Allreduce min') + + if (worldrank == 0_pInt) then + call MPI_reduce(MPI_IN_PLACE,defgradDetMax,1,MPI_DOUBLE,MPI_MAX,0,PETSC_COMM_WORLD,ierr) + else + call MPI_reduce(defgradDetMax,defgradDetMax,1,MPI_DOUBLE,MPI_MAX,0,PETSC_COMM_WORLD,ierr) + endif + if (ierr /= 0_pInt) call IO_error(894_pInt, ext_msg='MPI_Allreduce max') + if (worldrank == 0_pInt) then + call MPI_reduce(MPI_IN_PLACE,defgradDetMin,1,MPI_DOUBLE,MPI_MIN,0,PETSC_COMM_WORLD,ierr) + else + call MPI_reduce(defgradDetMin,defgradDetMin,1,MPI_DOUBLE,MPI_MIN,0,PETSC_COMM_WORLD,ierr) + endif + if (ierr /= 0_pInt) call IO_error(894_pInt, ext_msg='MPI_Allreduce min') if (worldrank == 0_pInt) then write(6,'(a,1x,es11.4)') ' max determinant of deformation =', defgradDetMax write(6,'(a,1x,es11.4)') ' min determinant of deformation =', defgradDetMin From 6db0a42eaef45ff7b65da7006bd5140377fb76a0 Mon Sep 17 00:00:00 2001 From: Chen Date: Thu, 1 Sep 2016 12:47:26 -0400 Subject: [PATCH 18/26] addvtk data now supports tensor type (9 components) --- processing/post/vtk_addPointcloudData.py | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/processing/post/vtk_addPointcloudData.py b/processing/post/vtk_addPointcloudData.py index 340ef700e..cd767d5f4 100755 --- a/processing/post/vtk_addPointcloudData.py +++ b/processing/post/vtk_addPointcloudData.py @@ -37,12 +37,17 @@ parser.add_option('-v', '--vector', dest = 'vector', action = 'extend', metavar = '', help = 'vector value label(s)') +parser.add_option('-t', '--tensor', + dest = 'tensor', + action = 'extend', metavar = '', + help = 'tensor (3x3) value label(s)') parser.add_option('-c', '--color', dest='color', action='extend', metavar ='', help = 'RGB color tuples') parser.set_defaults(scalar = [], vector =
[], + tensor = [], color = [], inplace = False, render = False, @@ -94,9 +99,10 @@ for name in filenames: errors = [] VTKarray = {} active = defaultdict(list) - + for datatype,dimension,label in [['scalar',1,options.scalar], ['vector',3,options.vector], + ['tensor',9,options.tensor], ['color',3,options.color], ]: for i,dim in enumerate(table.label_dimension(label)): @@ -107,7 +113,7 @@ for name in filenames: remarks.append('adding {} "{}"...'.format(datatype,me)) active[datatype].append(me) - if datatype in ['scalar','vector']: VTKarray[me] = vtk.vtkDoubleArray() + if datatype in ['scalar','vector', 'tensor']: VTKarray[me] = vtk.vtkDoubleArray() elif datatype == 'color': VTKarray[me] = vtk.vtkUnsignedCharArray() VTKarray[me].SetNumberOfComponents(dimension) @@ -119,20 +125,21 @@ for name in filenames: table.close(dismiss = True) continue -# ------------------------------------------ process data --------------------------------------- +# ------------------------------------------ process data --------------------------------------- while table.data_read(): # read next data line of ASCII table - + for datatype,labels in active.items(): # loop over scalar,color for me in labels: # loop over all requested items theData = [table.data[i] for i in table.label_indexrange(me)] # read strings if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData)) elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData)) + elif datatype == 'tensor': VTKarray[me].InsertNextTuple9(*map(float,theData)) elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0])) table.input_close() # close input ASCII table -# ------------------------------------------ add data --------------------------------------- +# ------------------------------------------ add data --------------------------------------- for datatype,labels in active.items(): # loop over scalar,color if datatype == 'color': @@ -145,7 +152,7 @@ for name in filenames: Polydata.Modified() if vtk.VTK_MAJOR_VERSION <= 5: Polydata.Update() -# ------------------------------------------ output result --------------------------------------- +# ------------------------------------------ output result --------------------------------------- writer = vtk.vtkXMLPolyDataWriter() writer.SetDataModeToBinary() @@ -155,7 +162,7 @@ for name in filenames: else: writer.SetInputData(Polydata) writer.Write() -# ------------------------------------------ render result --------------------------------------- +# ------------------------------------------ render result --------------------------------------- if options.render: mapper = vtk.vtkDataSetMapper() @@ -179,7 +186,7 @@ if options.render: iren = vtk.vtkRenderWindowInteractor() iren.SetRenderWindow(renWin) - + iren.Initialize() renWin.Render() iren.Start() From 32c4a20a46c5e7b9d0e1a3c4d8e5bd2203d57d8e Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Thu, 1 Sep 2016 15:37:49 -0400 Subject: [PATCH 19/26] clean up, added --debug to base class, renamed testPossible to feasible --- lib/damask/test/test.py | 69 +++++++++++++++++++++++------------------ 1 file changed, 39 insertions(+), 30 deletions(-) diff --git a/lib/damask/test/test.py b/lib/damask/test/test.py index c05a6474d..b54615c3a 100644 --- a/lib/damask/test/test.py +++ b/lib/damask/test/test.py @@ -17,40 +17,47 @@ class Test(): variants = [] - def __init__(self,test_description): + def __init__(self,description = ''): - logger = logging.getLogger() - logger.setLevel(0) fh = 
logging.FileHandler('test.log') # create file handler which logs even debug messages fh.setLevel(logging.DEBUG) - full = logging.Formatter('%(asctime)s - %(levelname)s: \n%(message)s') - fh.setFormatter(full) + fh.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s: \n%(message)s')) + ch = logging.StreamHandler(stream=sys.stdout) # create console handler with a higher log level ch.setLevel(logging.INFO) -# create formatter and add it to the handlers - plain = logging.Formatter('%(message)s') - ch.setFormatter(plain) -# add the handlers to the logger + ch.setFormatter(logging.Formatter('%(message)s')) + + logger = logging.getLogger() logger.addHandler(fh) logger.addHandler(ch) + logger.setLevel(0) - logging.info('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n' \ - +'----------------------------------------------------------------\n' \ - +'| '+test_description+'\n' \ - +'----------------------------------------------------------------') + logging.info('\n'.join(['+'*40, + '-'*40, + '| '+description, + '-'*40, + ])) self.dirBase = os.path.dirname(os.path.realpath(sys.modules[self.__class__.__module__].__file__)) - self.parser = OptionParser( - description = test_description+' (using class: {})'.format(damask.version), - usage='./test.py [options]') - self.updateRequested = False - self.parser.add_option("-d", "--debug", action="store_true",\ - dest="debug",\ - help="debug run, don't calculate but use existing results") - self.parser.add_option("-p", "--pass", action="store_true",\ - dest="accept",\ - help="calculate results but always consider test as successfull") - self.parser.set_defaults(debug=False, - accept=False) + + self.parser = OptionParser(description = '{} (using class: {})'.format(description,damask.version), + usage = './test.py [options]') + self.parser.add_option("-d", "--debug", + action = "store_true", + dest = "debug", + help = "debug run, don't calculate but use existing results") + self.parser.add_option("-p", "--pass", + action = "store_true", + dest = "accept", + help = "calculate results but always consider test as successfull") + self.parser.add_option("-u", "--update", + action = "store_true", + dest = "update", + help = "use current test results as new reference" + ) + self.parser.set_defaults(debug = False, + accept = False, + update = False, + ) def execute(self): """Run all variants and report first failure.""" @@ -65,15 +72,17 @@ class Test(): return variant+1 # return culprit return 0 else: - if not self.testPossible(): return -1 + if not self.feasible(): return -1 + self.clean() self.prepareAll() - for variant in xrange(len(self.variants)): + + for variant,name in enumerate(self.variants): try: self.prepare(variant) self.run(variant) self.postprocess(variant) - if self.updateRequested: # update requested + if self.options.update: # update requested self.update(variant) elif not (self.options.accept or self.compare(variant)): # no update, do comparison return variant+1 # return culprit @@ -82,8 +91,8 @@ class Test(): return variant+1 # return culprit return 0 - def testPossible(self): - """Check if test is possible or not (e.g. no license available).""" + def feasible(self): + """Check whether test is possible or not (e.g. 
no license available)."""
     return True
 
   def clean(self):

From 20d1164e5d8ed5ac2b7cd44132b9c765ecb63dfc Mon Sep 17 00:00:00 2001
From: Chen
Date: Thu, 1 Sep 2016 16:57:29 -0400
Subject: [PATCH 20/26] add tensor support for vtk_addRectilinearGridData

---
 processing/post/vtk_addRectilinearGridData.py | 21 ++++++++++++-------
 1 file changed, 14 insertions(+), 7 deletions(-)

diff --git a/processing/post/vtk_addRectilinearGridData.py b/processing/post/vtk_addRectilinearGridData.py
index 63e0bf783..283a076c7 100755
--- a/processing/post/vtk_addRectilinearGridData.py
+++ b/processing/post/vtk_addRectilinearGridData.py
@@ -38,6 +38,10 @@ parser.add_option('-v', '--vector',
                   dest = 'vector',
                   action = 'extend', metavar = '',
                   help = 'vector value label(s)')
+parser.add_option('-t', '--tensor',
+                  dest = 'tensor',
+                  action = 'extend', metavar = '',
+                  help = 'tensor (3x3) value label(s)')
 parser.add_option('-c', '--color',
                   dest = 'color',
                   action = 'extend', metavar = '',
@@ -45,6 +49,7 @@ parser.add_option('-c', '--color',
 
 parser.set_defaults(scalar = [],
                     vector = [],
+                    tensor = [],
                     color = [],
                     inplace = False,
                     render = False,
@@ -92,9 +97,10 @@ for name in filenames:
   errors  = []
   VTKarray = {}
   active = defaultdict(list)
-  
+
   for datatype,dimension,label in [['scalar',1,options.scalar],
                                    ['vector',3,options.vector],
+                                   ['tensor',9,options.tensor],
                                    ['color',3,options.color],
                                    ]:
     for i,dim in enumerate(table.label_dimension(label)):
@@ -105,7 +111,7 @@ for name in filenames:
         remarks.append('adding {} "{}"...'.format(datatype,me))
         active[datatype].append(me)
 
-        if datatype in ['scalar','vector']: VTKarray[me] = vtk.vtkDoubleArray()
+        if datatype in ['scalar','vector','tensor']: VTKarray[me] = vtk.vtkDoubleArray()
         elif datatype == 'color': VTKarray[me] = vtk.vtkUnsignedCharArray()
 
         VTKarray[me].SetNumberOfComponents(dimension)
@@ -117,7 +123,7 @@ for name in filenames:
     table.close(dismiss = True)
     continue
 
-# ------------------------------------------ process data ---------------------------------------
+# ------------------------------------------ process data ---------------------------------------
 
   datacount = 0
 
@@ -129,11 +135,12 @@ for name in filenames:
         theData = [table.data[i] for i in table.label_indexrange(me)]                             # read strings
         if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData))
         elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData))
+        elif datatype == 'tensor': VTKarray[me].InsertNextTuple9(*map(float,theData))
         elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0]))
 
   table.close()                                                                                   # close input ASCII table
 
-# ------------------------------------------ add data ---------------------------------------
+# ------------------------------------------ add data ---------------------------------------
 
   if datacount == Npoints: mode = 'point'
   elif datacount == Ncells: mode = 'cell'
@@ -154,7 +161,7 @@ for name in filenames:
     rGrid.Modified()
     if vtk.VTK_MAJOR_VERSION <= 5: rGrid.Update()
 
-# ------------------------------------------ output result ---------------------------------------
+# ------------------------------------------ output result ---------------------------------------
 
   writer = vtk.vtkXMLRectilinearGridWriter()
   writer.SetDataModeToBinary()
@@ -164,7 +171,7 @@ for name in filenames:
   else: writer.SetInputData(rGrid)
   writer.Write()
 
-# ------------------------------------------ render result ---------------------------------------
+# ------------------------------------------ render result ---------------------------------------
 
 if options.render:
   mapper = vtk.vtkDataSetMapper()
@@ -188,7 +195,7 @@ if options.render:
 
   iren = vtk.vtkRenderWindowInteractor()
   iren.SetRenderWindow(renWin)
-  
+
   iren.Initialize()
   renWin.Render()
   iren.Start()

From d9077805e497e05f929b759ab24ddc1abb2e9997 Mon Sep 17 00:00:00 2001
From: chen
Date: Fri, 2 Sep 2016 09:30:49 -0400
Subject: [PATCH 21/26] Forced symmetric tensor when adding tensor to VTK

VTK addTensor by default uses the lower triangle to populate its tensor
object. The tensor is therefore enforced to be symmetric to avoid
unnecessary confusion when adding data (most stress/strain tensors
should be symmetric anyway, so this should not affect the results).
---
 processing/post/vtk_addPointcloudData.py      | 13 ++++++++-----
 processing/post/vtk_addRectilinearGridData.py |  7 +++++--
 2 files changed, 13 insertions(+), 7 deletions(-)

diff --git a/processing/post/vtk_addPointcloudData.py b/processing/post/vtk_addPointcloudData.py
index cd767d5f4..557b9fdd3 100755
--- a/processing/post/vtk_addPointcloudData.py
+++ b/processing/post/vtk_addPointcloudData.py
@@ -131,11 +131,14 @@ for name in filenames:
 
   for datatype,labels in active.items():                                                          # loop over scalar,color
     for me in labels:                                                                             # loop over all requested items
-      theData = [table.data[i] for i in table.label_indexrange(me)]                               # read strings
-      if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData))
-      elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData))
-      elif datatype == 'tensor': VTKarray[me].InsertNextTuple9(*map(float,theData))
-      elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0]))
+      theData = [float(table.data[i]) for i in table.label_indexrange(me)]                        # read strings
+      if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*x),theData))
+      elif datatype == 'scalar': VTKarray[me].InsertNextValue(theData[0])
+      elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*theData)
+      elif datatype == 'tensor': VTKarray[me].InsertNextTuple9(*0.5*(np.array(theData)+
+                                                                     np.array(theData) \
+                                                                      .reshape(3,3).T \
+                                                                      .reshape(9)))
 
   table.input_close()                                                                             # close input ASCII table
 
diff --git a/processing/post/vtk_addRectilinearGridData.py b/processing/post/vtk_addRectilinearGridData.py
index 283a076c7..8f639711b 100755
--- a/processing/post/vtk_addRectilinearGridData.py
+++ b/processing/post/vtk_addRectilinearGridData.py
@@ -134,9 +134,12 @@ for name in filenames:
     for me in labels:                                                                             # loop over all requested items
       theData = [table.data[i] for i in table.label_indexrange(me)]                               # read strings
       if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData))
-      elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData))
-      elif datatype == 'tensor': VTKarray[me].InsertNextTuple9(*map(float,theData))
       elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0]))
+      elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData))
+      elif datatype == 'tensor': VTKarray[me].InsertNextTuple9(*0.5*(np.array(theData)+
+                                                                     np.array(theData) \
+                                                                      .reshape(3,3).T \
+                                                                      .reshape(9)))
 
   table.close()                                                                                   # close input ASCII table
 

From 2b3faf204c4e745a605ce52c4d6c4691899e9712 Mon Sep 17 00:00:00 2001
From: Chen
Date: Fri, 2 Sep 2016 14:30:08 -0400
Subject: [PATCH 22/26] make table compare normalize data by type (scalar,
 vector, tensor)

---
 lib/damask/test/test.py | 169 ++++++++++++++++++++--------------------
 1 file changed, 85 insertions(+), 84 deletions(-)

diff --git a/lib/damask/test/test.py b/lib/damask/test/test.py
index 
b54615c3a..023e7cb87 100644 --- a/lib/damask/test/test.py +++ b/lib/damask/test/test.py @@ -5,6 +5,7 @@ import os,sys,shutil import logging,logging.config import damask import numpy as np +import itertools from collections import Iterable from optparse import OptionParser @@ -16,7 +17,7 @@ class Test(): """ variants = [] - + def __init__(self,description = ''): fh = logging.FileHandler('test.log') # create file handler which logs even debug messages @@ -90,11 +91,11 @@ class Test(): logging.critical('\nWARNING:\n {}\n'.format(e)) return variant+1 # return culprit return 0 - + def feasible(self): """Check whether test is possible or not (e.g. no license available).""" return True - + def clean(self): """Delete directory tree containing current results.""" status = True @@ -112,7 +113,7 @@ class Test(): status = status and False return status - + def prepareAll(self): """Do all necessary preparations for the whole test""" return True @@ -120,7 +121,7 @@ class Test(): def prepare(self,variant): """Do all necessary preparations for the run of each test variant""" return True - + def run(self,variant): """Execute the requested test variant.""" @@ -152,17 +153,17 @@ class Test(): """Directory containing current results of the test.""" return os.path.normpath(os.path.join(self.dirBase,'current/')) - + def dirProof(self): """Directory containing human readable proof of correctness for the test.""" return os.path.normpath(os.path.join(self.dirBase,'proof/')) - + def fileInRoot(self,dir,file): """Path to a file in the root directory of DAMASK.""" return os.path.join(damask.Environment().rootDir(),dir,file) - + def fileInReference(self,file): """Path to a file in the refrence directory for the test.""" return os.path.join(self.dirReference(),file) @@ -172,7 +173,7 @@ class Test(): """Path to a file in the current results directory for the test.""" return os.path.join(self.dirCurrent(),file) - + def fileInProof(self,file): """Path to a file in the proof directory for the test.""" return os.path.join(self.dirProof(),file) @@ -189,58 +190,58 @@ class Test(): for source,target in zip(map(mapA,A),map(mapB,B)): try: - shutil.copy2(source,target) + shutil.copy2(source,target) except: logging.critical('error copying {} to {}'.format(source,target)) def copy_Reference2Current(self,sourcefiles=[],targetfiles=[]): - + if len(targetfiles) == 0: targetfiles = sourcefiles for i,file in enumerate(sourcefiles): try: - shutil.copy2(self.fileInReference(file),self.fileInCurrent(targetfiles[i])) + shutil.copy2(self.fileInReference(file),self.fileInCurrent(targetfiles[i])) except: logging.critical('Reference2Current: Unable to copy file "{}"'.format(file)) - + def copy_Base2Current(self,sourceDir,sourcefiles=[],targetfiles=[]): - + source=os.path.normpath(os.path.join(self.dirBase,'../../..',sourceDir)) if len(targetfiles) == 0: targetfiles = sourcefiles for i,file in enumerate(sourcefiles): try: - shutil.copy2(os.path.join(source,file),self.fileInCurrent(targetfiles[i])) + shutil.copy2(os.path.join(source,file),self.fileInCurrent(targetfiles[i])) except: logging.error(os.path.join(source,file)) logging.critical('Base2Current: Unable to copy file "{}"'.format(file)) def copy_Current2Reference(self,sourcefiles=[],targetfiles=[]): - + if len(targetfiles) == 0: targetfiles = sourcefiles for i,file in enumerate(sourcefiles): try: - shutil.copy2(self.fileInCurrent(file),self.fileInReference(targetfiles[i])) + shutil.copy2(self.fileInCurrent(file),self.fileInReference(targetfiles[i])) except: 
logging.critical('Current2Reference: Unable to copy file "{}"'.format(file)) - + def copy_Proof2Current(self,sourcefiles=[],targetfiles=[]): - + if len(targetfiles) == 0: targetfiles = sourcefiles for i,file in enumerate(sourcefiles): try: - shutil.copy2(self.fileInProof(file),self.fileInCurrent(targetfiles[i])) + shutil.copy2(self.fileInProof(file),self.fileInCurrent(targetfiles[i])) except: logging.critical('Proof2Current: Unable to copy file "{}"'.format(file)) - + def copy_Current2Current(self,sourcefiles=[],targetfiles=[]): - + for i,file in enumerate(sourcefiles): try: - shutil.copy2(self.fileInReference(file),self.fileInCurrent(targetfiles[i])) + shutil.copy2(self.fileInReference(file),self.fileInCurrent(targetfiles[i])) except: logging.critical('Current2Current: Unable to copy file "{}"'.format(file)) @@ -252,11 +253,11 @@ class Test(): logging.info(error) logging.debug(out) - - return out,error - - + return out,error + + + def compare_Array(self,File1,File2): import numpy as np @@ -287,28 +288,28 @@ class Test(): def compare_ArrayRefCur(self,ref,cur=''): - + if cur =='': cur = ref refName = self.fileInReference(ref) curName = self.fileInCurrent(cur) return self.compare_Array(refName,curName) - + def compare_ArrayCurCur(self,cur0,cur1): - + cur0Name = self.fileInCurrent(cur0) cur1Name = self.fileInCurrent(cur1) return self.compare_Array(cur0Name,cur1Name) def compare_Table(self,headings0,file0,headings1,file1,normHeadings='',normType=None, absoluteTolerance=False,perLine=False,skipLines=[]): - + import numpy as np logging.info('\n '.join(['comparing ASCII Tables',file0,file1])) if normHeadings == '': normHeadings = headings0 # check if comparison is possible and determine lenght of columns - if len(headings0) == len(headings1) == len(normHeadings): + if len(headings0) == len(headings1) == len(normHeadings): dataLength = len(headings0) length = [1 for i in xrange(dataLength)] shape = [[] for i in xrange(dataLength)] @@ -316,14 +317,14 @@ class Test(): maxError = [0.0 for i in xrange(dataLength)] absTol = [absoluteTolerance for i in xrange(dataLength)] column = [[1 for i in xrange(dataLength)] for j in xrange(2)] - + norm = [[] for i in xrange(dataLength)] normLength = [1 for i in xrange(dataLength)] normShape = [[] for i in xrange(dataLength)] normColumn = [1 for i in xrange(dataLength)] for i in xrange(dataLength): - if headings0[i]['shape'] != headings1[i]['shape']: + if headings0[i]['shape'] != headings1[i]['shape']: raise Exception('shape mismatch between {} and {} '.format(headings0[i]['label'],headings1[i]['label'])) shape[i] = headings0[i]['shape'] for j in xrange(np.shape(shape[i])[0]): @@ -339,7 +340,7 @@ class Test(): table0 = damask.ASCIItable(name=file0,readonly=True) table0.head_read() table1 = damask.ASCIItable(name=file1,readonly=True) - table1.head_read() + table1.head_read() for i in xrange(dataLength): key0 = ('1_' if length[i]>1 else '') + headings0[i]['label'] @@ -355,7 +356,7 @@ class Test(): column[0][i] = table0.label_index(key0) column[1][i] = table1.label_index(key1) normColumn[i] = table0.label_index(normKey) - + line0 = 0 while table0.data_read(): # read next data line of ASCII table if line0 not in skipLines: @@ -370,7 +371,7 @@ class Test(): else: norm[i] = np.append(norm[i],np.linalg.norm(np.reshape(normData,normShape[i]),normType)) line0 += 1 - + for i in xrange(dataLength): if not perLine: norm[i] = [np.max(norm[i]) for j in xrange(line0-len(skipLines))] data[i] = np.reshape(data[i],[line0-len(skipLines),length[i]]) @@ -441,14 +442,14 @@ class Test(): 
logging.info(files[i]+':'+','.join(columns[i])) if len(files) < 2: return True # single table is always close to itself... - + data = [] for table,labels in zip(tables,columns): table.data_readArray(labels) data.append(table.data) table.close() - - + + for i in xrange(1,len(data)): delta = data[i]-data[i-1] normBy = (np.abs(data[i]) + np.abs(data[i-1]))*0.5 @@ -457,7 +458,7 @@ class Test(): std = np.amax(np.std(normedDelta,0)) logging.info('mean: {:f}'.format(mean)) logging.info('std: {:f}'.format(std)) - + return (mean0.0, maximum, 1) # avoid div by zero for empty columns + + maximum = np.where(maximum > 0.0, maximum, 1) # avoid div by zero for empty columns + + + # normalize each table for i in xrange(len(data)): data[i] /= maximum - - mask = np.zeros_like(table.data,dtype='bool') - for table in data: - mask |= np.where(np.abs(table) Date: Fri, 2 Sep 2016 14:31:00 -0400 Subject: [PATCH 23/26] remove unused import for test.py --- lib/damask/test/test.py | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/damask/test/test.py b/lib/damask/test/test.py index 023e7cb87..cffc318a6 100644 --- a/lib/damask/test/test.py +++ b/lib/damask/test/test.py @@ -5,7 +5,6 @@ import os,sys,shutil import logging,logging.config import damask import numpy as np -import itertools from collections import Iterable from optparse import OptionParser From 4c6b8c490f015b66a25d57820e3917f9c14fe3d4 Mon Sep 17 00:00:00 2001 From: Chen Date: Fri, 2 Sep 2016 14:34:35 -0400 Subject: [PATCH 24/26] change one-line docstring to fit in one line. --- lib/damask/test/test.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/lib/damask/test/test.py b/lib/damask/test/test.py index cffc318a6..b64485234 100644 --- a/lib/damask/test/test.py +++ b/lib/damask/test/test.py @@ -468,9 +468,7 @@ class Test(): rtol = 1e-5, atol = 1e-8, debug = False): - """ - compare tables with np.allclose - """ + """ compare tables with np.allclose """ if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested files = [str(files)] From d41ac4e46354abd134fb65c65d0dbc6f8f9b8969 Mon Sep 17 00:00:00 2001 From: Chen Date: Fri, 2 Sep 2016 14:35:23 -0400 Subject: [PATCH 25/26] remove white space in docstring --- lib/damask/test/test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/damask/test/test.py b/lib/damask/test/test.py index b64485234..10fb993e2 100644 --- a/lib/damask/test/test.py +++ b/lib/damask/test/test.py @@ -468,7 +468,7 @@ class Test(): rtol = 1e-5, atol = 1e-8, debug = False): - """ compare tables with np.allclose """ + """compare tables with np.allclose""" if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested files = [str(files)] From a2f3839ca84a1f6ee006b796dcecf409b1c2d2bd Mon Sep 17 00:00:00 2001 From: Chen Date: Fri, 2 Sep 2016 14:37:15 -0400 Subject: [PATCH 26/26] add missing numpy import --- processing/post/vtk_addPointcloudData.py | 1 + processing/post/vtk_addRectilinearGridData.py | 1 + 2 files changed, 2 insertions(+) diff --git a/processing/post/vtk_addPointcloudData.py b/processing/post/vtk_addPointcloudData.py index 557b9fdd3..9736e54d0 100755 --- a/processing/post/vtk_addPointcloudData.py +++ b/processing/post/vtk_addPointcloudData.py @@ -3,6 +3,7 @@ import os,vtk import damask +import numpy as np from collections import defaultdict from optparse import OptionParser diff --git a/processing/post/vtk_addRectilinearGridData.py b/processing/post/vtk_addRectilinearGridData.py index 
8f639711b..df85288af 100755
--- a/processing/post/vtk_addRectilinearGridData.py
+++ b/processing/post/vtk_addRectilinearGridData.py
@@ -3,6 +3,7 @@
 
 import os,vtk
 import damask
+import numpy as np
 from collections import defaultdict
 from optparse import OptionParser
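
Note on the --tensor option (PATCHes 18, 20, 26): tensor columns reuse
vtk.vtkDoubleArray, only with nine components per tuple. A minimal usage
sketch, assuming a working VTK Python build (the array name 'stress' is
made up for illustration):

    import vtk

    arr = vtk.vtkDoubleArray()
    arr.SetNumberOfComponents(9)                        # one 3x3 tensor per tuple
    arr.SetName('stress')
    arr.InsertNextTuple9(1.,0.,0., 0.,1.,0., 0.,0.,1.)  # row-major 3x3 values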
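
Note on PATCH 21: the InsertNextTuple9 call symmetrizes the flat
9-component (row-major 3x3) tensor by averaging it with its transpose.
A standalone sketch of that operation (not part of the commits above;
theData stands for the values read from one ASCII-table row):

    import numpy as np

    def symmetrized(theData):
        """Average a flat row-major 3x3 tensor with its transpose."""
        t = np.array(theData, dtype=float)            # flat 9-vector
        return 0.5*(t + t.reshape(3,3).T.reshape(9))  # 0.5*(T + T^t), kept flat

    print(symmetrized(range(9)))                      # -> [0. 2. 4. 2. 4. 6. 4. 6. 8.]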
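
Note on PATCH 22: compare_Tables normalizes every column by the largest
absolute value found across all tables before checking closeness with
np.allclose. A minimal re-implementation sketch of that logic
(hypothetical helper, not the class method itself; each table is taken
to be a 2D numpy array of equal shape):

    import numpy as np

    def tables_close(data, rtol=1e-5, atol=1e-8):
        """True if all arrays in 'data' agree after column-wise normalization."""
        maximum = np.zeros(data[0].shape[1])
        for d in data:                                       # column-wise maximum over all tables
            maximum = np.maximum(maximum, np.abs(d).max(axis=0))
        maximum = np.where(maximum > 0.0, maximum, 1.0)      # avoid div by zero for empty columns
        normed = [d/maximum for d in data]
        return all(np.allclose(a, b, rtol=rtol, atol=atol)
                   for a, b in zip(normed[:-1], normed[1:]))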