diff --git a/DAMASK_env.sh b/DAMASK_env.sh
index ad3c59dea..16850d6b4 100644
--- a/DAMASK_env.sh
+++ b/DAMASK_env.sh
@@ -53,8 +53,10 @@ if [ ! -z "$PS1" ]; then
   [[ "x$SOLVER" != "x" ]] && echo "Spectral Solver $SOLVER"
   [[ "x$PROCESSING" != "x" ]] && echo "Post Processing $PROCESSING"
   echo "Multithreading DAMASK_NUM_THREADS=$DAMASK_NUM_THREADS"
-  [[ "x$PETSC_DIR" != "x" ]] && echo "PETSc location $PETSC_DIR" && \
-  [[ `readlink -f $PETSC_DIR` == $PETSC_DIR ]] || echo " ~~> "`readlink -f $PETSC_DIR`
+  if [ "x$PETSC_DIR" != "x" ]; then
+    echo "PETSc location $PETSC_DIR"
+    [[ `readlink -f $PETSC_DIR` == $PETSC_DIR ]] || echo " ~~> "`readlink -f $PETSC_DIR`
+  fi
   [[ "x$PETSC_ARCH" != "x" ]] && echo "PETSc architecture $PETSC_ARCH"
   echo "MSC.Marc/Mentat $MSC_ROOT"
   echo
diff --git a/DAMASK_env.zsh b/DAMASK_env.zsh
index 5cf5b30f8..b4b2d6953 100644
--- a/DAMASK_env.zsh
+++ b/DAMASK_env.zsh
@@ -51,8 +51,10 @@ if [ ! -z "$PS1" ]; then
   [[ "x$SOLVER" != "x" ]] && echo "Spectral Solver $SOLVER"
   [[ "x$PROCESSING" != "x" ]] && echo "Post Processing $PROCESSING"
   echo "Multithreading DAMASK_NUM_THREADS=$DAMASK_NUM_THREADS"
-  [[ "x$PETSC_DIR" != "x" ]] && echo "PETSc location $PETSC_DIR" && \
-  [[ `readlink -f $PETSC_DIR` == $PETSC_DIR ]] || echo " ~~> "`readlink -f $PETSC_DIR`
+  if [ "x$PETSC_DIR" != "x" ]; then
+    echo "PETSc location $PETSC_DIR"
+    [[ `readlink -f $PETSC_DIR` == $PETSC_DIR ]] || echo " ~~> "`readlink -f $PETSC_DIR`
+  fi
   [[ "x$PETSC_ARCH" != "x" ]] && echo "PETSc architecture $PETSC_ARCH"
   echo "MSC.Marc/Mentat $MSC_ROOT"
   echo
diff --git a/code/.gitignore b/code/.gitignore
index 9467b3147..6184a3d0a 100644
--- a/code/.gitignore
+++ b/code/.gitignore
@@ -1 +1,2 @@
 DAMASK_marc*.f90
+quit__genmod.f90
diff --git a/code/DAMASK_spectral.f90 b/code/DAMASK_spectral.f90
index 3f4d6764e..e72cd3dec 100644
--- a/code/DAMASK_spectral.f90
+++ b/code/DAMASK_spectral.f90
@@ -59,8 +59,6 @@ program DAMASK_spectral
    materialpoint_sizeResults, &
    materialpoint_results, &
    materialpoint_postResults
-
-
  use material, only: &
    thermal_type, &
    damage_type, &
@@ -439,14 +437,14 @@ program DAMASK_spectral
 
   if (.not. appendToOutFile) then                                            ! if not restarting, write 0th increment
     do i=1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output
-      outputIndex=[(i-1)*((maxByteOut/pReal)/materialpoint_sizeResults)+1, &
-                   min(i*((maxByteOut/pReal)/materialpoint_sizeResults),size(materialpoint_results,3))]
+      outputIndex=int([(i-1_pInt)*((maxByteOut/pReal)/materialpoint_sizeResults)+1_pInt, &
+                       min(i*((maxByteOut/pReal)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt)
       call MPI_file_write(resUnit,reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)),&
                           [(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), &
                           (outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults,&
                           MPI_DOUBLE, MPI_STATUS_IGNORE, ierr)
-      fileOffset = fileOffset + sum(outputSize)                              ! forward to current file position
     enddo
+    fileOffset = fileOffset + sum(outputSize)                                ! forward to current file position
     if (worldrank == 0) &
       write(6,'(1/,a)') ' ... writing initial configuration to file ........................'
   endif
@@ -646,14 +644,14 @@ program DAMASK_spectral
         call materialpoint_postResults()
         call MPI_file_seek (resUnit,fileOffset,MPI_SEEK_SET,ierr)
         do i=1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output
-          outputIndex=[(i-1)*maxByteOut/pReal/materialpoint_sizeResults+1, &
-                       min(i*maxByteOut/pReal/materialpoint_sizeResults,size(materialpoint_results,3))]
+          outputIndex=int([(i-1_pInt)*((maxByteOut/pReal)/materialpoint_sizeResults)+1_pInt, &
+                           min(i*((maxByteOut/pReal)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt)
           call MPI_file_write(resUnit,reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)),&
                               [(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), &
                               (outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults,&
                               MPI_DOUBLE, MPI_STATUS_IGNORE, ierr)
-          fileOffset = fileOffset + sum(outputSize)                          ! forward to current file position
         enddo
+        fileOffset = fileOffset + sum(outputSize)                            ! forward to current file position
       endif
       if( loadCases(currentLoadCase)%restartFrequency > 0_pInt .and. &      ! at frequency of writing restart information set restart parameter for FEsolving
           mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0_pInt) then ! first call to CPFEM_general will write?
diff --git a/lib/damask/.gitignore b/lib/damask/.gitignore
new file mode 100644
index 000000000..fd736fe35
--- /dev/null
+++ b/lib/damask/.gitignore
@@ -0,0 +1 @@
+core.so
diff --git a/processing/post/addQuaternions.py b/processing/post/addQuaternions.py
deleted file mode 100755
index fe0e3781f..000000000
--- a/processing/post/addQuaternions.py
+++ /dev/null
@@ -1,100 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: UTF-8 no BOM -*-
-
-import os,sys,numpy as np
-from optparse import OptionParser
-import damask
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID = ' '.join([scriptName,damask.version])
-
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
-Add Quaternions based on Crystal Frame Coordinates.
-
-""", version = scriptID)
-
-parser.add_option('-f','--frame',
-                  dest='frame', nargs=4, type='string', metavar='',
-                  help='heading of columns containing b* vector components and three frame vectors in that order')
-parser.add_option('-s','--symmetry',
-                  dest='crysym', nargs=1,type='string',metavar='',
-                  help='crystal symmetry definition')
-parser.set_defaults(frame = None)
-
-(options,filenames) = parser.parse_args()
-
-if options.frame is None:
-  parser.error('no data column specified...')
-
-datainfo = {'len':4,
-            'label':[]
-           }
-
-if options.frame is not None: datainfo['label'] += options.frame
-
-# --- loop over input files -------------------------------------------------------------------------
-
-if filenames == []: filenames = [None]
-
-for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              buffered = False)
-  except: continue
-  damask.util.report(scriptName,name)
-
-  table.head_read()                                          # read ASCII header info
-
-# --------------- figure out columns to process ---------------------------------------------------
-  active = []
-  column = {}
-
-  for label in datainfo['label']:
-    key = '1_'+label if datainfo['len'] > 1 else label       # non-special labels have to start with '1_'
-    if key in table.labels:
-      active.append(label)
-      column[label] = table.labels.index(key)                # remember columns of requested data
-    else:
-      damask.util.croak('column %s not found...'%label)
-
-# ------------------------------------------ assemble header ---------------------------------------
-
-  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-  table.labels_append(['Q_%i'%(i+1) for i in xrange(4)])     # extend ASCII header with new labels [1 real, 3 imaginary components]
-  table.head_write()
-
-# ------------------------------------------ process data ------------------------------------------
-  outputAlive = True
-  while outputAlive and table.data_read():                   # read next data line of ASCII table
-    vec = np.zeros([4,3])
-    for i,label in enumerate(active):
-      vec[i,:] = np.array(table.data[column[label]:
-                                     column[label]+3])
-
-    if sys.argv[1:][6]=='hexagonal':                         # Ensure Input matrix is orthogonal
-      M=np.dot(vec[0,:],vec[2,:])
-      vec[1,:]=vec[1,:]/np.linalg.norm(vec[1,:])
-      vec[2,:]=M*(vec[0,:]/np.linalg.norm(vec[0,:]))
-      vec[3,:]=vec[3,:]/np.linalg.norm(vec[3,:])
-    else:
-      vec[1,:]=vec[1,:]/np.linalg.norm(vec[1,:])
-      vec[2,:]=vec[2,:]/np.linalg.norm(vec[2,:])
-      vec[3,:]=vec[3,:]/np.linalg.norm(vec[3,:])
-
-
-    Ori=damask.Orientation(matrix=vec[1:,:],symmetry=sys.argv[1:][6])
-
-    table.data_append(np.asarray(Ori.asQuaternion()))
-
-
-    outputAlive = table.data_write()                         # output processed line
-
-# ------------------------------------------ output result -----------------------------------------
-  outputAlive and table.output_flush()                       # just in case of buffered ASCII table
-
-  table.close()                                              # close ASCII tables