Merge branch 'LargeFiles' into development

commit fafedd5cd6
Author: Martin Diehl
Date:   2016-03-22 15:45:42 +01:00

6 changed files with 16 additions and 112 deletions


@@ -53,8 +53,10 @@ if [ ! -z "$PS1" ]; then
   [[ "x$SOLVER" != "x" ]] && echo "Spectral Solver $SOLVER"
   [[ "x$PROCESSING" != "x" ]] && echo "Post Processing $PROCESSING"
   echo "Multithreading DAMASK_NUM_THREADS=$DAMASK_NUM_THREADS"
-  [[ "x$PETSC_DIR" != "x" ]] && echo "PETSc location $PETSC_DIR" && \
-  [[ `readlink -f $PETSC_DIR` == $PETSC_DIR ]] || echo " ~~> "`readlink -f $PETSC_DIR`
+  if [ "x$PETSC_DIR" != "x" ]; then
+    echo "PETSc location $PETSC_DIR"
+    [[ `readlink -f $PETSC_DIR` == $PETSC_DIR ]] || echo " ~~> "`readlink -f $PETSC_DIR`
+  fi
   [[ "x$PETSC_ARCH" != "x" ]] && echo "PETSc architecture $PETSC_ARCH"
   echo "MSC.Marc/Mentat $MSC_ROOT"
   echo


@@ -51,8 +51,10 @@ if [ ! -z "$PS1" ]; then
   [[ "x$SOLVER" != "x" ]] && echo "Spectral Solver $SOLVER"
   [[ "x$PROCESSING" != "x" ]] && echo "Post Processing $PROCESSING"
   echo "Multithreading DAMASK_NUM_THREADS=$DAMASK_NUM_THREADS"
-  [[ "x$PETSC_DIR" != "x" ]] && echo "PETSc location $PETSC_DIR" && \
-  [[ `readlink -f $PETSC_DIR` == $PETSC_DIR ]] || echo " ~~> "`readlink -f $PETSC_DIR`
+  if [ "x$PETSC_DIR" != "x" ]; then
+    echo "PETSc location $PETSC_DIR"
+    [[ `readlink -f $PETSC_DIR` == $PETSC_DIR ]] || echo " ~~> "`readlink -f $PETSC_DIR`
+  fi
   [[ "x$PETSC_ARCH" != "x" ]] && echo "PETSc architecture $PETSC_ARCH"
   echo "MSC.Marc/Mentat $MSC_ROOT"
   echo
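
Note on the two env-script hunks above: `&&` and `||` have equal precedence in the shell, so in the removed one-liner the trailing `|| echo " ~~> "...` was the fallback for the whole chain, not just for the symlink test. With PETSC_DIR unset, the very first test fails and a spurious arrow line is still printed; the added `if` block makes the symlink check reachable only when PETSC_DIR is actually set. A minimal Python model of the two control-flow shapes (hypothetical helper `resolve`, standing in for `readlink -f`; not DAMASK code):

    import os

    def resolve(path):                                   # stand-in for `readlink -f`
        return os.path.realpath(path)

    def report_old(petsc_dir):
        # shape of the removed one-liner:  test && echo && test || echo " ~~> "...
        # the trailing "or" is the fallback for the WHOLE chain, so it also
        # fires when the first test fails, i.e. when PETSC_DIR is unset
        if not (petsc_dir != ""
                and print("PETSc location", petsc_dir) is None   # echo: always succeeds
                and resolve(petsc_dir) == petsc_dir):
            print(" ~~> " + resolve(petsc_dir))          # spurious for petsc_dir == ""

    def report_new(petsc_dir):
        # shape of the added if-block: the symlink check is unreachable
        # unless PETSC_DIR is actually set
        if petsc_dir != "":
            print("PETSc location", petsc_dir)
            if resolve(petsc_dir) != petsc_dir:
                print(" ~~> " + resolve(petsc_dir))

    report_old("")                                       # prints a spurious arrow line
    report_new("")                                       # prints nothing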

code/.gitignore

@@ -1 +1,2 @@
 DAMASK_marc*.f90
+quit__genmod.f90


@@ -59,8 +59,6 @@ program DAMASK_spectral
    materialpoint_sizeResults, &
    materialpoint_results, &
    materialpoint_postResults
  use material, only: &
    thermal_type, &
    damage_type, &
@@ -439,14 +437,14 @@ program DAMASK_spectral
   if (.not. appendToOutFile) then                                         ! if not restarting, write 0th increment
     do i=1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1  ! slice the output of my process in chunks not exceeding the limit for one output
-      outputIndex=[(i-1)*((maxByteOut/pReal)/materialpoint_sizeResults)+1, &
-                   min(i*((maxByteOut/pReal)/materialpoint_sizeResults),size(materialpoint_results,3))]
+      outputIndex=int([(i-1_pInt)*((maxByteOut/pReal)/materialpoint_sizeResults)+1_pInt, &
+                       min(i*((maxByteOut/pReal)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt)
       call MPI_file_write(resUnit,reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)),&
                                           [(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), &
                           (outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults,&
                           MPI_DOUBLE, MPI_STATUS_IGNORE, ierr)
-      fileOffset = fileOffset + sum(outputSize)                            ! forward to current file position
     enddo
+    fileOffset = fileOffset + sum(outputSize)                              ! forward to current file position
     if (worldrank == 0) &
       write(6,'(1/,a)') ' ... writing initial configuration to file ........................'
   endif
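
In both write paths the index pair is now built with `int(..., pLongInt)`: the byte limit divided by the real size (`maxByteOut/pReal`, with pReal an 8-byte double) gives the number of entries per chunk, and widening that arithmetic to 64-bit keeps the indices valid once result files grow past the range of a default integer, in line with the 'LargeFiles' branch name. A hedged Python sketch of the slicing bounds (assumed names, 1-based indices to mirror outputIndex):

    # hedged sketch of the chunking above; pReal is taken to be 8 bytes
    BYTES_PER_REAL = 8

    def output_chunks(n_points, size_results, max_byte_out):
        per_chunk = (max_byte_out // BYTES_PER_REAL) // size_results  # points per MPI_file_write
        for i in range(1, n_points // per_chunk + 2):                 # same upper bound as the do-loop
            lo = (i - 1) * per_chunk + 1
            hi = min(i * per_chunk, n_points)
            if lo <= hi:                                              # final iteration may be empty
                yield lo, hi

    print(list(output_chunks(n_points=10, size_results=3, max_byte_out=64)))
    # -> [(1, 2), (3, 4), (5, 6), (7, 8), (9, 10)]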
@@ -646,14 +644,14 @@ program DAMASK_spectral
     call materialpoint_postResults()
     call MPI_file_seek (resUnit,fileOffset,MPI_SEEK_SET,ierr)
     do i=1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1  ! slice the output of my process in chunks not exceeding the limit for one output
-      outputIndex=[(i-1)*maxByteOut/pReal/materialpoint_sizeResults+1, &
-                   min(i*maxByteOut/pReal/materialpoint_sizeResults,size(materialpoint_results,3))]
+      outputIndex=int([(i-1_pInt)*((maxByteOut/pReal)/materialpoint_sizeResults)+1_pInt, &
+                       min(i*((maxByteOut/pReal)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt)
       call MPI_file_write(resUnit,reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)),&
                                           [(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), &
                           (outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults,&
                           MPI_DOUBLE, MPI_STATUS_IGNORE, ierr)
-      fileOffset = fileOffset + sum(outputSize)                            ! forward to current file position
     enddo
+    fileOffset = fileOffset + sum(outputSize)                              ! forward to current file position
   endif
   if( loadCases(currentLoadCase)%restartFrequency > 0_pInt .and. &         ! at frequency of writing restart information set restart parameter for FEsolving
       mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0_pInt) then ! first call to CPFEM_general will write?
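
The other fix visible in both hunks: `sum(outputSize)` is the size of one complete increment's output, so incrementing `fileOffset` inside the chunk loop advanced the shared MPI file offset once per chunk, overshooting whenever the loop ran more than once; the merged version advances it once, after the `enddo`. A hedged one-function sketch of the corrected bookkeeping (assumed names, not the DAMASK API):

    # advance the shared file offset once per written increment, not per chunk
    def write_increment(offset, chunks, increment_bytes):
        for lo, hi in chunks:
            pass                                  # chunked MPI_file_write calls would go here
        return offset + increment_bytes           # advance once, as after the enddo

    offset = 0
    for _ in range(3):                            # three written increments
        offset = write_increment(offset, [(1, 2), (3, 4)], increment_bytes=1024)
    print(offset)                                 # -> 3072, independent of the chunk count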

lib/damask/.gitignore

@@ -0,0 +1 @@
+core.so


@@ -1,100 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: UTF-8 no BOM -*-
-
-import os,sys,numpy as np
-from optparse import OptionParser
-import damask
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
-Add Quaternions based on Crystal Frame Coordinates.
-
-""", version = scriptID)
-
-parser.add_option('-f','--frame',
-                  dest='frame', nargs=4, type='string', metavar='<string string string string>',
-                  help='heading of columns containing b* vector components and three frame vectors in that order')
-parser.add_option('-s','--symmetry',
-                  dest='crysym', nargs=1,type='string',metavar='<string>',
-                  help='crystal symmetry definition')
-parser.set_defaults(frame = None)
-
-(options,filenames) = parser.parse_args()
-
-if options.frame is None:
-  parser.error('no data column specified...')
-
-datainfo = {'len':4,
-            'label':[]
-           }
-
-if options.frame is not None: datainfo['label'] += options.frame
-
-# --- loop over input files -------------------------------------------------------------------------
-
-if filenames == []: filenames = [None]
-
-for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              buffered = False)
-  except: continue
-  damask.util.report(scriptName,name)
-
-  table.head_read()                                             # read ASCII header info
-
-# --------------- figure out columns to process ---------------------------------------------------
-  active = []
-  column = {}
-
-  for label in datainfo['label']:
-    key = '1_'+label if datainfo['len'] > 1 else label          # non-special labels have to start with '1_'
-    if key in table.labels:
-      active.append(label)
-      column[label] = table.labels.index(key)                   # remember columns of requested data
-    else:
-      damask.util.croak('column %s not found...'%label)

-# ------------------------------------------ assemble header ---------------------------------------
-  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-  table.labels_append(['Q_%i'%(i+1) for i in xrange(4)])        # extend ASCII header with new labels [1 real, 3 imaginary components]
-  table.head_write()
-
-# ------------------------------------------ process data ------------------------------------------
-  outputAlive = True
-  while outputAlive and table.data_read():                      # read next data line of ASCII table
-    vec = np.zeros([4,3])
-    for i,label in enumerate(active):
-      vec[i,:] = np.array(table.data[column[label]:
-                                     column[label]+3])
-
-    if sys.argv[1:][6]=='hexagonal':                            # Ensure Input matrix is orthogonal
-      M=np.dot(vec[0,:],vec[2,:])
-      vec[1,:]=vec[1,:]/np.linalg.norm(vec[1,:])
-      vec[2,:]=M*(vec[0,:]/np.linalg.norm(vec[0,:]))
-      vec[3,:]=vec[3,:]/np.linalg.norm(vec[3,:])
-    else:
-      vec[1,:]=vec[1,:]/np.linalg.norm(vec[1,:])
-      vec[2,:]=vec[2,:]/np.linalg.norm(vec[2,:])
-      vec[3,:]=vec[3,:]/np.linalg.norm(vec[3,:])
-
-    Ori=damask.Orientation(matrix=vec[1:,:],symmetry=sys.argv[1:][6])
-
-    table.data_append(np.asarray(Ori.asQuaternion()))
-    outputAlive = table.data_write()                            # output processed line
-
-# ------------------------------------------ output result -----------------------------------------
-  outputAlive and table.output_flush()                          # just in case of buffered ASCII table
-
-  table.close()                                                 # close ASCII tables
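
One fragility worth noting in the deleted script: it parses `-s/--symmetry` into `options.crysym` but never uses it; the hexagonal branch and the `damask.Orientation` call instead read `sys.argv[1:][6]`, a fixed positional slot that only matches one exact command-line layout. A small demonstration of that pitfall (plain optparse, hypothetical argument values; using the parsed option works for any order):

    from optparse import OptionParser

    parser = OptionParser()
    parser.add_option('-f', '--frame', dest='frame', nargs=4, type='string')
    parser.add_option('-s', '--symmetry', dest='crysym', type='string')

    for argv in (['-f', 'a', 'b', 'c', 'd', '-s', 'hexagonal'],   # the one layout that works
                 ['-s', 'hexagonal', '-f', 'a', 'b', 'c', 'd']):  # same meaning, different order
        options, _ = parser.parse_args(argv)
        positional = argv[6]                     # what sys.argv[1:][6] would have seen
        print(options.crysym, 'vs', positional)
    # -> hexagonal vs hexagonal
    # -> hexagonal vs d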