Merge branch 'development' of magit1.mpie.de:damask/DAMASK into development

This commit is contained in:
Philip Eisenlohr 2016-04-25 10:24:30 -05:00
commit 0f0ca8cd71
14 changed files with 188 additions and 646 deletions

View File

@ -1,19 +0,0 @@
:: sets up an environment for DAMASK on Windows
:: usage: call DAMASK_env.bat
@echo off
set LOCATION=%~dp0
set DAMASK_ROOT=%LOCATION%\DAMASK
set DAMASK_NUM_THREADS=2
chcp 1252
Title Düsseldorf Advanced Materials Simulation Kit - DAMASK, MPIE Düsseldorf
echo.
echo Düsseldorf Advanced Materials Simulation Kit - DAMASK
echo Max-Planck-Institut für Eisenforschung, Düsseldorf
echo http://damask.mpie.de
echo.
echo Preparing environment ...
echo DAMASK_ROOT=%DAMASK_ROOT%
echo DAMASK_NUM_THREADS=%DAMASK_NUM_THREADS%
set DAMASK_BIN=%DAMASK_ROOT%\bin
set PATH=%PATH%;%DAMASK_BIN%
set PYTHONPATH=%PYTHONPATH%;%DAMASK_ROOT%\lib

View File

@ -1 +1 @@
v2.0.0-97-g8b27de7
v2.0.0-160-g26f437b

View File

@ -258,7 +258,8 @@ subroutine crystallite_init
allocate(crystallite_orientation(4,cMax,iMax,eMax), source=0.0_pReal)
allocate(crystallite_orientation0(4,cMax,iMax,eMax), source=0.0_pReal)
allocate(crystallite_rotation(4,cMax,iMax,eMax), source=0.0_pReal)
allocate(crystallite_disorientation(4,nMax,cMax,iMax,eMax), source=0.0_pReal)
if (any(plasticState%nonLocal)) &
allocate(crystallite_disorientation(4,nMax,cMax,iMax,eMax),source=0.0_pReal)
allocate(crystallite_localPlasticity(cMax,iMax,eMax), source=.true.)
allocate(crystallite_requested(cMax,iMax,eMax), source=.false.)
allocate(crystallite_todo(cMax,iMax,eMax), source=.false.)
@ -3977,50 +3978,51 @@ subroutine crystallite_orientations
! --- CALCULATE ORIENTATION AND LATTICE ROTATION ---
!$OMP PARALLEL DO PRIVATE(orientation)
nonlocalPresent: if (any(plasticState%nonLocal)) then
!$OMP PARALLEL DO PRIVATE(orientation)
do e = FEsolving_execElem(1),FEsolving_execElem(2)
do i = FEsolving_execIP(1,e),FEsolving_execIP(2,e)
do c = 1_pInt,homogenization_Ngrains(mesh_element(3,e))
! this subroutine is somehow not threadsafe, so a critical statement is needed here; it is not clear what exactly the problem is
!$OMP CRITICAL (polarDecomp)
!$OMP CRITICAL (polarDecomp)
orientation = math_RtoQ(transpose(math_rotationalPart33(crystallite_Fe(1:3,1:3,c,i,e)))) ! rotational part from polar decomposition as quaternion
!$OMP END CRITICAL (polarDecomp)
!$OMP END CRITICAL (polarDecomp)
crystallite_rotation(1:4,c,i,e) = lattice_qDisorientation(crystallite_orientation0(1:4,c,i,e), & ! active rotation from ori0
orientation) ! to current orientation (with no symmetry)
orientation) ! to current orientation (with no symmetry)
crystallite_orientation(1:4,c,i,e) = orientation
enddo; enddo; enddo
!$OMP END PARALLEL DO
!$OMP END PARALLEL DO
! --- UPDATE SOME ADDITIONAL VARIABLES THAT ARE NEEDED FOR NONLOCAL MATERIAL ---
! --- we use crystallite_orientation from above, so need a separate loop
!$OMP PARALLEL DO PRIVATE(myPhase,neighboring_e,neighboring_i,neighboringPhase)
!$OMP PARALLEL DO PRIVATE(myPhase,neighboring_e,neighboring_i,neighboringPhase)
do e = FEsolving_execElem(1),FEsolving_execElem(2)
do i = FEsolving_execIP(1,e),FEsolving_execIP(2,e)
myPhase = material_phase(1,i,e) ! get my phase (non-local models make no sense with more than one grain per material point)
if (plasticState(myPhase)%nonLocal) then ! if nonlocal model
myPhase = material_phase(1,i,e) ! get my phase (non-local models make no sense with more than one grain per material point)
if (plasticState(myPhase)%nonLocal) then ! if nonlocal model
! --- calculate disorientation between me and my neighbor ---
do n = 1_pInt,FE_NipNeighbors(FE_celltype(FE_geomtype(mesh_element(2,e)))) ! loop through my neighbors
do n = 1_pInt,FE_NipNeighbors(FE_celltype(FE_geomtype(mesh_element(2,e)))) ! loop through my neighbors
neighboring_e = mesh_ipNeighborhood(1,n,i,e)
neighboring_i = mesh_ipNeighborhood(2,n,i,e)
if (neighboring_e > 0 .and. neighboring_i > 0) then ! if neighbor exists
neighboringPhase = material_phase(1,neighboring_i,neighboring_e) ! get my neighbor's phase
if (plasticState(neighboringPhase)%nonLocal) then ! neighbor also has nonlocal plasticity
if (lattice_structure(myPhase) == lattice_structure(neighboringPhase)) then ! if my neighbor has the same crystal structure as me
if (neighboring_e > 0 .and. neighboring_i > 0) then ! if neighbor exists
neighboringPhase = material_phase(1,neighboring_i,neighboring_e) ! get my neighbor's phase
if (plasticState(neighboringPhase)%nonLocal) then ! neighbor also has nonlocal plasticity
if (lattice_structure(myPhase) == lattice_structure(neighboringPhase)) then ! if my neighbor has the same crystal structure as me
crystallite_disorientation(:,n,1,i,e) = &
lattice_qDisorientation( crystallite_orientation(1:4,1,i,e), &
crystallite_orientation(1:4,1,neighboring_i,neighboring_e), &
lattice_structure(myPhase)) ! calculate disorientation for given symmetry
else ! for neighbor with different phase
crystallite_disorientation(:,n,1,i,e) = [0.0_pReal, 1.0_pReal, 0.0_pReal, 0.0_pReal] ! 180 degree rotation about 100 axis
lattice_structure(myPhase)) ! calculate disorientation for given symmetry
else ! for neighbor with different phase
crystallite_disorientation(:,n,1,i,e) = [0.0_pReal, 1.0_pReal, 0.0_pReal, 0.0_pReal]! 180 degree rotation about 100 axis
endif
else ! for neighbor with local plasticity
crystallite_disorientation(:,n,1,i,e) = [-1.0_pReal, 0.0_pReal, 0.0_pReal, 0.0_pReal] ! homomorphic identity
else ! for neighbor with local plasticity
crystallite_disorientation(:,n,1,i,e) = [-1.0_pReal, 0.0_pReal, 0.0_pReal, 0.0_pReal]! homomorphic identity
endif
else ! no existing neighbor
crystallite_disorientation(:,n,1,i,e) = [-1.0_pReal, 0.0_pReal, 0.0_pReal, 0.0_pReal] ! homomorphic identity
else ! no existing neighbor
crystallite_disorientation(:,n,1,i,e) = [-1.0_pReal, 0.0_pReal, 0.0_pReal, 0.0_pReal] ! homomorphic identity
endif
enddo
@ -4031,7 +4033,8 @@ subroutine crystallite_orientations
endif
enddo; enddo
!$OMP END PARALLEL DO
!$OMP END PARALLEL DO
endif nonlocalPresent
end subroutine crystallite_orientations

View File

@ -94,11 +94,11 @@ module lattice
LATTICE_fcc_NcleavageSystem = int([3, 4, 0],pInt) !< total # of cleavage systems per family for fcc
integer(pInt), parameter, private :: &
LATTICE_fcc_Nslip = 12_pInt, & ! sum(lattice_fcc_NslipSystem), & !< total # of slip systems for fcc
LATTICE_fcc_Ntwin = 12_pInt, & ! sum(lattice_fcc_NtwinSystem) !< total # of twin systems for fcc
LATTICE_fcc_Nslip = sum(lattice_fcc_NslipSystem), & !< total # of slip systems for fcc
LATTICE_fcc_Ntwin = sum(lattice_fcc_NtwinSystem), & !< total # of twin systems for fcc
LATTICE_fcc_NnonSchmid = 0_pInt, & !< total # of non-Schmid contributions for fcc
LATTICE_fcc_Ntrans = 12_pInt, & !< total # of transformations for fcc
LATTICE_fcc_Ncleavage = 7_pInt !< total # of cleavage systems for fcc
LATTICE_fcc_Ntrans = sum(lattice_fcc_NtransSystem), & !< total # of transformation systems for fcc
LATTICE_fcc_Ncleavage = sum(lattice_fcc_NcleavageSystem) !< total # of cleavage systems for fcc
real(pReal), dimension(3+3,LATTICE_fcc_Nslip), parameter, private :: &
LATTICE_fcc_systemSlip = reshape(real([&
@ -365,7 +365,7 @@ module lattice
!--------------------------------------------------------------------------------------------------
! bcc
integer(pInt), dimension(LATTICE_maxNslipFamily), parameter, public :: &
LATTICE_bcc_NslipSystem = int([ 12, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], pInt) !< total # of slip systems per family for bcc
LATTICE_bcc_NslipSystem = int([ 12, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], pInt) !< total # of slip systems per family for bcc
integer(pInt), dimension(LATTICE_maxNtwinFamily), parameter, public :: &
LATTICE_bcc_NtwinSystem = int([ 12, 0, 0, 0], pInt) !< total # of twin systems per family for bcc
@ -374,16 +374,15 @@ module lattice
LATTICE_bcc_NtransSystem = int([0,0],pInt) !< total # of transformation systems per family for bcc
integer(pInt), dimension(LATTICE_maxNcleavageFamily), parameter, public :: &
LATTICE_bcc_NcleavageSystem = int([3,6,0],pInt) !< total # of cleavage systems per family for bcc
LATTICE_bcc_NcleavageSystem = int([3,6,0],pInt) !< total # of cleavage systems per family for bcc
integer(pInt), parameter, private :: &
LATTICE_bcc_Nslip = 24_pInt, & ! sum(lattice_bcc_NslipSystem), & !< total # of slip systems for bcc
LATTICE_bcc_Ntwin = 12_pInt, & ! sum(lattice_bcc_NtwinSystem) !< total # of twin systems for bcc
LATTICE_bcc_NnonSchmid = 6_pInt, & !< # of non-Schmid contributions for bcc. 6 known non schmid contributions for BCC (A. Koester, A. Ma, A. Hartmaier 2012)
LATTICE_bcc_Ntrans = 0_pInt, & !< total # of transformations for bcc
LATTICE_bcc_Ncleavage = 9_pInt !< total # of cleavage systems for bcc
LATTICE_bcc_Nslip = sum(lattice_bcc_NslipSystem), & !< total # of slip systems for bcc
LATTICE_bcc_Ntwin = sum(lattice_bcc_NtwinSystem), & !< total # of twin systems for bcc
LATTICE_bcc_NnonSchmid = 6_pInt, & !< total # of non-Schmid contributions for bcc (A. Koester, A. Ma, A. Hartmaier 2012)
LATTICE_bcc_Ntrans = sum(lattice_bcc_NtransSystem), & !< total # of transformation systems for bcc
LATTICE_bcc_Ncleavage = sum(lattice_bcc_NcleavageSystem) !< total # of cleavage systems for bcc
real(pReal), dimension(3+3,LATTICE_bcc_Nslip), parameter, private :: &
LATTICE_bcc_systemSlip = reshape(real([&
! Slip direction Plane normal
@ -563,7 +562,7 @@ module lattice
!--------------------------------------------------------------------------------------------------
! hex
integer(pInt), dimension(LATTICE_maxNslipFamily), parameter, public :: &
lattice_hex_NslipSystem = int([ 3, 3, 3, 6, 12, 6, 0, 0, 0, 0, 0, 0, 0],pInt) !< # of slip systems per family for hex
lattice_hex_NslipSystem = int([ 3, 3, 3, 6, 12, 6, 0, 0, 0, 0, 0, 0, 0],pInt) !< # of slip systems per family for hex
integer(pInt), dimension(LATTICE_maxNtwinFamily), parameter, public :: &
lattice_hex_NtwinSystem = int([ 6, 6, 6, 6],pInt) !< # of slip systems per family for hex
@ -572,14 +571,14 @@ module lattice
LATTICE_hex_NtransSystem = int([0,0],pInt) !< total # of transformation systems per family for hex
integer(pInt), dimension(LATTICE_maxNcleavageFamily), parameter, public :: &
LATTICE_hex_NcleavageSystem = int([3,0,0],pInt) !< total # of cleavage systems per family for hex
LATTICE_hex_NcleavageSystem = int([3,0,0],pInt) !< total # of cleavage systems per family for hex
integer(pInt), parameter , private :: &
LATTICE_hex_Nslip = 33_pInt, & ! sum(lattice_hex_NslipSystem), !< total # of slip systems for hex
LATTICE_hex_Ntwin = 24_pInt, & ! sum(lattice_hex_NtwinSystem) !< total # of twin systems for hex
LATTICE_hex_NnonSchmid = 0_pInt, & !< # of non-Schmid contributions for hex
LATTICE_hex_Ntrans = 0_pInt, & !< total # of transformations for hex
LATTICE_hex_Ncleavage = 3_pInt !< total # of transformations for hex
integer(pInt), parameter, private :: &
LATTICE_hex_Nslip = sum(lattice_hex_NslipSystem), & !< total # of slip systems for hex
LATTICE_hex_Ntwin = sum(lattice_hex_NtwinSystem), & !< total # of twin systems for hex
LATTICE_hex_NnonSchmid = 0_pInt, & !< total # of non-Schmid contributions for hex
LATTICE_hex_Ntrans = sum(lattice_hex_NtransSystem), & !< total # of transformation systems for hex
LATTICE_hex_Ncleavage = sum(lattice_hex_NcleavageSystem) !< total # of cleavage systems for hex
real(pReal), dimension(4+4,LATTICE_hex_Nslip), parameter, private :: &
LATTICE_hex_systemSlip = reshape(real([&
@ -842,7 +841,6 @@ module lattice
],pReal),[ 4_pInt + 4_pInt,LATTICE_hex_Ncleavage])
!--------------------------------------------------------------------------------------------------
! bct
integer(pInt), dimension(LATTICE_maxNslipFamily), parameter, public :: &
@ -856,14 +854,13 @@ module lattice
integer(pInt), dimension(LATTICE_maxNcleavageFamily), parameter, public :: &
LATTICE_bct_NcleavageSystem = int([0,0,0],pInt) !< total # of cleavage systems per family for bct
integer(pInt), parameter , private :: &
LATTICE_bct_Nslip = 52_pInt, & ! sum(lattice_bct_NslipSystem), !< total # of slip systems for bct
LATTICE_bct_Ntwin = 0_pInt, & ! sum(lattice_bcc_NtwinSystem) !< total # of twin systems for bct
LATTICE_bct_NnonSchmid = 0_pInt, & !< # of non-Schmid contributions for bct
LATTICE_bct_Ntrans = 0_pInt, & !< total # of transformations for bct
LATTICE_bct_Ncleavage = 0_pInt !< total # of transformations for bct
integer(pInt), parameter, private :: &
LATTICE_bct_Nslip = sum(lattice_bct_NslipSystem), & !< total # of slip systems for bct
LATTICE_bct_Ntwin = sum(lattice_bct_NtwinSystem), & !< total # of twin systems for bct
LATTICE_bct_NnonSchmid = 0_pInt, & !< total # of non-Schmid contributions for bct
LATTICE_bct_Ntrans = sum(lattice_bct_NtransSystem), & !< total # of transformation systems for bct
LATTICE_bct_Ncleavage = sum(lattice_bct_NcleavageSystem) !< total # of cleavage systems for bct
real(pReal), dimension(3+3,LATTICE_bct_Nslip), parameter, private :: &
LATTICE_bct_systemSlip = reshape(real([&
@ -1007,10 +1004,10 @@ module lattice
!--------------------------------------------------------------------------------------------------
! isotropic
integer(pInt), dimension(LATTICE_maxNcleavageFamily), parameter, public :: &
LATTICE_iso_NcleavageSystem = int([3,0,0],pInt) !< total # of cleavage systems per family for isotropic
LATTICE_iso_NcleavageSystem = int([3,0,0],pInt) !< total # of cleavage systems per family for iso
integer(pInt), parameter, private :: &
LATTICE_iso_Ncleavage = 3_pInt !< total # of cleavage systems for bcc
LATTICE_iso_Ncleavage = sum(LATTICE_iso_NcleavageSystem) !< total # of cleavage systems for iso
real(pReal), dimension(3+3,LATTICE_iso_Ncleavage), parameter, private :: &
LATTICE_iso_systemCleavage = reshape(real([&
@ -1023,10 +1020,10 @@ module lattice
!--------------------------------------------------------------------------------------------------
! orthorhombic
integer(pInt), dimension(LATTICE_maxNcleavageFamily), parameter, public :: &
LATTICE_ortho_NcleavageSystem = int([1,1,1],pInt) !< total # of cleavage systems per family for orthotropic
LATTICE_ortho_NcleavageSystem = int([1,1,1],pInt) !< total # of cleavage systems per family for ortho
integer(pInt), parameter, private :: &
LATTICE_ortho_Ncleavage = 3_pInt !< total # of cleavage systems for bcc
LATTICE_ortho_Ncleavage = sum(LATTICE_ortho_NcleavageSystem) !< total # of cleavage systems for ortho
real(pReal), dimension(3+3,LATTICE_ortho_Ncleavage), parameter, private :: &
LATTICE_ortho_systemCleavage = reshape(real([&
@ -1036,16 +1033,16 @@ module lattice
1, 0, 0, 0, 0, 1 &
],pReal),[ 3_pInt + 3_pInt,LATTICE_ortho_Ncleavage])
real(pReal), dimension(:,:,:), allocatable, public, protected :: &
real(pReal), dimension(:,:,:), allocatable, public, protected :: &
lattice_C66, lattice_trans_C66
real(pReal), dimension(:,:,:,:,:), allocatable, public, protected :: &
real(pReal), dimension(:,:,:,:,:), allocatable, public, protected :: &
lattice_C3333, lattice_trans_C3333
real(pReal), dimension(:), allocatable, public, protected :: &
real(pReal), dimension(:), allocatable, public, protected :: &
lattice_mu, &
lattice_nu, &
lattice_trans_mu, &
lattice_trans_nu
real(pReal), dimension(:,:,:), allocatable, public, protected :: &
real(pReal), dimension(:,:,:), allocatable, public, protected :: &
lattice_thermalConductivity33, &
lattice_thermalExpansion33, &
lattice_damageDiffusion33, &
@ -1054,7 +1051,7 @@ module lattice
lattice_porosityDiffusion33, &
lattice_hydrogenfluxDiffusion33, &
lattice_hydrogenfluxMobility33
real(pReal), dimension(:), allocatable, public, protected :: &
real(pReal), dimension(:), allocatable, public, protected :: &
lattice_damageMobility, &
lattice_porosityMobility, &
lattice_massDensity, &

View File

@ -1645,14 +1645,14 @@ pure function math_qToAxisAngle(Q)
real(pReal) :: halfAngle, sinHalfAngle
real(pReal), dimension(4) :: math_qToAxisAngle
halfAngle = acos(max(-1.0_pReal, min(1.0_pReal, Q(1)))) ! limit to [-1,1] --> 0 to 180 deg
halfAngle = acos(math_limit(Q(1),-1.0_pReal,1.0_pReal))
sinHalfAngle = sin(halfAngle)
if (sinHalfAngle <= 1.0e-4_pReal) then ! very small rotation angle?
smallRotation: if (sinHalfAngle <= 1.0e-4_pReal) then
math_qToAxisAngle = 0.0_pReal
else
else smallRotation
math_qToAxisAngle= [ Q(2:4)/sinHalfAngle, halfAngle*2.0_pReal]
endif
endif smallRotation
end function math_qToAxisAngle
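
For reference, the same conversion in NumPy terms (a minimal sketch, not DAMASK API; the clip mirrors the math_limit call above, guarding acos against rounding just outside [-1,1]):

import numpy as np

def q_to_axis_angle(q):                                # q = (w, x, y, z), assumed to be a unit quaternion
    half_angle = np.arccos(np.clip(q[0], -1.0, 1.0))   # clamp w before acos
    sin_half   = np.sin(half_angle)
    if sin_half <= 1.0e-4:                             # (near-)zero rotation
        return np.zeros(4)
    return np.append(q[1:4]/sin_half, 2.0*half_angle)  # [axis_x, axis_y, axis_z, angle]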

View File

@ -7,7 +7,6 @@
!! untextured polycrystal
!--------------------------------------------------------------------------------------------------
module plastic_isotropic
use prec, only: &
pReal,&
pInt, &

View File

@ -303,36 +303,45 @@ class Colormap():
'interpolate',
]
__predefined__ = {
'gray': {'left': Color('HSL',[0,1,1]),
'gray': {'left': Color('HSL',[0,1,1]),
'right': Color('HSL',[0,0,0.15]),
'interpolate': 'perceptualuniform'},
'grey': {'left': Color('HSL',[0,1,1]),
'grey': {'left': Color('HSL',[0,1,1]),
'right': Color('HSL',[0,0,0.15]),
'interpolate': 'perceptualuniform'},
'red': {'left': Color('HSL',[0,1,0.14]),
'red': {'left': Color('HSL',[0,1,0.14]),
'right': Color('HSL',[0,0.35,0.91]),
'interpolate': 'perceptualuniform'},
'green': {'left': Color('HSL',[0.33333,1,0.14]),
'green': {'left': Color('HSL',[0.33333,1,0.14]),
'right': Color('HSL',[0.33333,0.35,0.91]),
'interpolate': 'perceptualuniform'},
'blue': {'left': Color('HSL',[0.66,1,0.14]),
'blue': {'left': Color('HSL',[0.66,1,0.14]),
'right': Color('HSL',[0.66,0.35,0.91]),
'interpolate': 'perceptualuniform'},
'seaweed': {'left': Color('HSL',[0.78,1.0,0.1]),
'seaweed': {'left': Color('HSL',[0.78,1.0,0.1]),
'right': Color('HSL',[0.40000,0.1,0.9]),
'interpolate': 'perceptualuniform'},
'bluebrown': {'left': Color('HSL',[0.65,0.53,0.49]),
'bluebrown': {'left': Color('HSL',[0.65,0.53,0.49]),
'right': Color('HSL',[0.11,0.75,0.38]),
'interpolate': 'perceptualuniform'},
'redgreen': {'left': Color('HSL',[0.97,0.96,0.36]),
'redgreen': {'left': Color('HSL',[0.97,0.96,0.36]),
'right': Color('HSL',[0.33333,1.0,0.14]),
'interpolate': 'perceptualuniform'},
'bluered': {'left': Color('HSL',[0.65,0.53,0.49]),
'bluered': {'left': Color('HSL',[0.65,0.53,0.49]),
'right': Color('HSL',[0.97,0.96,0.36]),
'interpolate': 'perceptualuniform'},
'blueredrainbow':{'left': Color('HSL',[2.0/3.0,1,0.5]),
'blueredrainbow':{'left': Color('HSL',[2.0/3.0,1,0.5]),
'right': Color('HSL',[0,1,0.5]),
'interpolate': 'linear' },
'orientation': {'left': Color('RGB',[0.933334,0.878432,0.878431]),
'right': Color('RGB',[0.250980,0.007843,0.000000]),
'interpolate': 'perceptualuniform'},
'strain': {'left': Color('RGB',[0.941177,0.941177,0.870588]),
'right': Color('RGB',[0.266667,0.266667,0.000000]),
'interpolate': 'perceptualuniform'},
'stress': {'left': Color('RGB',[0.878432,0.874511,0.949019]),
'right': Color('RGB',[0.000002,0.000000,0.286275]),
'interpolate': 'perceptualuniform'},
}
@ -344,7 +353,7 @@ class Colormap():
predefined = None
):
if str(predefined).lower() in self.__predefined__:
if predefined is not None:
left = self.__predefined__[predefined.lower()]['left']
right= self.__predefined__[predefined.lower()]['right']
interpolate = self.__predefined__[predefined.lower()]['interpolate']
@ -442,11 +451,12 @@ class Colormap():
format = format.lower() # consistent comparison basis
frac = 0.5*(np.array(crop) + 1.0) # rescale crop range to fractions
colors = [self.color(float(i)/(steps-1)*(frac[1]-frac[0])+frac[0]).expressAs(model).color for i in xrange(steps)]
if format == 'paraview':
colormap = ['<ColorMap name="'+str(name)+'" space="Diverging">'] \
+ ['<Point x="%i"'%i + ' o="1" r="%g" g="%g" b="%g"/>'%(color[0],color[1],color[2],) for i,color in colors] \
+ ['</ColorMap>']
colormap = ['[\n {{\n "ColorSpace" : "RGB", "Name" : "{}",\n "RGBPoints" : ['.format(name)] \
+ [' {:4d},{:8.6f},{:8.6f},{:8.6f},'.format(i,color[0],color[1],color[2],)
for i,color in enumerate(colors[:-1])]\
+ [' {:4d},{:8.6f},{:8.6f},{:8.6f} '.format(i+1,colors[-1][0],colors[-1][1],colors[-1][2],)]\
+ [' ]\n }\n]']
elif format == 'gmsh':
colormap = ['View.ColorTable = {'] \

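For reference, the new ParaView branch emits a JSON color map (ParaView 5.x style) instead of the former XML <ColorMap>; with purely illustrative point values and three steps, the output looks roughly like:

[
 {
  "ColorSpace" : "RGB", "Name" : "gray",
  "RGBPoints" : [
      0,1.000000,1.000000,1.000000,
      1,0.575000,0.575000,0.575000,
      2,0.150000,0.150000,0.150000 
   ]
 }
]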
View File

@ -10,6 +10,7 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
def curlFFT(geomdim,field):
shapeFFT = np.array(np.shape(field))[0:3]
grid = np.array(np.shape(field)[2::-1])
N = grid.prod() # field size
n = np.array(np.shape(field)[3:]).prod() # data size
@ -17,8 +18,8 @@ def curlFFT(geomdim,field):
if n == 3: dataType = 'vector'
elif n == 9: dataType = 'tensor'
field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2))
curl_fourier = np.zeros(field_fourier.shape,'c16')
field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2),s=shapeFFT)
curl_fourier = np.empty(field_fourier.shape,'c16')
# differentiation in Fourier space
k_s = np.zeros([3],'i')
@ -55,32 +56,32 @@ def curlFFT(geomdim,field):
curl_fourier[i,j,k,2] = ( field_fourier[i,j,k,1]*xi[0]\
-field_fourier[i,j,k,0]*xi[1]) *TWOPIIMG
return np.fft.fftpack.irfftn(curl_fourier,axes=(0,1,2)).reshape([N,n])
return np.fft.fftpack.irfftn(curl_fourier,axes=(0,1,2),s=shapeFFT).reshape([N,n])
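The added s=shapeFFT arguments pin the real-to-complex transforms to the original grid shape: without s, irfftn assumes an even length along the last transformed axis and silently drops one layer whenever that grid dimension is odd. A minimal NumPy sketch (illustrative shapes, using the public np.fft.rfftn/irfftn):

import numpy as np

field  = np.random.random((4, 4, 5))                          # odd length along the last transformed axis
f_hat  = np.fft.rfftn(field, axes=(0, 1, 2))                  # shape (4, 4, 3): last axis halved to 5//2 + 1
guess  = np.fft.irfftn(f_hat, axes=(0, 1, 2))                 # shape (4, 4, 4): even length assumed, layer lost
pinned = np.fft.irfftn(f_hat, s=field.shape, axes=(0, 1, 2))  # shape (4, 4, 5): original grid recovered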
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
parser = OptionParser(option_class=damask.extendableOption, usage='%prog option(s) [ASCIItable(s)]', description = """
Add column(s) containing curl of requested column(s).
Operates on periodic ordered three-dimensional data sets.
Deals with both vector- and tensor-valued fields.
Deals with both vector- and tensor fields.
""", version = scriptID)
parser.add_option('-c','--coordinates',
parser.add_option('-p','--pos','--periodiccellcenter',
dest = 'coords',
type = 'string', metavar = 'string',
help = 'column label of coordinates [%default]')
help = 'label of coordinates [%default]')
parser.add_option('-v','--vector',
dest = 'vector',
action = 'extend', metavar = '<string LIST>',
help = 'column label(s) of vector field values')
help = 'label(s) of vector field values')
parser.add_option('-t','--tensor',
dest = 'tensor',
action = 'extend', metavar = '<string LIST>',
help = 'column label(s) of tensor field values')
help = 'label(s) of tensor field values')
parser.set_defaults(coords = 'pos',
)
@ -90,7 +91,7 @@ parser.set_defaults(coords = 'pos',
if options.vector is None and options.tensor is None:
parser.error('no data column specified.')
# --- loop over input files -------------------------------------------------------------------------
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = [None]
@ -147,7 +148,7 @@ for name in filenames:
maxcorner = np.array(map(max,coords))
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equals smallest among the other directions
# ------------------------------------------ process value field -----------------------------------

View File

@ -10,15 +10,16 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
def divFFT(geomdim,field):
shapeFFT = np.array(np.shape(field))[0:3]
grid = np.array(np.shape(field)[2::-1])
N = grid.prod() # field size
n = np.array(np.shape(field)[3:]).prod() # data size
field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2))
div_fourier = np.zeros(field_fourier.shape[0:len(np.shape(field))-1],'c16') # size depends on whether tensor or vector
field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2),s=shapeFFT)
div_fourier = np.empty(field_fourier.shape[0:len(np.shape(field))-1],'c16') # size depends on whether tensor or vector
# differentiation in Fourier space
k_s=np.zeros([3],'i')
k_s = np.zeros([3],'i')
TWOPIIMG = 2.0j*math.pi
for i in xrange(grid[2]):
k_s[0] = i
@ -41,32 +42,32 @@ def divFFT(geomdim,field):
elif n == 3: # vector, 3 -> 1
div_fourier[i,j,k] = sum(field_fourier[i,j,k,0:3]*xi) *TWOPIIMG
return np.fft.fftpack.irfftn(div_fourier,axes=(0,1,2)).reshape([N,n/3])
return np.fft.fftpack.irfftn(div_fourier,axes=(0,1,2),s=shapeFFT).reshape([N,n/3])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
parser = OptionParser(option_class=damask.extendableOption, usage='%prog option(s) [ASCIItable(s)]', description = """
Add column(s) containing divergence of requested column(s).
Operates on periodic ordered three-dimensional data sets.
Deals with both vector- and tensor-valued fields.
""", version = scriptID)
parser.add_option('-c','--coordinates',
parser.add_option('-p','--pos','--periodiccellcenter',
dest = 'coords',
type = 'string', metavar = 'string',
help = 'column label of coordinates [%default]')
help = 'label of coordinates [%default]')
parser.add_option('-v','--vector',
dest = 'vector',
action = 'extend', metavar = '<string LIST>',
help = 'column label(s) of vector field values')
help = 'label(s) of vector field values')
parser.add_option('-t','--tensor',
dest = 'tensor',
action = 'extend', metavar = '<string LIST>',
help = 'column label(s) of tensor field values')
help = 'label(s) of tensor field values')
parser.set_defaults(coords = 'pos',
)

View File

@ -9,17 +9,17 @@ import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
def gradFFT(geomdim,field):
shapeFFT = np.array(np.shape(field))[0:3]
grid = np.array(np.shape(field)[2::-1])
N = grid.prod() # field size
n = np.array(np.shape(field)[3:]).prod() # data size
if n == 3: dataType = 'vector'
elif n == 1: dataType = 'scalar'
field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2))
grad_fourier = np.zeros(field_fourier.shape+(3,),'c16')
field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2),s=shapeFFT)
grad_fourier = np.empty(field_fourier.shape+(3,),'c16')
# differentiation in Fourier space
k_s = np.zeros([3],'i')
@ -46,32 +46,32 @@ def gradFFT(geomdim,field):
grad_fourier[i,j,k,1,:] = field_fourier[i,j,k,1]*xi *TWOPIIMG # tensor field from vector data
grad_fourier[i,j,k,2,:] = field_fourier[i,j,k,2]*xi *TWOPIIMG
return np.fft.fftpack.irfftn(grad_fourier,axes=(0,1,2)).reshape([N,3*n])
return np.fft.fftpack.irfftn(grad_fourier,axes=(0,1,2),s=shapeFFT).reshape([N,3*n])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
parser = OptionParser(option_class=damask.extendableOption, usage='%prog option(s) [ASCIItable(s)]', description = """
Add column(s) containing gradient of requested column(s).
Operates on periodic ordered three-dimensional data sets.
Deals with both vector- and scalar fields.
""", version = scriptID)
parser.add_option('-c','--coordinates',
parser.add_option('-p','--pos','--periodiccellcenter',
dest = 'coords',
type = 'string', metavar='string',
help = 'column label of coordinates [%default]')
type = 'string', metavar = 'string',
help = 'label of coordinates [%default]')
parser.add_option('-v','--vector',
dest = 'vector',
action = 'extend', metavar = '<string LIST>',
help = 'column label(s) of vector field values')
help = 'label(s) of vector field values')
parser.add_option('-s','--scalar',
dest = 'scalar',
action = 'extend', metavar = '<string LIST>',
help = 'column label(s) of scalar field values')
help = 'label(s) of scalar field values')
parser.set_defaults(coords = 'pos',
)
@ -138,7 +138,7 @@ for name in filenames:
maxcorner = np.array(map(max,coords))
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equals smallest among the other directions
# ------------------------------------------ process value field -----------------------------------

View File

@ -14,7 +14,7 @@ scriptID = ' '.join([scriptName,damask.version])
#Borland, D., & Taylor, R. M. (2007). Rainbow Color Map (Still) Considered Harmful. Computer Graphics and Applications, IEEE, 27(2), 14--17.
#Moreland, K. (2009). Diverging Color Maps for Scientific Visualization. In Proc. 5th Int. Symp. Visual Computing (pp. 92--103).
outtypes = ['paraview','gmsh','raw','GOM']
extensions = ['.xml','.msh','.txt','.legend']
extensions = ['.json','.msh','.txt','.legend']
colormodels = ['RGB','HSL','XYZ','CIELAB','MSH']
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
@ -34,11 +34,9 @@ parser.add_option('-r','--right', dest='right', type='float', nargs=3, metavar='
parser.add_option('-c','--colormodel', dest='colormodel', metavar='string',
help='colormodel: '+', '.join(colormodels)+' [%default]')
parser.add_option('-p','--predefined', dest='predefined', metavar='string',
help='predefined colormap [%default]')
help='predefined colormap')
parser.add_option('-f','--format', dest='format', metavar='string',
help='output format: '+', '.join(outtypes)+' [%default]')
parser.add_option('-b','--basename', dest='basename', metavar='string',
help='basename of output file [%default]')
parser.set_defaults(colormodel = 'RGB')
parser.set_defaults(predefined = None)
parser.set_defaults(basename = None)
@ -48,7 +46,7 @@ parser.set_defaults(trim = (-1.0,1.0))
parser.set_defaults(left = (1.0,1.0,1.0))
parser.set_defaults(right = (0.0,0.0,0.0))
(options,filenames) = parser.parse_args()
(options,filename) = parser.parse_args()
if options.format not in outtypes:
parser.error('invalid format: "%s" (can be %s).'%(options.format,', '.join(outtypes)))
@ -62,10 +60,10 @@ if options.trim[0] < -1.0 or \
parser.error('invalid trim range (-1 +1).')
name = options.format if options.basename is None\
else options.basename
output = sys.stdout if options.basename is None\
else open(os.path.basename(options.basename)+extensions[outtypes.index(options.format)],'w')
name = options.format if filename[0] is None\
else filename[0]
output = sys.stdout if filename[0] is None\
else open(os.path.basename(filename[0])+extensions[outtypes.index(options.format)],'w')
colorLeft = damask.Color(options.colormodel.upper(), list(options.left))
colorRight = damask.Color(options.colormodel.upper(), list(options.right))

View File

@ -1,206 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys,math
import numpy as np
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Generate geometry description and material configuration from EBSD data in given square-gridded 'ang' file.
Two phases can be discriminated based on threshold value in a given data column.
""", version = scriptID)
parser.add_option('--column', dest='column', type='int', metavar = 'int',
help='data column to discriminate between both phases [%default]')
parser.add_option('-t','--threshold', dest='threshold', type='float', metavar = 'float',
help='threshold value for phase discrimination [%default]')
parser.add_option('--homogenization', dest='homogenization', type='int', metavar = 'int',
help='homogenization index for <microstructure> configuration [%default]')
parser.add_option('--phase', dest='phase', type='int', nargs = 2, metavar = 'int int',
help='phase indices for <microstructure> configuration %default')
parser.add_option('--crystallite', dest='crystallite', type='int', metavar = 'int',
help='crystallite index for <microstructure> configuration [%default]')
parser.add_option('-c','--compress', dest='compress', action='store_true',
help='lump identical microstructure and texture information [%default]')
parser.add_option('-a', '--axes', dest='axes', nargs = 3, metavar = 'string string string',
help='Euler angle coordinate system for <texture> configuration x,y,z = %default')
parser.add_option('-p', '--precision', dest='precision', choices=['0','1','2','3'], metavar = 'int',
help = 'Euler angle decimal places for output format and compression (0,1,2,3) [2]')
parser.set_defaults(column = 11,
threshold = 0.5,
homogenization = 1,
phase = [1,2],
crystallite = 1,
compress = False,
axes = ['y','x','-z'],
precision = '2',
)
(options,filenames) = parser.parse_args()
for i in options.axes:
if i.lower() not in ['x','+x','-x','y','+y','-y','z','+z','-z']:
parser.error('invalid axes %s %s %s' %(options.axes[0],options.axes[1],options.axes[2]))
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[-2]+'.geom' if name else name,
buffered = False, labeled = False)
except: continue
damask.util.report(scriptName,name)
info = {
'grid': np.ones(3,'i'),
'size': np.zeros(3,'d'),
'origin': np.zeros(3,'d'),
'microstructures': 0,
'homogenization': options.homogenization
}
coords = [{},{},{1:True}]
pos = {'min':[ float("inf"), float("inf")],
'max':[-float("inf"),-float("inf")]}
phase = []
eulerangles = []
# --------------- read data -----------------------------------------------------------------------
errors = []
while table.data_read():
words = table.data
if words[0] == '#': # process initial comments/header block
if len(words) > 2:
if words[2].lower() == 'hexgrid':
errors.append('The file has HexGrid format. Please first convert to SquareGrid...')
break
else:
currPos = words[3:5]
for i in xrange(2):
coords[i][currPos[i]] = True
currPos = map(float,currPos)
for i in xrange(2):
pos['min'][i] = min(pos['min'][i],currPos[i])
pos['max'][i] = max(pos['max'][i],currPos[i])
eulerangles.append(map(math.degrees,map(float,words[:3])))
phase.append(options.phase[int(float(words[options.column-1]) > options.threshold)])
if errors != []:
damask.util.croak(errors)
continue
# --------------- determine size and grid ---------------------------------------------------------
info['grid'] = np.array(map(len,coords),'i')
info['size'][0:2] = info['grid'][0:2]/(info['grid'][0:2]-1.0)* \
np.array([pos['max'][0]-pos['min'][0],
pos['max'][1]-pos['min'][1]],'d')
info['size'][2]=info['size'][0]/info['grid'][0]
eulerangles = np.array(eulerangles,dtype='f').reshape(info['grid'].prod(),3)
phase = np.array(phase,dtype='i').reshape(info['grid'].prod())
limits = [360,180,360]
if any([np.any(eulerangles[:,i]>=limits[i]) for i in [0,1,2]]):
errors.append('Error: Euler angles out of bounds. Ang file might contain unindexed points.')
for i,angle in enumerate(['phi1','PHI','phi2']):
for n in np.nditer(np.where(eulerangles[:,i]>=limits[i]),['zerosize_ok']):
errors.append('%s in line %i (%4.2f %4.2f %4.2f)\n'
%(angle,n,eulerangles[n,0],eulerangles[n,1],eulerangles[n,2]))
if errors != []:
damask.util.croak(errors)
continue
eulerangles=np.around(eulerangles,int(options.precision)) # round to desired precision
# ensure that rounded Euler angles are not out of bounds (modulo by limits)
for i,angle in enumerate(['phi1','PHI','phi2']):
eulerangles[:,i]%=limits[i]
# scale angles by the desired precision and convert to int. Create a unique integer key from the three Euler angles by
# concatenating their zero-padded string representations, store it as an integer, and search for unique Euler angle keys.
# Texture IDs are the indices of the first occurrences; the inverse is used to construct the microstructure.
# Create a microstructure (texture/phase pair) for each point using the unique texture IDs.
# Use longInt (64 bit, i8) because the keys might be long.
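# worked example (precision 2, hypothetical angles): (12.34, 56.78, 90.12)
#   scaled to integers     -> (1234, 5678, 9012)
#   zero-padded to width 5 -> '01234' '05678' '09012'
#   concatenated key       -> 12340567809012 (stored as a 64-bit integer)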
if options.compress:
formatString='{0:0>'+str(int(options.precision)+3)+'}'
euleranglesRadInt = (eulerangles*10**int(options.precision)).astype('int')
eulerKeys = np.array([int(''.join(map(formatString.format,euleranglesRadInt[i,:]))) \
for i in xrange(info['grid'].prod())])
devNull, texture, eulerKeys_idx = np.unique(eulerKeys, return_index = True, return_inverse=True)
msFull = np.array([[eulerKeys_idx[i],phase[i]] for i in xrange(info['grid'].prod())],'i8')
devNull,msUnique,matPoints = np.unique(msFull.view('c16'),True,True)
matPoints+=1
microstructure = np.array([msFull[i] for i in msUnique]) # pick only unique microstructures
else:
texture = np.arange(info['grid'].prod())
microstructure = np.hstack( zip(texture,phase) ).reshape(info['grid'].prod(),2) # create texture/phase pairs
formatOut = 1+int(math.log10(len(texture)))
textureOut =['<texture>']
eulerFormatOut='%%%i.%if'%(int(options.precision)+4,int(options.precision))
outStringAngles='(gauss) phi1 '+eulerFormatOut+' Phi '+eulerFormatOut+' phi2 '+eulerFormatOut+' scatter 0.0 fraction 1.0'
for i in xrange(len(texture)):
textureOut += ['[Texture%s]'%str(i+1).zfill(formatOut),
'axes %s %s %s'%(options.axes[0],options.axes[1],options.axes[2]),
outStringAngles%tuple(eulerangles[texture[i],...])
]
formatOut = 1+int(math.log10(len(microstructure)))
microstructureOut =['<microstructure>']
for i in xrange(len(microstructure)):
microstructureOut += ['[Grain%s]'%str(i+1).zfill(formatOut),
'crystallite\t%i'%options.crystallite,
'(constituent)\tphase %i\ttexture %i\tfraction 1.0'%(microstructure[i,1],microstructure[i,0]+1)
]
info['microstructures'] = len(microstructure)
#--- report ---------------------------------------------------------------------------------------
damask.util.croak('grid a b c: %s\n'%(' x '.join(map(str,info['grid']))) +
'size x y z: %s\n'%(' x '.join(map(str,info['size']))) +
'origin x y z: %s\n'%(' : '.join(map(str,info['origin']))) +
'homogenization: %i\n'%info['homogenization'] +
'microstructures: %i\n\n'%info['microstructures'])
if np.any(info['grid'] < 1):
damask.util.croak('invalid grid a b c.\n')
continue
if np.any(info['size'] <= 0.0):
damask.util.croak('invalid size x y z.\n')
continue
#--- write data/header --------------------------------------------------------------------------------
table.info_clear()
table.info_append([' '.join([scriptID] + sys.argv[1:]),
"grid\ta %i\tb %i\tc %i"%(info['grid'][0],info['grid'][1],info['grid'][2],),
"size\tx %f\ty %f\tz %f"%(info['size'][0],info['size'][1],info['size'][2],),
"origin\tx %f\ty %f\tz %f"%(info['origin'][0],info['origin'][1],info['origin'][2],),
"microstructures\t%i"%info['microstructures'],
"homogenization\t%i"%info['homogenization'],
] +
[line for line in microstructureOut + textureOut]
)
table.head_write()
if options.compress:
matPoints = matPoints.reshape((info['grid'][1],info['grid'][0]))
table.data = matPoints
table.data_writeArray('%%%ii'%(1+int(math.log10(np.amax(matPoints)))),delimiter=' ')
else:
table.output_write("1 to %i\n"%(info['microstructures']))
table.output_flush()
# --- output finalization --------------------------------------------------------------------------
table.close()

View File

@ -1,297 +0,0 @@
#!/usr/bin/env python
##
# This script will read in all the seeds and partition the space
# using scipy.spatial.Delaunay triangulation.
# The unknown locations will then be interpolated through the barycentric
# interpolation method, which relies on the triangulation.
# A rim will be automatically added to the patch, which will help
# improve the compatibility with the spectral solver as well as
# maintain a meaningful microstructure (reduce artifacts).
import os
import numpy as np
import argparse
from scipy.spatial import Delaunay
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
OFFSET = 0.1 #resize the seeded volume to give space for rim/pan
PHANTOM_ID = -1 #grain ID for phantom seeds
def d_print(info, data, separator=False):
"""quickly print debug information"""
if(separator): print "*"*80
print info
print data
def meshgrid2(*arrs):
"""code inspired by http://stackoverflow.com/questions/1827489/numpy-meshgrid-in-3d"""
arrs = tuple(reversed(arrs))
arrs = tuple(arrs)
lens = np.array(map(len, arrs))
dim = len(arrs)
ans = []
for i, arr in enumerate(arrs):
slc = np.ones(dim,'i')
slc[i] = lens[i]
arr2 = np.asarray(arr).reshape(slc)
for j, sz in enumerate(lens):
if j != i:
arr2 = arr2.repeat(sz, axis=j)
ans.insert(0,arr2)
return tuple(ans)
#prepare command line interface
parser = argparse.ArgumentParser(prog="geoFromBarycentic",
description='''Generate geom file through \
Barycentric interpolating seeds file.''',
epilog="requires numpy, and scipy.")
parser.add_argument("seeds",
help="seeds file in DAMASK format:\
http://damask.mpie.de/Documentation/AsciiTableFormat",
default="test.seeds")
parser.add_argument("-v", "--version",
action="version",
version="%(prog)s 0.1")
parser.add_argument("-g", "--grid",
nargs=3,
help="grid size(mesh resolution, recommend using 2^x)",
default=[32,32,32],
type=int)
parser.add_argument("-s", "--size",
help="physical size of the target volume.",
nargs=3,
default=[1.0,1.0,1.0],
type=float)
parser.add_argument("-o", "--origin",
help="lower left corner of the patch.",
nargs=3,
default=[0.0,0.0,0.0],
type=float)
parser.add_argument('-m', '--homogenization',
help='homogenization index to be used',
default=1,
type=int)
parser.add_argument('-c', '--crystallite',
help='crystallite index to be used',
default=1,
type=int)
parser.add_argument('-p', '--phase',
help='phase index to be used',
default=1,
type=int)
parser.add_argument('-F', '--Favg',
help='reshape the periodicity, not useful for RIM method',
nargs=9,
default=[1.0,0.0,0.0,
0.0,1.0,0.0,
0.0,0.0,1.0],
type=float)
parser.add_argument("-G", "--geomFile",
help='the name of the output geom file',
default='seeds.geom',
type=str)
parser.add_argument("-C", "--configFile",
help='output dummy material.config file',
action='store_true',
default=False)
parser.add_argument("-d", "--debug",
help="start debugging script",
action='store_true',
default=False)
parser.add_argument("-S", "--seedsFile",
help="write out resized seeds file",
action='store_true',
default=False)
parser.add_argument("-r", '--addRim',
help="add rim and provide control of face lifting point",
action='store_true',
default=False)
args = parser.parse_args() # get all the arguments right after
#quick help to user
print "*"*80
parser.print_help()
print """Sample usage:
./geoFromBarycentic.py 20grains.seeds -g 128 128 128 -S -r; geom_check seeds.geom; seeds_check new_seed.seeds.
"""
print "*"*80
if (args.debug):
d_print("args are:", parser.parse_args(),separator=True)
#/\/\/\/\/#
# m a i n #
#\/\/\/\/\#
print "only work for 3D case now, 2D support coming soon..."
print "reading seeds file: {}".format(args.seeds)
with open(args.seeds, 'r') as f:
rawtext = f.readlines()
n_header = int(rawtext.pop(0).split()[0])
#record all the seeds position
if (args.addRim):
grid_shift = np.array(args.size) * np.array([OFFSET,OFFSET,OFFSET*2])
s_coords = np.array([[np.array(float(item))*(1 - OFFSET*2)
for item in line.split()[:3]] + grid_shift
for line in rawtext[n_header:]])
else:
#no need for shifting with periodicity
s_coords = np.array([[np.array(float(item))
for item in line.split()[:3]]
for line in rawtext[n_header:]])
#record ID of the seeds: int/EulerAngles
if 'microstructure' in rawtext[n_header-1]:
s_id = [int(line.split()[-1]) for line in rawtext[n_header:]]
else:
print "WARNING:"
print "THIS SCRIPT DOES NOT UNDERSTAND HOW TO GROUP CRYSTALLITES."
print "ALL CRYSTAL ORIENTATIONS ARE CONSIDERED TO BE UNIQUE."
print "FOR MORE ACCURATE CONTROL OF SEEDS GROUPING, USE MICROSTRUCTURE ID."
s_id = range(len(s_coords))
#s_eulers here is just a quick book keeping
s_eulers = np.array([[float(item) for item in line.split()[3:]]
for line in rawtext[n_header:]])
if(args.debug):
print d_print("resize point cloud to make space for rim/pan:",
s_coords)
if(args.addRim):
#add a bounding box to create a rim/pan for the volume where the seed IDs are
#unknown
print "Shrining the seeds to {}x in each direction".format(1 - OFFSET*2)
x,y,z = args.size[0],args.size[1],args.size[2]
print "Use circumscribed sphere to place phantom seeds."
r = np.sqrt(x**2+y**2+z**2)/2.0
BINDBOX = [[0,0,0],[x,0,0],[0,y,0],[x,y,0],
[0,0,z],[x,0,z],[0,y,z],[x,y,z],
[x/2.0+r,y/2, z/2], [x/2.0-r, y/2, z/2],
[x/2, y/2.0+r, z/2], [x/2, y/2.0-r, z/2],
[x/2, y/2, z/2.0-r]] #8 corners + 5 face centers (no top)
print "Adding phantom seeds for RIM generation:"
for point in BINDBOX:
print point
s_coords = np.vstack([s_coords,point])
s_id.append(PHANTOM_ID)
else:
#The idea here is that we read in each seed point, then duplicate it in 3D (make a few copies),
#move on to the next seed point, and repeat the same procedure. As for the ID list, we can just use the
#same one. The trick here is to use floor division to find the correct id, since we pretty much duplicate
#the same point several times.
Favg = np.array(args.Favg).reshape((3,3))
x,y,z = args.size[0],args.size[1],args.size[2]
tmp = []
for seed in s_coords:
tmp += [np.dot(Favg, np.array(seed) + np.array([dx,dy,dz]))
for dz in [-z, 0, z]
for dy in [-y, 0, y]
for dx in [-x, 0, x]]
s_coords = tmp
if (args.seedsFile):
with open("new_seed.seeds", "w") as f:
outstr = "4\theader\n"
outstr += "grid\ta {}\tb {}\tc {}\n".format(args.grid[0],
args.grid[1],
args.grid[2])
outstr += "microstructures {}\n".format(len(set(s_id)))
outstr += "x\ty\tz\tmicrostructure"
if (args.addRim):
for i in range(len(s_id)):
outstr += "{}\t{}\t{}\t{}\n".format(s_coords[i][0],
s_coords[i][1],
s_coords[i][2],
s_id[i])
else:
for i in range(len(s_coords)):
outstr += "{}\t{}\t{}\t{}\n".format(s_coords[i][0],
s_coords[i][1],
s_coords[i][2],
s_id[i//3**3])
f.write(outstr)
#triangulate space with given point-clouds
tri = Delaunay(s_coords)
if(args.debug):
d_print("simplices:", tri.simplices, separator=True)
d_print("vertices:", s_coords[tri.simplices])
#populate grid points (only 3D for now)
'''
#populating grid using meshgrid2
x = (np.arange(args.grid[0])+0.5)*args.size[0]/args.grid[0]
y = (np.arange(args.grid[1])+0.5)*args.size[1]/args.grid[1]
z = (np.arange(args.grid[2])+0.5)*args.size[2]/args.grid[2]
mesh_pts = np.transpose(np.vstack(map(np.ravel, meshgrid2(x, y, z))))
print mesh_pts
'''
#this is actually faster than using meshgrid2
mesh_pts = [[(i+0.5)*args.size[0]/args.grid[0],
(j+0.5)*args.size[1]/args.grid[1],
(k+0.5)*args.size[2]/args.grid[2]]
for k in range(args.grid[2])
for j in range(args.grid[1])
for i in range(args.grid[0])]
mesh_ids = [PHANTOM_ID*2]*len(mesh_pts) #initialize grid
#search ID for each grid point
s_id = np.array(s_id) #allow multi-indexing
mesh_idx = tri.find_simplex(mesh_pts)
for i, pt in enumerate(mesh_pts):
if mesh_idx[i] < 0:
continue #didn't find any enveloping tetrahedron --> something wrong!
#calculate Barycentric coordinates
bary_c = tri.transform[mesh_idx[i],:3,:3].dot(pt-tri.transform[mesh_idx[i],3,:])
bary_c = np.append(bary_c, 1 - bary_c.sum())
if (args.addRim):
tmp_ids = s_id[tri.simplices[mesh_idx[i]]] #rim method
else:
tmp_ids = np.array(s_id[tri.simplices[mesh_idx[i]]//(3**3)]) #kill periodicity through floor division
#print tmp_ids
#print tri.simplices[mesh_idx[i]]//(3**3)
max_weight = -1960
for this_id in tmp_ids:
msk = [item==this_id for item in tmp_ids] #find vertex with the same id
tmp_weight = sum([bary_c[j] for j in range(len(bary_c)) if msk[j]])
if tmp_weight > max_weight:
max_weight = tmp_weight
mesh_ids[i] = this_id
if (args.debug):
d_print("bary_c:",bary_c,separator=True)
d_print("vertex ID:", tmp_ids)
d_print("final ID:", mesh_ids[i])
mesh_ids = np.reshape(mesh_ids, (-1, args.grid[0]))
#write to file
with open(args.geomFile, "w") as f:
outstr = "5\theader\n"
outstr += "grid\ta {}\tb {}\tc {}\n".format(args.grid[0],
args.grid[1],
args.grid[2])
outstr += "size\tx {}\ty {}\tz {}\n".format(args.size[0],
args.size[1],
args.size[2])
outstr += "origin\tx {}\ty {}\tz {}\n".format(args.origin[0],
args.origin[1],
args.origin[2])
outstr += "homogenization\t{}\nmicrostructure\t{}\n".format(args.homogenization,
len(set(s_id)))
for row in mesh_ids:
row = [str(item) for item in list(row)]
outstr += "\t".join(row) + "\n"
f.write(outstr)
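
The core lookup this deleted script performs (triangulate the seed cloud, find the enclosing tetrahedron for each cell center, convert to barycentric weights via the simplex transform) can be sketched in isolation with SciPy's Delaunay attributes; data and names below are illustrative:

import numpy as np
from scipy.spatial import Delaunay

seeds = np.random.random((20, 3))            # illustrative seed positions
tri   = Delaunay(seeds)                      # tetrahedralization of the seed cloud
pt    = np.array([0.5, 0.5, 0.5])            # query point, e.g. a grid cell center
s     = tri.find_simplex(pt)                 # index of the enclosing tetrahedron (-1 if outside the hull)
if s >= 0:
    T    = tri.transform[s]                  # affine map to barycentric coordinates
    b    = T[:3, :3].dot(pt - T[3, :])
    bary = np.append(b, 1.0 - b.sum())       # weights of the four simplex vertices
    ids  = tri.simplices[s]                  # seed indices of those vertices; the script keeps the id with the largest summed weight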

View File

@ -0,0 +1,55 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Adds header to OIM grain file to make it accessible as an ASCII table
""", version = scriptID)
parser.add_option('-l', '--labels',
dest = 'labels',
help = 'labels of requested columns')
parser.set_defaults(labels = ['1_euler','2_euler','3_euler',
'1_pos','2_pos', 'IQ', 'CI', 'Fit', 'GrainID',],
)
(options, filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name)
table.head_read()
data = []
while table.data_read():
data.append(table.data[0:len(options.labels)])
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.labels_append(options.labels)
table.head_write()
for i in data:
table.data = i
table.data_write()
# --- output finalization --------------------------------------------------------------------------
table.close() # close ASCII table