Merge branch 'development' into new-ASCII

commit 81abc43920
@@ -14,7 +14,7 @@ SolidSolutionStrength 1.5e8 # Strength due to elements in solid solution
 ### Dislocation glide parameters ###
 #per family
-Nslip 12 0
+Nslip 12
 slipburgers 2.72e-10 # Burgers vector of slip system [m]
 rhoedge0 1.0e12 # Initial edge dislocation density [m/m**3]
 rhoedgedip0 1.0 # Initial edged dipole dislocation density [m/m**3]

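Note on the Nslip change: in a DAMASK phase section, Nslip lists the number of active slip systems per family, and my reading of the format is that families left unspecified default to inactive, so the trailing 0 is redundant. A hypothetical two-family sketch for comparison:

    Nslip 12 0    # 12 systems in family 1, family 2 explicitly inactive
    Nslip 12      # same effect if unspecified families default to 0
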
@@ -65,7 +65,7 @@ for filename in options.filenames:
     x = results.get_dataset_location(label)
     if len(x) == 0:
       continue
-    array = results.read_dataset(x,0)
+    array = results.read_dataset(x,0,plain=True)
     d = int(np.product(np.shape(array)[1:]))
     data = np.concatenate((data,np.reshape(array,[np.product(results.grid),d])),1)

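Note: read_dataset(...,plain=True) matters here because the subsequent np.reshape assumes an unstructured float array; a structured dtype with named fields would not reshape to (points, d). A minimal sketch of the difference (hypothetical field names, not the actual DADF5 layout):

    import numpy as np

    a = np.zeros(3, dtype=[('x','float64'),('y','float64'),('z','float64')])
    a.view(('float64',3)).shape   # (3, 3): plain float view, reshape-friendly
    # np.reshape(a,(3,3))         # fails: the structured array has shape (3,)
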
@@ -80,7 +80,7 @@ for filename in options.filenames:
     x = results.get_dataset_location(label)
     if len(x) == 0:
       continue
-    array = results.read_dataset(x,0)
+    array = results.read_dataset(x,0,plain=True)
     d = int(np.product(np.shape(array)[1:]))
     data = np.concatenate((data,np.reshape(array,[np.product(results.grid),d])),1)

@@ -132,7 +132,4 @@ for name in filenames:
   damask.util.croak(geom.update(np.where(mask,geom.microstructure,fill)))
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)

@@ -61,7 +61,7 @@ for name in filenames:
   canvas = np.full(new,options.fill if options.fill is not None
                    else np.nanmax(geom.microstructure)+1,geom.microstructure.dtype)

-  l = np.clip( offset, 0,np.minimum(old +offset,new))
+  l = np.clip( offset, 0,np.minimum(old +offset,new)) # noqa
   r = np.clip( offset+old,0,np.minimum(old*2+offset,new))
   L = np.clip(-offset, 0,np.minimum(new -offset,old))
   R = np.clip(-offset+new,0,np.minimum(new*2-offset,old))

@@ -71,7 +71,4 @@ for name in filenames:
   damask.util.croak(geom.update(canvas,origin=origin+offset*size/old,rescale=True))
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)

@@ -5,9 +5,6 @@ import sys
 from io import StringIO
 from optparse import OptionParser

-from scipy import ndimage
-import numpy as np
-
 import damask

@@ -15,11 +12,6 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID = ' '.join([scriptName,damask.version])


-def mostFrequent(arr):
-  unique, inverse = np.unique(arr, return_inverse=True)
-  return unique[np.argmax(np.bincount(inverse))]
-
-
 #--------------------------------------------------------------------------------------------------
 # MAIN
 #--------------------------------------------------------------------------------------------------

@@ -45,13 +37,6 @@ for name in filenames:
   damask.util.report(scriptName,name)

   geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
-  damask.util.croak(geom.update(ndimage.filters.generic_filter(
-                                geom.microstructure,mostFrequent,
-                                size=(options.stencil,)*3).astype(geom.microstructure.dtype)))
+  damask.util.croak(geom.clean(options.stencil))
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)

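Note: the majority filter deleted here presumably moved into the library as Geom.clean. A self-contained sketch of the equivalent operation, reusing the removed mostFrequent helper (the exact Geom.clean signature beyond the stencil size is an assumption):

    import numpy as np
    from scipy import ndimage

    def mostFrequent(arr):
        # majority vote over the stencil neighborhood
        unique, inverse = np.unique(arr, return_inverse=True)
        return unique[np.argmax(np.bincount(inverse))]

    def clean(microstructure, stencil=3):
        # replace each voxel by the most frequent ID in its stencil**3 neighborhood
        return ndimage.filters.generic_filter(
            microstructure, mostFrequent,
            size=(stencil,)*3).astype(microstructure.dtype)
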
@@ -86,7 +86,7 @@ for name in filenames:
              * inFile[os.path.join(group_geom,'SPACING')][...]
     grid   = inFile[os.path.join(group_geom,'DIMENSIONS')][...]
     origin = inFile[os.path.join(group_geom,'ORIGIN')][...]
-  except:
+  except KeyError:
     errors.append('Geometry data ({}) not found'.format(group_geom))

@@ -98,13 +98,13 @@ for name in filenames:
   try:
     quats = np.reshape(inFile[dataset][...],(np.product(grid),4))
     rot = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in quats]
-  except:
+  except KeyError:
     errors.append('Pointwise orientation (quaternion) data ({}) not readable'.format(dataset))

   dataset = os.path.join(group_pointwise,options.phase)
   try:
     phase = np.reshape(inFile[dataset][...],(np.product(grid)))
-  except:
+  except KeyError:
     errors.append('Pointwise phase data ({}) not readable'.format(dataset))

   microstructure = np.arange(1,np.product(grid)+1,dtype=int).reshape(grid,order='F')

@@ -116,7 +116,7 @@ for name in filenames:
   dataset = os.path.join(group_pointwise,options.microstructure)
   try:
     microstructure = np.transpose(inFile[dataset][...].reshape(grid[::-1]),(2,1,0)) # convert from C ordering
-  except:
+  except KeyError:
     errors.append('Link between pointwise and grain average data ({}) not readable'.format(dataset))

   group_average = os.path.join(rootDir,options.basegroup,options.average)

@@ -124,13 +124,13 @@ for name in filenames:
   dataset = os.path.join(group_average,options.quaternion)
   try:
     rot = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in inFile[dataset][...][1:]] # skip first entry (unindexed)
-  except:
+  except KeyError:
     errors.append('Average orientation data ({}) not readable'.format(dataset))

   dataset = os.path.join(group_average,options.phase)
   try:
     phase = [i[0] for i in inFile[dataset][...]][1:] # skip first entry (unindexed)
-  except:
+  except KeyError:
     errors.append('Average phase data ({}) not readable'.format(dataset))

   if errors != []:

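Note: narrowing the bare except clauses is more than style. A bare except also swallows KeyboardInterrupt, SystemExit, and genuine bugs such as a NameError from a typo, turning all of them into the misleading 'not readable' message; h5py raises KeyError for a missing path, which is the one failure these handlers mean to report:

    try:
        grid = inFile['/does/not/exist'][...]   # h5py: KeyError for a missing path
    except KeyError:                            # only the expected failure mode
        errors.append('...')
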
@@ -155,4 +155,4 @@ for name in filenames:
                      homogenization=options.homogenization,comments=header)
   damask.util.croak(geom)

-  geom.to_file(os.path.splitext(name)[0]+'.geom')
+  geom.to_file(os.path.splitext(name)[0]+'.geom',pack=False)

@@ -89,7 +89,4 @@ geom=damask.Geom(microstructure,options.size,
                  comments=[scriptID + ' ' + ' '.join(sys.argv[1:])])
 damask.util.croak(geom)

-if name is None:
-  sys.stdout.write(str(geom.show()))
-else:
-  geom.to_file(name)
+geom.to_file(sys.stdout if name is None else name,pack=False)

@@ -145,7 +145,4 @@ geom = damask.Geom(microstructure.reshape(grid),
                    homogenization=options.homogenization,comments=header)
 damask.util.croak(geom)

-if name is None:
-  sys.stdout.write(str(geom.show()))
-else:
-  geom.to_file(name)
+geom.to_file(sys.stdout if name is None else name,pack=False)

@@ -63,7 +63,4 @@ geom = damask.Geom(microstructure=np.full(options.grid,options.fill,dtype=dtype)
                    comments=scriptID + ' ' + ' '.join(sys.argv[1:]))
 damask.util.croak(geom)

-if name is None:
-  sys.stdout.write(str(geom.show()))
-else:
-  geom.to_file(name)
+geom.to_file(sys.stdout if name is None else name,pack=False)

@@ -152,7 +152,4 @@ for name in filenames:
                      homogenization=options.homogenization,comments=header)
   damask.util.croak(geom)

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(os.path.splitext(name)[0]+'.geom')
+  geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',pack=False)

@@ -302,7 +302,4 @@ for name in filenames:
                      homogenization=options.homogenization,comments=header)
   damask.util.croak(geom)

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(os.path.splitext(name)[0]+'.geom')
+  geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',pack=False)

@@ -172,7 +172,4 @@ for name in filenames:
   damask.util.croak(geom.update(microstructure[0:grid_original[0],0:grid_original[1],0:grid_original[2]]))
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)

@@ -5,8 +5,6 @@ import sys
 from io import StringIO
 from optparse import OptionParser

-import numpy as np
-
 import damask

@@ -38,35 +36,12 @@ parser.set_defaults(reflect = False)

 (options, filenames) = parser.parse_args()

-if options.directions is None:
-  parser.error('no direction given.')
-
-if not set(options.directions).issubset(validDirections):
-  invalidDirections = [str(e) for e in set(options.directions).difference(validDirections)]
-  parser.error('invalid directions {}. '.format(*invalidDirections))
-
-limits = [None,None] if options.reflect else [-2,0]
-
-
 if filenames == []: filenames = [None]

 for name in filenames:
   damask.util.report(scriptName,name)

   geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
-  microstructure = geom.get_microstructure()
-  if 'z' in options.directions:
-    microstructure = np.concatenate([microstructure,microstructure[:,:,limits[0]:limits[1]:-1]],2)
-  if 'y' in options.directions:
-    microstructure = np.concatenate([microstructure,microstructure[:,limits[0]:limits[1]:-1,:]],1)
-  if 'x' in options.directions:
-    microstructure = np.concatenate([microstructure,microstructure[limits[0]:limits[1]:-1,:,:]],0)
-
-  damask.util.croak(geom.update(microstructure,rescale=True))
+  damask.util.croak(geom.mirror(options.directions,options.reflect))
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)

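Note: Geom.mirror presumably absorbs the deleted per-axis logic: reflect along the requested directions, where reflect=True keeps the boundary layers and reflect=False drops them (the old limits = [-2,0] slice). A sketch of the z case under that assumption:

    import numpy as np

    def mirror_z(microstructure, reflect=False):
        # reflect=False skips the outermost layer so boundary voxels are not duplicated
        limits = [None, None] if reflect else [-2, 0]
        return np.concatenate(
            [microstructure, microstructure[:, :, limits[0]:limits[1]:-1]], 2)
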
@@ -34,41 +34,4 @@ for name in filenames:
   damask.util.croak(geom)
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

-  compressType = None
-  former = start = -1
-  reps = 0
-
-  if name is None:
-    f = sys.stdout
-  else:
-    f = open(name,'w')
-
-  for current in geom.microstructure.flatten('F'):
-    if abs(current - former) == 1 and (start - current) == reps*(former - current):
-      compressType = 'to'
-      reps += 1
-    elif current == former and start == former:
-      compressType = 'of'
-      reps += 1
-    else:
-      if compressType is None:
-        f.write('\n'.join(geom.get_header())+'\n')
-      elif compressType == '.':
-        f.write('{}\n'.format(former))
-      elif compressType == 'to':
-        f.write('{} to {}\n'.format(start,former))
-      elif compressType == 'of':
-        f.write('{} of {}\n'.format(reps,former))
-
-      compressType = '.'
-      start = current
-      reps = 1
-
-    former = current
-
-  if compressType == '.':
-    f.write('{}\n'.format(former))
-  elif compressType == 'to':
-    f.write('{} to {}\n'.format(start,former))
-  elif compressType == 'of':
-    f.write('{} of {}\n'.format(reps,former))
+  geom.to_file(sys.stdout if name is None else name,pack=True)

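Note: the run-length writer removed from this script survives as Geom.to_file(...,pack=True) (see the geom.py hunk below). The packed .geom format encodes constant runs as '<n> of <id>' and unit-step ramps as '<a> to <b>', so a flattened row like

    1 1 1 1 2 3 4

packs to

    4 of 1
    2 to 4
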
@@ -5,8 +5,6 @@ import sys
 from io import StringIO
 from optparse import OptionParser

-import numpy as np
-
 import damask

@@ -32,15 +30,6 @@ for name in filenames:
   damask.util.report(scriptName,name)

   geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
-  renumbered = np.empty(geom.get_grid(),dtype=geom.microstructure.dtype)
-  for i, oldID in enumerate(np.unique(geom.microstructure)):
-    renumbered = np.where(geom.microstructure == oldID, i+1, renumbered)
-
-  damask.util.croak(geom.update(renumbered))
+  damask.util.croak(geom.renumber())
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)

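Note: Geom.renumber presumably keeps the deleted semantics, mapping the sorted unique IDs onto 1..N. The same result falls out of np.unique's inverse indices, without the per-ID np.where pass:

    import numpy as np

    def renumber(microstructure):
        # np.unique returns sorted IDs; inverse holds each voxel's index into them
        _, inverse = np.unique(microstructure, return_inverse=True)
        return (inverse + 1).reshape(microstructure.shape).astype(microstructure.dtype)
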
@@ -2,11 +2,10 @@

 import os
 import sys
-import numpy as np

 from io import StringIO
 from optparse import OptionParser
-from scipy import ndimage
+
+import numpy as np

 import damask

@@ -55,20 +54,7 @@ for name in filenames:
              np.array([o*float(n.lower().replace('x','')) if n.lower().endswith('x') \
                        else float(n) for o,n in zip(size,options.size)],dtype=float)

-  damask.util.croak(geom.update(microstructure =
-                                ndimage.interpolation.zoom(
-                                  geom.microstructure,
-                                  new_grid/grid,
-                                  output=geom.microstructure.dtype,
-                                  order=0,
-                                  mode='nearest',
-                                  prefilter=False,
-                                ) if np.any(new_grid != grid) \
-                                else None,
-                                size = new_size))
+  geom.scale(new_grid)
+  damask.util.croak(geom.update(microstructure = None,size = new_size))
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
-
-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)

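Note: Geom.scale presumably wraps the deleted nearest-neighbor resampling; order=0 with prefilter=False treats microstructure IDs as labels and never interpolates between them. A sketch under that assumption:

    import numpy as np
    from scipy import ndimage

    def scale(microstructure, new_grid):
        grid = np.asarray(microstructure.shape)
        if np.all(new_grid == grid):
            return microstructure.copy()
        return ndimage.interpolation.zoom(
            microstructure, new_grid/grid,
            output=microstructure.dtype,
            order=0, mode='nearest', prefilter=False)   # nearest neighbor: no blended IDs
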
@@ -95,7 +95,4 @@ for name in filenames:
   damask.util.croak(geom.update(microstructure,origin=origin-(np.asarray(microstructure.shape)-grid)/2*size/grid,rescale=True))
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)

@@ -58,7 +58,4 @@ for name in filenames:
   damask.util.croak(geom.update(substituted,origin=geom.get_origin()+options.origin))
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)

@@ -34,7 +34,4 @@ for name in filenames:
   damask.util.croak(geom)
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)

@@ -82,7 +82,4 @@ for name in filenames:
                                          geom.microstructure + offset,geom.microstructure)))
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)

@@ -2,7 +2,7 @@
 import os

 with open(os.path.join(os.path.dirname(__file__),'VERSION')) as f:
   version = f.readline()[1:-1]

 name = 'damask'

@@ -2,7 +2,7 @@ import os
 import sys
 import re
 import shlex
-from collections import Iterable
+from collections.abc import Iterable

 import numpy as np

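Note: the abstract base classes moved to collections.abc in Python 3.3; importing them from collections kept working only through a deprecation alias, which Python 3.10 finally removed. The new import is the future-proof spelling:

    from collections.abc import Iterable

    isinstance([1, 2, 3], Iterable)   # True
    isinstance('abc', Iterable)       # True: strings are iterable too
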
@@ -15,7 +15,7 @@ except NameError:

 # ------------------------------------------------------------------
 class ASCIItable():
-  """Read and write to ASCII tables"""
+  """Read and write to ASCII tables."""

   tmpext = '_tmp' # filename extension for in-place access

@@ -27,6 +27,7 @@ class ASCIItable():
                labeled  = True,  # assume table has labels
                readonly = False, # no reading from file
               ):
+    """Read and write to ASCII tables."""
     self.__IO__ = {'output': [],
                    'buffered': buffered,
                    'labeled':  labeled, # header contains labels

@@ -72,7 +73,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
   def _removeCRLF(self,
                   string):
-    """Delete any carriage return and line feed from string"""
+    """Delete any carriage return and line feed from string."""
     try:
       return string.replace('\n','').replace('\r','')
     except AttributeError:

@@ -82,7 +83,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
   def _quote(self,
              what):
-    """Quote empty or white space-containing output"""
+    """Quote empty or white space-containing output."""
     return '{quote}{content}{quote}'.format(
            quote   = ('"' if str(what)=='' or re.search(r"\s",str(what)) else ''),
            content = what)

@@ -103,7 +104,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
   def output_write(self,
                    what):
-    """Aggregate a single row (string) or list of (possibly containing further lists of) rows into output"""
+    """Aggregate a single row (string) or list of (possibly containing further lists of) rows into output."""
     if isinstance(what, (str, unicode)):
       self.__IO__['output'] += [what]
     else:

@@ -143,7 +144,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
   def head_read(self):
     """
-    Get column labels
+    Get column labels.

     by either reading the first row or,
     if keyword "head[*]" is present, the last line of the header

@@ -154,7 +155,7 @@ class ASCIItable():
       pass

     firstline = self.__IO__['in'].readline().strip()
-    m = re.search('(\d+)\s+head', firstline.lower()) # search for "head" keyword
+    m = re.search(r'(\d+)\s+head', firstline.lower()) # search for "head" keyword

     if m: # proper ASCIItable format

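Note: '\d' and '\s' are invalid escape sequences in an ordinary string literal; Python 3.6+ emits a DeprecationWarning for them, with the stated plan that they eventually become an error. The raw string passes the backslashes through to the regex engine unchanged:

    import re

    m = re.search(r'(\d+)\s+head', '4 header')
    m.group(1)   # '4'
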
@@ -194,7 +195,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
   def head_write(self,
                  header = True):
-    """Write current header information (info + labels)"""
+    """Write current header information (info + labels)."""
     head = ['{}\theader'.format(len(self.info)+self.__IO__['labeled'])] if header else []
     head.append(self.info)
     if self.__IO__['labeled']:

@@ -205,7 +206,7 @@ class ASCIItable():

 # ------------------------------------------------------------------
   def head_getGeom(self):
-    """Interpret geom header"""
+    """Interpret geom header."""
     identifiers = {
             'grid':   ['a','b','c'],
             'size':   ['x','y','z'],

@@ -247,7 +248,7 @@ class ASCIItable():
   def labels_append(self,
                     what,
                     reset = False):
-    """Add item or list to existing set of labels (and switch on labeling)"""
+    """Add item or list to existing set of labels (and switch on labeling)."""
     if isinstance(what, (str, unicode)):
       self.tags += [self._removeCRLF(what)]
     else:

@@ -261,7 +262,7 @@ class ASCIItable():

 # ------------------------------------------------------------------
   def labels_clear(self):
-    """Delete existing labels and switch to no labeling"""
+    """Delete existing labels and switch to no labeling."""
     self.tags = []
     self.__IO__['labeled'] = False

@@ -392,7 +393,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
   def info_append(self,
                   what):
-    """Add item or list to existing set of infos"""
+    """Add item or list to existing set of infos."""
     if isinstance(what, (str, unicode)):
       self.info += [self._removeCRLF(what)]
     else:

@@ -403,7 +404,7 @@ class ASCIItable():

 # ------------------------------------------------------------------
   def info_clear(self):
-    """Delete any info block"""
+    """Delete any info block."""
     self.info = []

 # ------------------------------------------------------------------

@@ -416,7 +417,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
   def data_skipLines(self,
                      count):
-    """Wind forward by count number of lines"""
+    """Wind forward by count number of lines."""
     for i in range(count):
       alive = self.data_read()

@@ -426,7 +427,7 @@ class ASCIItable():
   def data_read(self,
                 advance = True,
                 respectLabels = True):
-    """Read next line (possibly buffered) and parse it into data array"""
+    """Read next line (possibly buffered) and parse it into data array."""
     self.line = self.__IO__['readBuffer'].pop(0) if len(self.__IO__['readBuffer']) > 0 \
                 else self.__IO__['in'].readline().strip() # take buffered content or get next data row from file

@@ -446,9 +447,11 @@ class ASCIItable():
 # ------------------------------------------------------------------
   def data_readArray(self,
                      labels = []):
-    """Read whole data of all (given) labels as numpy array"""
-    try:    self.data_rewind() # try to wind back to start of data
-    except: pass               # assume/hope we are at data start already...
+    """Read whole data of all (given) labels as numpy array."""
+    try:
+      self.data_rewind()       # try to wind back to start of data
+    except IOError:
+      pass                     # assume/hope we are at data start already...

     if labels is None or labels == []:
       use = None # use all columns (and keep labels intact)

@@ -480,7 +483,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
   def data_write(self,
                  delimiter = '\t'):
-    """Write current data array and report alive output back"""
+    """Write current data array and report alive output back."""
     if len(self.data) == 0: return True

     if isinstance(self.data[0],list):

@@ -492,16 +495,16 @@ class ASCIItable():
   def data_writeArray(self,
                       fmt = None,
                       delimiter = '\t'):
-    """Write whole numpy array data"""
+    """Write whole numpy array data."""
     for row in self.data:
       try:
         output = [fmt % value for value in row] if fmt else list(map(repr,row))
-      except:
+      except Exception:
         output = [fmt % row] if fmt else [repr(row)]

       try:
         self.__IO__['out'].write(delimiter.join(output) + '\n')
-      except:
+      except Exception:
         pass

 # ------------------------------------------------------------------

@@ -545,7 +548,7 @@ class ASCIItable():
                          grid,
                          type = 'i',
                          strict = False):
-    """Read microstructure data (from .geom format)"""
+    """Read microstructure data (from .geom format)."""
     def datatype(item):
       return int(item) if type.lower() == 'i' else float(item)

@@ -369,7 +369,7 @@ class DADF5():
       return f[self.get_dataset_location('orientation')[0]].attrs['Lattice'].astype('str') # np.bytes_ to string


-  def read_dataset(self,path,c):
+  def read_dataset(self,path,c=0,plain=False):
     """
     Dataset for all points/cells.

@@ -402,7 +402,10 @@ class DADF5():
         a=a.reshape([a.shape[0],1])
       dataset[p,:] = a[u,:]

-    return dataset
+    if plain and dataset.dtype.names is not None:
+      return dataset.view(('float64',len(dataset.dtype.names)))
+    else:
+      return dataset


   def cell_coordinates(self):

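Note: the added view is a zero-copy reinterpretation of a structured record array as a plain (points, components) float array; it assumes every named field is float64. Minimal demonstration with hypothetical field names:

    import numpy as np

    dataset = np.array([(1.,2.,3.),(4.,5.,6.)],
                       dtype=[('x','float64'),('y','float64'),('z','float64')])
    plain = dataset.view(('float64',len(dataset.dtype.names)))
    plain.shape   # (2, 3): same memory, now reshape- and concatenate-friendly
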
@@ -620,7 +623,7 @@ class DADF5():
       raise ValueError

     return {
-      'data':  mechanics.deviator(x['data']),
+      'data':  mechanics.deviatoric_part(x['data']),
      'label': 's_{}'.format(x['label']),
      'meta': {
       'Unit': x['meta']['Unit'],

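Note: only the name changes (deviator becomes deviatoric_part); the operation is still the split s = sigma - tr(sigma)/3 * I. Standalone numpy sketch for a single 3x3 tensor (the DAMASK routine presumably also handles stacks of tensors):

    import numpy as np

    def deviatoric_part(T):
        # subtract the hydrostatic (spherical) part
        return T - np.trace(T)/3.0*np.eye(3)

    deviatoric_part(np.diag([3.,3.,3.]))   # zero: pure pressure has no deviator
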
@@ -2,6 +2,7 @@ import os
 from io import StringIO

 import numpy as np
+from scipy import ndimage
 import vtk
 from vtk.util import numpy_support

@ -10,380 +11,517 @@ from . import version
|
||||||
|
|
||||||
|
|
||||||
class Geom():
|
class Geom():
|
||||||
"""Geometry definition for grid solvers."""
|
"""Geometry definition for grid solvers."""
|
||||||
|
|
||||||
def __init__(self,microstructure,size,origin=[0.0,0.0,0.0],homogenization=1,comments=[]):
|
def __init__(self,microstructure,size,origin=[0.0,0.0,0.0],homogenization=1,comments=[]):
|
||||||
"""
|
"""
|
||||||
New geometry definition from array of microstructures and size.
|
New geometry definition from array of microstructures and size.
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
microstructure : numpy.ndarray
|
microstructure : numpy.ndarray
|
||||||
microstructure array (3D)
|
microstructure array (3D)
|
||||||
size : list or numpy.ndarray
|
size : list or numpy.ndarray
|
||||||
physical size of the microstructure in meter.
|
physical size of the microstructure in meter.
|
||||||
origin : list or numpy.ndarray, optional
|
origin : list or numpy.ndarray, optional
|
||||||
physical origin of the microstructure in meter.
|
physical origin of the microstructure in meter.
|
||||||
homogenization : integer, optional
|
homogenization : integer, optional
|
||||||
homogenization index.
|
homogenization index.
|
||||||
comments : list of str, optional
|
comments : list of str, optional
|
||||||
comments lines.
|
comments lines.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
self.__transforms__ = \
|
self.set_microstructure(microstructure)
|
||||||
self.set_microstructure(microstructure)
|
self.set_size(size)
|
||||||
self.set_size(size)
|
self.set_origin(origin)
|
||||||
self.set_origin(origin)
|
self.set_homogenization(homogenization)
|
||||||
self.set_homogenization(homogenization)
|
self.set_comments(comments)
|
||||||
self.set_comments(comments)
|
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
def __repr__(self):
|
"""Basic information on geometry definition."""
|
||||||
"""Basic information on geometry definition."""
|
return util.srepr([
|
||||||
return util.srepr([
|
'grid a b c: {}'.format(' x '.join(map(str,self.get_grid ()))),
|
||||||
'grid a b c: {}'.format(' x '.join(map(str,self.get_grid ()))),
|
'size x y z: {}'.format(' x '.join(map(str,self.get_size ()))),
|
||||||
'size x y z: {}'.format(' x '.join(map(str,self.get_size ()))),
|
'origin x y z: {}'.format(' '.join(map(str,self.get_origin()))),
|
||||||
'origin x y z: {}'.format(' '.join(map(str,self.get_origin()))),
|
'homogenization: {}'.format(self.get_homogenization()),
|
||||||
'homogenization: {}'.format(self.get_homogenization()),
|
'# microstructures: {}'.format(len(np.unique(self.microstructure))),
|
||||||
'# microstructures: {}'.format(len(np.unique(self.microstructure))),
|
'max microstructure: {}'.format(np.nanmax(self.microstructure)),
|
||||||
'max microstructure: {}'.format(np.nanmax(self.microstructure)),
|
])
|
||||||
])
|
|
||||||
|
def update(self,microstructure=None,size=None,origin=None,rescale=False):
|
||||||
def update(self,microstructure=None,size=None,origin=None,rescale=False):
|
"""
|
||||||
"""
|
Updates microstructure and size.
|
||||||
Updates microstructure and size.
|
|
||||||
|
Parameters
|
||||||
Parameters
|
----------
|
||||||
----------
|
microstructure : numpy.ndarray, optional
|
||||||
microstructure : numpy.ndarray, optional
|
microstructure array (3D).
|
||||||
microstructure array (3D).
|
size : list or numpy.ndarray, optional
|
||||||
size : list or numpy.ndarray, optional
|
physical size of the microstructure in meter.
|
||||||
physical size of the microstructure in meter.
|
origin : list or numpy.ndarray, optional
|
||||||
origin : list or numpy.ndarray, optional
|
physical origin of the microstructure in meter.
|
||||||
physical origin of the microstructure in meter.
|
rescale : bool, optional
|
||||||
rescale : bool, optional
|
ignore size parameter and rescale according to change of grid points.
|
||||||
ignore size parameter and rescale according to change of grid points.
|
|
||||||
|
"""
|
||||||
"""
|
grid_old = self.get_grid()
|
||||||
grid_old = self.get_grid()
|
size_old = self.get_size()
|
||||||
size_old = self.get_size()
|
origin_old = self.get_origin()
|
||||||
origin_old = self.get_origin()
|
unique_old = len(np.unique(self.microstructure))
|
||||||
unique_old = len(np.unique(self.microstructure))
|
max_old = np.nanmax(self.microstructure)
|
||||||
max_old = np.nanmax(self.microstructure)
|
|
||||||
|
if size is not None and rescale:
|
||||||
if size is not None and rescale:
|
raise ValueError('Either set size explicitly or rescale automatically')
|
||||||
raise ValueError('Either set size explicitly or rescale automatically')
|
|
||||||
|
self.set_microstructure(microstructure)
|
||||||
self.set_microstructure(microstructure)
|
self.set_origin(origin)
|
||||||
self.set_origin(origin)
|
|
||||||
|
if size is not None:
|
||||||
if size is not None:
|
self.set_size(size)
|
||||||
self.set_size(size)
|
elif rescale:
|
||||||
elif rescale:
|
self.set_size(self.get_grid()/grid_old*self.size)
|
||||||
self.set_size(self.get_grid()/grid_old*self.size)
|
|
||||||
|
message = ['grid a b c: {}'.format(' x '.join(map(str,grid_old)))]
|
||||||
message = ['grid a b c: {}'.format(' x '.join(map(str,grid_old)))]
|
if np.any(grid_old != self.get_grid()):
|
||||||
if np.any(grid_old != self.get_grid()):
|
message[-1] = util.delete(message[-1])
|
||||||
message[-1] = util.delete(message[-1])
|
message.append(util.emph('grid a b c: {}'.format(' x '.join(map(str,self.get_grid())))))
|
||||||
message.append(util.emph('grid a b c: {}'.format(' x '.join(map(str,self.get_grid())))))
|
|
||||||
|
message.append('size x y z: {}'.format(' x '.join(map(str,size_old))))
|
||||||
message.append('size x y z: {}'.format(' x '.join(map(str,size_old))))
|
if np.any(size_old != self.get_size()):
|
||||||
if np.any(size_old != self.get_size()):
|
message[-1] = util.delete(message[-1])
|
||||||
message[-1] = util.delete(message[-1])
|
message.append(util.emph('size x y z: {}'.format(' x '.join(map(str,self.get_size())))))
|
||||||
message.append(util.emph('size x y z: {}'.format(' x '.join(map(str,self.get_size())))))
|
|
||||||
|
message.append('origin x y z: {}'.format(' '.join(map(str,origin_old))))
|
||||||
message.append('origin x y z: {}'.format(' '.join(map(str,origin_old))))
|
if np.any(origin_old != self.get_origin()):
|
||||||
if np.any(origin_old != self.get_origin()):
|
message[-1] = util.delete(message[-1])
|
||||||
message[-1] = util.delete(message[-1])
|
message.append(util.emph('origin x y z: {}'.format(' '.join(map(str,self.get_origin())))))
|
||||||
message.append(util.emph('origin x y z: {}'.format(' '.join(map(str,self.get_origin())))))
|
|
||||||
|
message.append('homogenization: {}'.format(self.get_homogenization()))
|
||||||
message.append('homogenization: {}'.format(self.get_homogenization()))
|
|
||||||
|
message.append('# microstructures: {}'.format(unique_old))
|
||||||
message.append('# microstructures: {}'.format(unique_old))
|
if unique_old != len(np.unique(self.microstructure)):
|
||||||
if unique_old != len(np.unique(self.microstructure)):
|
message[-1] = util.delete(message[-1])
|
||||||
message[-1] = util.delete(message[-1])
|
message.append(util.emph('# microstructures: {}'.format(len(np.unique(self.microstructure)))))
|
||||||
message.append(util.emph('# microstructures: {}'.format(len(np.unique(self.microstructure)))))
|
|
||||||
|
message.append('max microstructure: {}'.format(max_old))
|
||||||
message.append('max microstructure: {}'.format(max_old))
|
if max_old != np.nanmax(self.microstructure):
|
||||||
if max_old != np.nanmax(self.microstructure):
|
message[-1] = util.delete(message[-1])
|
||||||
message[-1] = util.delete(message[-1])
|
message.append(util.emph('max microstructure: {}'.format(np.nanmax(self.microstructure))))
|
||||||
message.append(util.emph('max microstructure: {}'.format(np.nanmax(self.microstructure))))
|
|
||||||
|
return util.return_message(message)
|
||||||
return util.return_message(message)
|
|
||||||
|
def set_comments(self,comments):
|
||||||
def set_comments(self,comments):
|
"""
|
||||||
"""
|
Replaces all existing comments.
|
||||||
Replaces all existing comments.
|
|
||||||
|
Parameters
|
||||||
Parameters
|
----------
|
||||||
----------
|
comments : list of str
|
||||||
comments : list of str
|
new comments.
|
||||||
new comments.
|
|
||||||
|
"""
|
||||||
"""
|
self.comments = []
|
||||||
self.comments = []
|
self.add_comments(comments)
|
||||||
self.add_comments(comments)
|
|
||||||
|
def add_comments(self,comments):
|
||||||
def add_comments(self,comments):
|
"""
|
||||||
"""
|
Appends comments to existing comments.
|
||||||
Appends comments to existing comments.
|
|
||||||
|
Parameters
|
||||||
Parameters
|
----------
|
||||||
----------
|
comments : list of str
|
||||||
comments : list of str
|
new comments.
|
||||||
new comments.
|
|
||||||
|
"""
|
||||||
"""
|
self.comments += [str(c) for c in comments] if isinstance(comments,list) else [str(comments)]
|
||||||
self.comments += [str(c) for c in comments] if isinstance(comments,list) else [str(comments)]
|
|
||||||
|
def set_microstructure(self,microstructure):
|
||||||
def set_microstructure(self,microstructure):
|
"""
|
||||||
"""
|
Replaces the existing microstructure representation.
|
||||||
Replaces the existing microstructure representation.
|
|
||||||
|
Parameters
|
||||||
Parameters
|
----------
|
||||||
----------
|
microstructure : numpy.ndarray
|
||||||
microstructure : numpy.ndarray
|
microstructure array (3D).
|
||||||
microstructure array (3D).
|
|
||||||
|
"""
|
||||||
"""
|
if microstructure is not None:
|
||||||
if microstructure is not None:
|
if len(microstructure.shape) != 3:
|
||||||
if len(microstructure.shape) != 3:
|
raise ValueError('Invalid microstructure shape {}'.format(*microstructure.shape))
|
||||||
raise ValueError('Invalid microstructure shape {}'.format(*microstructure.shape))
|
elif microstructure.dtype not in np.sctypes['float'] + np.sctypes['int']:
|
||||||
elif microstructure.dtype not in np.sctypes['float'] + np.sctypes['int']:
|
raise TypeError('Invalid data type {} for microstructure'.format(microstructure.dtype))
|
||||||
raise TypeError('Invalid data type {} for microstructure'.format(microstructure.dtype))
|
else:
|
||||||
else:
|
self.microstructure = np.copy(microstructure)
|
||||||
self.microstructure = np.copy(microstructure)
|
|
||||||
|
def set_size(self,size):
|
||||||
def set_size(self,size):
|
"""
|
||||||
"""
|
Replaces the existing size information.
|
||||||
Replaces the existing size information.
|
|
||||||
|
Parameters
|
||||||
Parameters
|
----------
|
||||||
----------
|
size : list or numpy.ndarray
|
||||||
size : list or numpy.ndarray
|
physical size of the microstructure in meter.
|
||||||
physical size of the microstructure in meter.
|
|
||||||
|
"""
|
||||||
"""
|
if size is None:
|
||||||
if size is None:
|
grid = np.asarray(self.microstructure.shape)
|
||||||
grid = np.asarray(self.microstructure.shape)
|
self.size = grid/np.max(grid)
|
||||||
self.size = grid/np.max(grid)
|
else:
|
||||||
else:
|
if len(size) != 3 or any(np.array(size)<=0):
|
||||||
if len(size) != 3 or any(np.array(size)<=0):
|
raise ValueError('Invalid size {}'.format(*size))
|
||||||
raise ValueError('Invalid size {}'.format(*size))
|
else:
|
||||||
else:
|
self.size = np.array(size)
|
||||||
self.size = np.array(size)
|
|
||||||
|
def set_origin(self,origin):
|
||||||
def set_origin(self,origin):
|
"""
|
||||||
"""
|
Replaces the existing origin information.
|
||||||
Replaces the existing origin information.
|
|
||||||
|
Parameters
|
||||||
Parameters
|
----------
|
||||||
----------
|
origin : list or numpy.ndarray
|
||||||
origin : list or numpy.ndarray
|
physical origin of the microstructure in meter
|
||||||
physical origin of the microstructure in meter
|
|
||||||
|
"""
|
||||||
"""
|
if origin is not None:
|
||||||
if origin is not None:
|
if len(origin) != 3:
|
||||||
if len(origin) != 3:
|
raise ValueError('Invalid origin {}'.format(*origin))
|
||||||
raise ValueError('Invalid origin {}'.format(*origin))
|
else:
|
||||||
else:
|
self.origin = np.array(origin)
|
||||||
self.origin = np.array(origin)
|
|
||||||
|
def set_homogenization(self,homogenization):
|
||||||
def set_homogenization(self,homogenization):
|
"""
|
||||||
"""
|
Replaces the existing homogenization index.
|
||||||
Replaces the existing homogenization index.
|
|
||||||
|
Parameters
|
||||||
Parameters
|
----------
|
||||||
----------
|
homogenization : integer
|
||||||
homogenization : integer
|
homogenization index
|
||||||
homogenization index
|
|
||||||
|
"""
|
||||||
"""
|
if homogenization is not None:
|
||||||
if homogenization is not None:
|
if not isinstance(homogenization,int) or homogenization < 1:
|
||||||
if not isinstance(homogenization,int) or homogenization < 1:
|
raise TypeError('Invalid homogenization {}'.format(homogenization))
|
||||||
raise TypeError('Invalid homogenization {}'.format(homogenization))
|
else:
|
||||||
else:
|
self.homogenization = homogenization
|
||||||
self.homogenization = homogenization
|
|
||||||
|
|
||||||
|
def get_microstructure(self):
|
||||||
def get_microstructure(self):
|
"""Return the microstructure representation."""
|
||||||
"""Return the microstructure representation."""
|
return np.copy(self.microstructure)
|
||||||
return np.copy(self.microstructure)
|
|
||||||
|
def get_size(self):
|
||||||
def get_size(self):
|
"""Return the physical size in meter."""
|
||||||
"""Return the physical size in meter."""
|
return np.copy(self.size)
|
||||||
return np.copy(self.size)
|
|
||||||
|
def get_origin(self):
|
||||||
def get_origin(self):
|
"""Return the origin in meter."""
|
||||||
"""Return the origin in meter."""
|
return np.copy(self.origin)
|
||||||
return np.copy(self.origin)
|
|
||||||
|
def get_grid(self):
|
||||||
def get_grid(self):
|
"""Return the grid discretization."""
|
||||||
"""Return the grid discretization."""
|
return np.array(self.microstructure.shape)
|
||||||
return np.array(self.microstructure.shape)
|
|
||||||
|
def get_homogenization(self):
|
||||||
def get_homogenization(self):
|
"""Return the homogenization index."""
|
||||||
"""Return the homogenization index."""
|
return self.homogenization
|
||||||
return self.homogenization
|
|
||||||
|
def get_comments(self):
|
||||||
def get_comments(self):
|
"""Return the comments."""
|
||||||
"""Return the comments."""
|
return self.comments[:]
|
||||||
return self.comments[:]
|
|
||||||
|
def get_header(self):
|
||||||
def get_header(self):
|
"""Return the full header (grid, size, origin, homogenization, comments)."""
|
||||||
"""Return the full header (grid, size, origin, homogenization, comments)."""
|
header = ['{} header'.format(len(self.comments)+4)] + self.comments
|
||||||
header = ['{} header'.format(len(self.comments)+4)] + self.comments
|
header.append('grid a {} b {} c {}'.format(*self.get_grid()))
|
||||||
header.append('grid a {} b {} c {}'.format(*self.get_grid()))
|
header.append('size x {} y {} z {}'.format(*self.get_size()))
|
||||||
header.append('size x {} y {} z {}'.format(*self.get_size()))
|
header.append('origin x {} y {} z {}'.format(*self.get_origin()))
|
||||||
header.append('origin x {} y {} z {}'.format(*self.get_origin()))
|
header.append('homogenization {}'.format(self.get_homogenization()))
|
||||||
header.append('homogenization {}'.format(self.get_homogenization()))
|
return header
|
||||||
return header
|
|
||||||
|
@classmethod
|
||||||
@classmethod
|
def from_file(cls,fname):
|
||||||
def from_file(cls,fname):
|
"""
|
||||||
"""
|
Reads a geom file.
|
||||||
Reads a geom file.
|
|
||||||
|
Parameters
|
||||||
Parameters
|
----------
|
||||||
----------
|
fname : str or file handle
|
||||||
fname : str or file handle
|
geometry file to read.
|
||||||
geometry file to read.
|
|
||||||
|
"""
|
||||||
"""
|
try:
|
||||||
with (open(fname) if isinstance(fname,str) else fname) as f:
|
f = open(fname)
|
||||||
f.seek(0)
|
except TypeError:
|
||||||
header_length,keyword = f.readline().split()[:2]
|
f = fname
|
||||||
header_length = int(header_length)
|
|
||||||
content = f.readlines()
|
f.seek(0)
|
||||||
|
header_length,keyword = f.readline().split()[:2]
|
||||||
if not keyword.startswith('head') or header_length < 3:
|
header_length = int(header_length)
|
||||||
raise TypeError('Header length information missing or invalid')
|
content = f.readlines()
|
||||||
|
|
||||||
comments = []
|
if not keyword.startswith('head') or header_length < 3:
|
||||||
for i,line in enumerate(content[:header_length]):
|
raise TypeError('Header length information missing or invalid')
|
||||||
items = line.lower().strip().split()
|
|
||||||
key = items[0] if len(items) > 0 else ''
|
comments = []
|
||||||
if key == 'grid':
|
for i,line in enumerate(content[:header_length]):
|
||||||
grid = np.array([ int(dict(zip(items[1::2],items[2::2]))[i]) for i in ['a','b','c']])
|
items = line.lower().strip().split()
|
||||||
elif key == 'size':
|
key = items[0] if len(items) > 0 else ''
|
||||||
size = np.array([float(dict(zip(items[1::2],items[2::2]))[i]) for i in ['x','y','z']])
|
if key == 'grid':
|
||||||
elif key == 'origin':
|
grid = np.array([ int(dict(zip(items[1::2],items[2::2]))[i]) for i in ['a','b','c']])
|
||||||
origin = np.array([float(dict(zip(items[1::2],items[2::2]))[i]) for i in ['x','y','z']])
|
elif key == 'size':
|
||||||
elif key == 'homogenization':
|
size = np.array([float(dict(zip(items[1::2],items[2::2]))[i]) for i in ['x','y','z']])
|
||||||
homogenization = int(items[1])
|
elif key == 'origin':
|
||||||
else:
|
origin = np.array([float(dict(zip(items[1::2],items[2::2]))[i]) for i in ['x','y','z']])
|
||||||
comments.append(line.strip())
|
elif key == 'homogenization':
|
||||||
|
homogenization = int(items[1])
|
||||||
microstructure = np.empty(grid.prod()) # initialize as flat array
|
else:
|
||||||
i = 0
|
comments.append(line.strip())
|
||||||
for line in content[header_length:]:
|
|
||||||
items = line.split()
|
microstructure = np.empty(grid.prod()) # initialize as flat array
|
||||||
if len(items) == 3:
|
i = 0
|
||||||
if items[1].lower() == 'of':
|
for line in content[header_length:]:
|
||||||
items = np.ones(int(items[0]))*float(items[2])
|
items = line.split()
|
||||||
elif items[1].lower() == 'to':
|
if len(items) == 3:
|
||||||
items = np.linspace(int(items[0]),int(items[2]),
|
if items[1].lower() == 'of':
|
||||||
abs(int(items[2])-int(items[0]))+1,dtype=float)
|
items = np.ones(int(items[0]))*float(items[2])
|
||||||
else: items = list(map(float,items))
|
elif items[1].lower() == 'to':
|
||||||
else: items = list(map(float,items))
|
items = np.linspace(int(items[0]),int(items[2]),
|
||||||
|
abs(int(items[2])-int(items[0]))+1,dtype=float)
|
||||||
microstructure[i:i+len(items)] = items
|
else: items = list(map(float,items))
|
||||||
i += len(items)
|
else: items = list(map(float,items))
|
||||||
|
microstructure[i:i+len(items)] = items
|
||||||
if i != grid.prod():
|
i += len(items)
|
||||||
raise TypeError('Invalid file: expected {} entries,found {}'.format(grid.prod(),i))
|
|
||||||
|
if i != grid.prod():
|
||||||
microstructure = microstructure.reshape(grid,order='F')
|
raise TypeError('Invalid file: expected {} entries,found {}'.format(grid.prod(),i))
|
||||||
if not np.any(np.mod(microstructure.flatten(),1) != 0.0): # no float present
|
|
||||||
microstructure = microstructure.astype('int')
|
microstructure = microstructure.reshape(grid,order='F')
|
||||||
|
if not np.any(np.mod(microstructure.flatten(),1) != 0.0): # no float present
|
||||||
return cls(microstructure.reshape(grid),size,origin,homogenization,comments)
|
microstructure = microstructure.astype('int')
|
||||||
|
|
-    def to_file(self,fname):
+    def to_file(self,fname,pack=None):
         """
         Writes a geom file.

         Parameters
         ----------
         fname : str or file handle
             geometry file to write.
+        pack : bool, optional
+            compress geometry with 'x of y' and 'a to b'.

         """
         header = self.get_header()
         grid = self.get_grid()
-        format_string = '%{}i'.format(1+int(np.floor(np.log10(np.nanmax(self.microstructure))))) if self.microstructure.dtype == int \
-                        else '%g'
-        np.savetxt(fname,
-                   self.microstructure.reshape([grid[0],np.prod(grid[1:])],order='F').T,
-                   header='\n'.join(header), fmt=format_string, comments='')
+
+        if pack is None:
+            plain = grid.prod()/np.unique(self.microstructure).size < 250
+        else:
+            plain = not pack
+
+        if plain:
+            format_string = '%g' if self.microstructure.dtype in np.sctypes['float'] else \
+                            '%{}i'.format(1+int(np.floor(np.log10(np.nanmax(self.microstructure)))))
+            np.savetxt(fname,
+                       self.microstructure.reshape([grid[0],np.prod(grid[1:])],order='F').T,
+                       header='\n'.join(header), fmt=format_string, comments='')
+        else:
+            try:
+                f = open(fname,'w')
+            except TypeError:
+                f = fname
+
+            compressType = None
+            former = start = -1
+            reps = 0
+            for current in self.microstructure.flatten('F'):
+                if abs(current - former) == 1 and (start - current) == reps*(former - current):
+                    compressType = 'to'
+                    reps += 1
+                elif current == former and start == former:
+                    compressType = 'of'
+                    reps += 1
+                else:
+                    if compressType is None:
+                        f.write('\n'.join(self.get_header())+'\n')
+                    elif compressType == '.':
+                        f.write('{}\n'.format(former))
+                    elif compressType == 'to':
+                        f.write('{} to {}\n'.format(start,former))
+                    elif compressType == 'of':
+                        f.write('{} of {}\n'.format(reps,former))
+
+                    compressType = '.'
+                    start = current
+                    reps = 1
+
+                former = current
+
+            if compressType == '.':
+                f.write('{}\n'.format(former))
+            elif compressType == 'to':
+                f.write('{} to {}\n'.format(start,former))
+            elif compressType == 'of':
+                f.write('{} of {}\n'.format(reps,former))
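A minimal usage sketch of the new pack keyword (the file names are hypothetical):

    geom.to_file('forced.geom',pack=True)    # always emit 'x of y' / 'a to b' compression
    geom.to_file('plain.geom',pack=False)    # always emit one value per grid point
    geom.to_file('auto.geom')                # pack=None: compress once grid points
                                             # outnumber unique indices by 250 or more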


     def to_vtk(self,fname=None):
         """
         Generates vtk file.

         Parameters
         ----------
         fname : str, optional
             vtk file to write. If no file is given, a string is returned.

         """
         grid = self.get_grid() + np.ones(3,dtype=int)
         size = self.get_size()
         origin = self.get_origin()

         coords = [
                   np.linspace(0,size[0],grid[0]) + origin[0],
                   np.linspace(0,size[1],grid[1]) + origin[1],
                   np.linspace(0,size[2],grid[2]) + origin[2]
                  ]

         rGrid = vtk.vtkRectilinearGrid()
         coordArray = [vtk.vtkDoubleArray(),vtk.vtkDoubleArray(),vtk.vtkDoubleArray()]

         rGrid.SetDimensions(*grid)
         for d,coord in enumerate(coords):
             for c in coord:
                 coordArray[d].InsertNextValue(c)

         rGrid.SetXCoordinates(coordArray[0])
         rGrid.SetYCoordinates(coordArray[1])
         rGrid.SetZCoordinates(coordArray[2])

         ms = numpy_support.numpy_to_vtk(num_array=self.microstructure.flatten(order='F'),
                                         array_type=vtk.VTK_INT if self.microstructure.dtype == int else vtk.VTK_FLOAT)
         ms.SetName('microstructure')
         rGrid.GetCellData().AddArray(ms)

         if fname is None:
             writer = vtk.vtkDataSetWriter()
             writer.SetHeader('damask.Geom '+version)
             writer.WriteToOutputStringOn()
         else:
             writer = vtk.vtkXMLRectilinearGridWriter()
             writer.SetCompressorTypeToZLib()
             writer.SetDataModeToBinary()

             ext = os.path.splitext(fname)[1]
             if ext == '':
                 name = fname + '.' + writer.GetDefaultFileExtension()
             elif ext == writer.GetDefaultFileExtension():
                 name = fname
             else:
                 raise ValueError("unknown extension {}".format(ext))
             writer.SetFileName(name)

         writer.SetInputData(rGrid)
         writer.Write()

         if fname is None: return writer.GetOutputString()


     def show(self):
         """Show raw content (as in file)."""
         f=StringIO()
         self.to_file(f)
         f.seek(0)
         return ''.join(f.readlines())
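Usage sketch for to_vtk (the file name is hypothetical): writing to a path produces an XML rectilinear grid, while omitting the name returns the legacy-format string:

    geom.to_vtk('polycrystal')   # writes polycrystal.vtr (default extension appended)
    s = geom.to_vtk()            # no file name: serialized string is returned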
+
+
+    def mirror(self,directions,reflect=False):
+        """
+        Mirror microstructure along given directions.
+
+        Parameters
+        ----------
+        directions : iterable containing str
+            direction(s) along which the microstructure is mirrored. Valid entries are 'x', 'y', 'z'.
+        reflect : bool, optional
+            reflect (include) outermost layers.
+
+        """
+        valid = {'x','y','z'}
+        if not all(isinstance(d, str) for d in directions):
+            raise TypeError('Directions are not of type str.')
+        elif not set(directions).issubset(valid):
+            raise ValueError('Invalid direction specified {}'.format(*set(directions).difference(valid)))
+
+        limits = [None,None] if reflect else [-2,0]
+        ms = self.get_microstructure()
+
+        if 'z' in directions:
+            ms = np.concatenate([ms,ms[:,:,limits[0]:limits[1]:-1]],2)
+        if 'y' in directions:
+            ms = np.concatenate([ms,ms[:,limits[0]:limits[1]:-1,:]],1)
+        if 'x' in directions:
+            ms = np.concatenate([ms,ms[limits[0]:limits[1]:-1,:,:]],0)
+
+        return self.update(ms,rescale=True)
+        #self.add_comments('tbd')
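Mirroring grows the grid along each chosen direction, and update(...,rescale=True) adapts the physical size to match; like the other geometry operations it edits the object in place and returns a change report. Usage in the style of the shipped scripts:

    damask.util.croak(geom.mirror('xy'))                 # any iterable of 'x','y','z' works
    damask.util.croak(geom.mirror(['z'],reflect=True))   # also duplicate the outermost layers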
+
+
+    def scale(self,grid):
+        """
+        Scale microstructure to new grid.
+
+        Parameters
+        ----------
+        grid : iterable of int
+            new grid dimension
+
+        """
+        return self.update(
+                           ndimage.interpolation.zoom(
+                                                      self.microstructure,
+                                                      grid/self.get_grid(),
+                                                      output=self.microstructure.dtype,
+                                                      order=0,
+                                                      mode='nearest',
+                                                      prefilter=False
+                                                     )
+                          )
+        #self.add_comments('tbd')
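Because zoom runs with order=0 (nearest neighbour) and prefilter=False, scaling never invents new grain indices. A usage sketch:

    damask.util.croak(geom.scale(geom.get_grid()*2))   # double the resolution in x, y, z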
+
+
+    def clean(self,stencil=3):
+        """
+        Smooth microstructure by selecting most frequent index within given stencil at each location.
+
+        Parameters
+        ----------
+        stencil : int, optional
+            size of smoothing stencil.
+
+        """
+        def mostFrequent(arr):
+            unique, inverse = np.unique(arr, return_inverse=True)
+            return unique[np.argmax(np.bincount(inverse))]
+
+        return self.update(ndimage.filters.generic_filter(
+                                                          self.microstructure,
+                                                          mostFrequent,
+                                                          size=(stencil,)*3
+                                                         ).astype(self.microstructure.dtype)
+                          )
+        #self.add_comments('tbd')
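mostFrequent is the per-window mode; a standalone sketch of what it computes for one stencil window:

    import numpy as np
    a = np.array([1,1,2,1,3])
    u,inv = np.unique(a,return_inverse=True)
    u[np.argmax(np.bincount(inv))]          # -> 1, the most frequent index in the window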
+
+
+    def renumber(self):
+        """Renumber sorted microstructure indices to 1,...,N."""
+        renumbered = np.empty(self.get_grid(),dtype=self.microstructure.dtype)
+        for i, oldID in enumerate(np.unique(self.microstructure)):
+            renumbered = np.where(self.microstructure == oldID, i+1, renumbered)
+
+        return self.update(renumbered)
+        #self.add_comments('tbd')
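Like mirror, scale, and clean above, renumber edits the geometry in place and returns a report that the command-line scripts pass to damask.util.croak:

    damask.util.croak(geom.renumber())   # microstructure indices now run 1,...,N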
@@ -48,10 +48,10 @@ def strain_tensor(F,t,m):
     if m > 0.0:
         eps = 1.0/(2.0*abs(m)) * (+ np.matmul(n,np.einsum('ij,ikj->ijk',w**m,n))
-                                  - np.broadcast_to(np.ones(3),[F_.shape[0],3]))
+                                  - np.broadcast_to(np.eye(3),[F_.shape[0],3,3]))
     elif m < 0.0:
         eps = 1.0/(2.0*abs(m)) * (- np.matmul(n,np.einsum('ij,ikj->ijk',w**m,n))
-                                  + np.broadcast_to(np.ones(3),[F_.shape[0],3]))
+                                  + np.broadcast_to(np.eye(3),[F_.shape[0],3,3]))
     else:
         eps = np.matmul(n,np.einsum('ij,ikj->ijk',0.5*np.log(w),n))
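The corrected term broadcasts the 3x3 identity over the batch, which is what the Seth-Hill strain family subtracts. Assuming w and n hold the eigenvalues and eigenvectors of C = FᵀF (or B = FFᵀ), so that w**m already supplies the 2m-th powers of the principal stretches, the computed quantity is

    E_m = \frac{1}{2m}\,(U^{2m} - I)
        = \frac{1}{2m}\Bigl(\sum_i \lambda_i^{2m}\, n_i \otimes n_i - I\Bigr),
    \qquad E_0 = \ln U .

The previous np.ones(3) broadcast to shape [N,3] would either fail to broadcast against the [N,3,3] stack or silently subtract the wrong quantity.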
@@ -190,7 +190,7 @@ def rotational_part(x):
         Tensor of which the rotational part is computed.

     """
-    return __polar_decomposition(x,'R')
+    return __polar_decomposition(x,'R')[0]


 def left_stretch(x):
@@ -203,7 +203,7 @@ def left_stretch(x):
         Tensor of which the left stretch is computed.

     """
-    return __polar_decomposition(x,'V')
+    return __polar_decomposition(x,'V')[0]


 def right_stretch(x):
@@ -216,7 +216,7 @@ def right_stretch(x):
         Tensor of which the right stretch is computed.

     """
-    return __polar_decomposition(x,'U')
+    return __polar_decomposition(x,'U')[0]


 def __polar_decomposition(x,requested):
@@ -227,7 +227,7 @@ def __polar_decomposition(x,requested):
     ----------
     x : numpy.array of shape (:,3,3) or (3,3)
         Tensor of which the singular values are computed.
-    requested : list of str
+    requested : iterable of str
         Requested outputs: ‘R’ for the rotation tensor,
         ‘V’ for left stretch tensor and ‘U’ for right stretch tensor.
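__polar_decomposition now returns one tensor per requested symbol, so the single-output wrappers index [0]. The underlying relation is the polar decomposition F = RU = VR; a quick consistency sketch (assuming these functions live in the damask.mechanics namespace and accept a single (3,3) array, per the docstring):

    import numpy as np
    from damask import mechanics

    F = np.array([[1.1,0.2,0.0],
                  [0.0,1.0,0.0],
                  [0.0,0.0,0.9]])
    R = mechanics.rotational_part(F)
    U = mechanics.right_stretch(F)
    V = mechanics.left_stretch(F)
    assert np.allclose(F, R @ U) and np.allclose(F, V @ R)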
@@ -79,9 +79,9 @@ class Marc(Solver):
     exitnumber = -1
     fid_out = open(outFile,'r')
     for line in fid_out:
-      if (string.find(line,'tress iteration') is not -1):
+      if (string.find(line,'tress iteration') != -1):
         print(line)
-      elif (string.find(line,'Exit number') is not -1):
+      elif (string.find(line,'Exit number') != -1):
         substr = line[string.find(line,'Exit number'):len(line)]
         exitnumber = int(substr[12:16])
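`is not` tests object identity rather than value, and whether two equal ints are the same object is an interpreter caching detail; recent Pythons flag literal comparisons with a SyntaxWarning. The distinction in miniature:

    a = 10**3
    b = 1000
    a == b        # True: value equality, what the exit-number check intends
    a is b        # unreliable: identity depends on the interpreter's int caching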
@@ -1,10 +1,8 @@
-# -*- coding: UTF-8 no BOM -*-
-
 import os,sys,shutil
 import logging,logging.config
 import damask
 import numpy as np
-from collections import Iterable
+from collections.abc import Iterable
 from optparse import OptionParser

 class Test():
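The abstract base classes moved to collections.abc in Python 3.3, and the old aliases were removed in 3.10. Where both interpreter generations still matter, a guarded import is the usual sketch:

    try:
        from collections.abc import Iterable   # Python >= 3.3
    except ImportError:
        from collections import Iterable       # legacy fallback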
@@ -17,7 +15,7 @@ class Test():
   variants = []

   def __init__(self, **kwargs):
-
+    """New test."""
     defaults = {'description': '',
                 'keep':        False,
                 'accept':      False,
@@ -120,22 +118,22 @@ class Test():
     """Delete directory tree containing current results."""
     try:
       shutil.rmtree(self.dirCurrent())
-    except:
+    except FileNotFoundError:
       logging.warning('removal of directory "{}" not possible...'.format(self.dirCurrent()))

     try:
       os.mkdir(self.dirCurrent())
       return True
-    except:
+    except FileExistsError:
       logging.critical('creation of directory "{}" failed.'.format(self.dirCurrent()))
       return False

   def prepareAll(self):
-    """Do all necessary preparations for the whole test"""
+    """Do all necessary preparations for the whole test."""
     return True

   def prepare(self,variant):
-    """Do all necessary preparations for the run of each test variant"""
+    """Do all necessary preparations for the run of each test variant."""
     return True
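Narrowing the bare except: clauses restricts handling to the one failure each block anticipates; a bare except would also swallow KeyboardInterrupt, SystemExit, and genuine bugs. The pattern in miniature (the path is hypothetical):

    import shutil

    try:
        shutil.rmtree('Current')
    except FileNotFoundError:
        pass          # nothing to remove is acceptable; anything else propagates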
@@ -207,9 +205,9 @@ class Test():
     for source,target in zip(list(map(mapA,A)),list(map(mapB,B))):
       try:
         shutil.copy2(source,target)
-      except:
+      except FileNotFoundError:
         logging.critical('error copying {} to {}'.format(source,target))
-        raise
+        raise FileNotFoundError


   def copy_Reference2Current(self,sourcefiles=[],targetfiles=[]):
@@ -218,9 +216,9 @@ class Test():
     for i,f in enumerate(sourcefiles):
       try:
         shutil.copy2(self.fileInReference(f),self.fileInCurrent(targetfiles[i]))
-      except:
+      except FileNotFoundError:
         logging.critical('Reference2Current: Unable to copy file "{}"'.format(f))
-        raise
+        raise FileNotFoundError


   def copy_Base2Current(self,sourceDir,sourcefiles=[],targetfiles=[]):
@@ -230,10 +228,10 @@ class Test():
     for i,f in enumerate(sourcefiles):
       try:
         shutil.copy2(os.path.join(source,f),self.fileInCurrent(targetfiles[i]))
-      except:
+      except FileNotFoundError:
         logging.error(os.path.join(source,f))
         logging.critical('Base2Current: Unable to copy file "{}"'.format(f))
-        raise
+        raise FileNotFoundError


   def copy_Current2Reference(self,sourcefiles=[],targetfiles=[]):
@@ -242,9 +240,9 @@ class Test():
     for i,f in enumerate(sourcefiles):
       try:
         shutil.copy2(self.fileInCurrent(f),self.fileInReference(targetfiles[i]))
-      except:
+      except FileNotFoundError:
         logging.critical('Current2Reference: Unable to copy file "{}"'.format(f))
-        raise
+        raise FileNotFoundError


   def copy_Proof2Current(self,sourcefiles=[],targetfiles=[]):
@@ -253,9 +251,9 @@ class Test():
     for i,f in enumerate(sourcefiles):
       try:
         shutil.copy2(self.fileInProof(f),self.fileInCurrent(targetfiles[i]))
-      except:
+      except FileNotFoundError:
         logging.critical('Proof2Current: Unable to copy file "{}"'.format(f))
-        raise
+        raise FileNotFoundError


   def copy_Current2Current(self,sourcefiles=[],targetfiles=[]):
@@ -263,9 +261,10 @@ class Test():
     for i,f in enumerate(sourcefiles):
       try:
         shutil.copy2(self.fileInReference(f),self.fileInCurrent(targetfiles[i]))
-      except:
+      except FileNotFoundError:
         logging.critical('Current2Current: Unable to copy file "{}"'.format(f))
-        raise
+        raise FileNotFoundError


   def execute_inCurrentDir(self,cmd,streamIn=None,env=None):
@@ -439,7 +438,7 @@ class Test():
                          stdTol     = 1.0e-6,
                          preFilter  = 1.0e-9):
     """
-    Calculate statistics of tables
+    Calculate statistics of tables.

    threshold can be used to ignore small values (a negative number disables this feature)
    """
@@ -492,7 +491,7 @@ class Test():
                               rtol = 1e-5,
                               atol = 1e-8,
                               debug = False):
-    """Compare multiple tables with np.allclose"""
+    """Compare multiple tables with np.allclose."""
     if not (isinstance(files, Iterable) and not isinstance(files, str)):           # check whether list of files is requested
       files = [str(files)]
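np.allclose applies the combined tolerance |a - b| <= atol + rtol*|b| element-wise; the defaults above match numpy's own. A sketch:

    import numpy as np
    a = np.array([1.0,2.0])
    b = a + 1e-9
    np.allclose(a,b,rtol=1e-5,atol=1e-8)   # True: within the combined tolerance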
@@ -1,3 +0,0 @@
-"""Test functionality."""
-
-from .test import Test # noqa