Merge branch 'no-crystallite' into almost-no-postResults
commit 530f2d3c08
@@ -1 +1 @@
-Subproject commit a3a88933cbb92b81d481305ce93374917baf3980
+Subproject commit 66d562c755cd9aa4bbb8280c509383014acd52db
@@ -1,9 +0,0 @@
-[all]
-(output) orientation    # quaternion
-(output) grainrotation  # deviation from initial orientation as axis (1-3) and angle in degree (4) in crystal reference coordinates
-(output) F               # deformation gradient tensor
-(output) Fe              # elastic deformation gradient tensor
-(output) Fp              # plastic deformation gradient tensor
-(output) P               # first Piola-Kirchhoff stress tensor
-(output) S               # second Piola-Kirchhoff stress tensor
-(output) Lp              # plastic velocity gradient tensor
@@ -59,35 +59,35 @@ for filename in options.filenames:
     data = np.concatenate((data,coords),1)
     header+=' 1_pos 2_pos 3_pos'

+    results.set_visible('materialpoints',False)
+    results.set_visible('constituents', True)
     for label in options.con:
-      for p in results.iter_visible('con_physics'):
-        for c in results.iter_visible('constituents'):
-          x = results.get_dataset_location(label)
-          if len(x) == 0:
-            continue
-          array = results.read_dataset(x,0,plain=True)
-          d = int(np.product(np.shape(array)[1:]))
-          data = np.concatenate((data,np.reshape(array,[np.product(results.grid),d])),1)
+      x = results.get_dataset_location(label)
+      if len(x) == 0:
+        continue
+      array = results.read_dataset(x,0,plain=True)
+      d = np.product(np.shape(array)[1:])
+      data = np.concatenate((data,np.reshape(array,[np.product(results.grid),d])),1)

       if d>1:
         header+= ''.join([' {}_{}'.format(j+1,label) for j in range(d)])
       else:
         header+=' '+label

+    results.set_visible('constituents', False)
+    results.set_visible('materialpoints',True)
     for label in options.mat:
-      for p in results.iter_visible('mat_physics'):
-        for m in results.iter_visible('materialpoints'):
-          x = results.get_dataset_location(label)
-          if len(x) == 0:
-            continue
-          array = results.read_dataset(x,0,plain=True)
-          d = int(np.product(np.shape(array)[1:]))
-          data = np.concatenate((data,np.reshape(array,[np.product(results.grid),d])),1)
+      x = results.get_dataset_location(label)
+      if len(x) == 0:
+        continue
+      array = results.read_dataset(x,0,plain=True)
+      d = np.product(np.shape(array)[1:])
+      data = np.concatenate((data,np.reshape(array,[np.product(results.grid),d])),1)

       if d>1:
         header+= ''.join([' {}_{}'.format(j+1,label) for j in range(d)])
       else:
         header+=' '+label

     dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
     if not os.path.isdir(dirname):
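Note on the loop above: each requested dataset is flattened into one column block per scalar component and appended to the point-wise table, while the header gains either a plain label or numbered '1_label 2_label ...' entries. A minimal standalone sketch of that table and header assembly, using only NumPy (the grid, label, and array shape here are made up for illustration and are not read from an actual DADF5 file):

    import numpy as np

    grid  = (2,3,4)                                  # hypothetical grid dimensions
    label = 'sigma'                                  # hypothetical dataset label
    array = np.random.rand(np.prod(grid),3,3)        # one 3x3 tensor per material point

    data   = np.zeros((np.prod(grid),0))             # start with an empty table
    header = ''

    d    = int(np.prod(np.shape(array)[1:]))         # number of scalar components (9)
    data = np.concatenate((data,np.reshape(array,[np.prod(grid),d])),1)

    if d > 1:
        header += ''.join([' {}_{}'.format(j+1,label) for j in range(d)])
    else:
        header += ' '+label

    print(header)                                    # ' 1_sigma 2_sigma ... 9_sigma'
    print(data.shape)                                # (24, 9)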
@@ -74,7 +74,6 @@ for filename in options.filenames:
     results.set_visible('materialpoints',False)
     results.set_visible('constituents', True)
     for label in options.con:
-
       for p in results.iter_visible('con_physics'):
         if p != 'generic':
           for c in results.iter_visible('constituents'):
@@ -132,7 +132,4 @@ for name in filenames:
   damask.util.croak(geom.update(np.where(mask,geom.microstructure,fill)))
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)
@@ -61,7 +61,7 @@ for name in filenames:
   canvas = np.full(new,options.fill if options.fill is not None
                    else np.nanmax(geom.microstructure)+1,geom.microstructure.dtype)

-  l = np.clip( offset,    0,np.minimum(old  +offset,new))
+  l = np.clip( offset,    0,np.minimum(old  +offset,new))          # noqa
   r = np.clip( offset+old,0,np.minimum(old*2+offset,new))
   L = np.clip(-offset,    0,np.minimum(new  -offset,old))
   R = np.clip(-offset+new,0,np.minimum(new*2-offset,old))
@@ -71,7 +71,4 @@ for name in filenames:
   damask.util.croak(geom.update(canvas,origin=origin+offset*size/old,rescale=True))
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)
@@ -37,12 +37,6 @@ for name in filenames:
   damask.util.report(scriptName,name)

   geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
-
   damask.util.croak(geom.clean(options.stencil))
-
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
-
-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)
@@ -86,7 +86,7 @@ for name in filenames:
            * inFile[os.path.join(group_geom,'SPACING')][...]
     grid = inFile[os.path.join(group_geom,'DIMENSIONS')][...]
     origin = inFile[os.path.join(group_geom,'ORIGIN')][...]
-  except:
+  except KeyError:
     errors.append('Geometry data ({}) not found'.format(group_geom))

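Narrowing the bare `except:` to `except KeyError:` (here and in the hunks below) limits the handler to the one error actually expected when a group or dataset is missing from the HDF5 file; unrelated failures such as typos or a keyboard interrupt are no longer silently converted into an 'errors' entry. A small sketch of the difference, using a plain dict in place of the h5py file object (all names illustrative):

    inFile = {'geometry/DIMENSIONS': [16,16,16]}      # stand-in for an open h5py.File
    errors = []

    try:
        grid   = inFile['geometry/DIMENSIONS']
        origin = inFile['geometry/ORIGIN']             # missing key -> KeyError
    except KeyError:
        errors.append('Geometry data (geometry) not found')

    # a bare "except:" would also have swallowed e.g. a NameError or KeyboardInterrupt here
    print(errors)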
@@ -98,13 +98,13 @@ for name in filenames:
   try:
     quats = np.reshape(inFile[dataset][...],(np.product(grid),4))
     rot = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in quats]
-  except:
+  except KeyError:
     errors.append('Pointwise orientation (quaternion) data ({}) not readable'.format(dataset))

   dataset = os.path.join(group_pointwise,options.phase)
   try:
     phase = np.reshape(inFile[dataset][...],(np.product(grid)))
-  except:
+  except KeyError:
     errors.append('Pointwise phase data ({}) not readable'.format(dataset))

   microstructure = np.arange(1,np.product(grid)+1,dtype=int).reshape(grid,order='F')
@@ -116,7 +116,7 @@ for name in filenames:
   dataset = os.path.join(group_pointwise,options.microstructure)
   try:
     microstructure = np.transpose(inFile[dataset][...].reshape(grid[::-1]),(2,1,0))   # convert from C ordering
-  except:
+  except KeyError:
     errors.append('Link between pointwise and grain average data ({}) not readable'.format(dataset))

   group_average = os.path.join(rootDir,options.basegroup,options.average)
@@ -124,13 +124,13 @@ for name in filenames:
   dataset = os.path.join(group_average,options.quaternion)
   try:
     rot = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in inFile[dataset][...][1:]]   # skip first entry (unindexed)
-  except:
+  except KeyError:
     errors.append('Average orientation data ({}) not readable'.format(dataset))

   dataset = os.path.join(group_average,options.phase)
   try:
     phase = [i[0] for i in inFile[dataset][...]][1:]                                        # skip first entry (unindexed)
-  except:
+  except KeyError:
     errors.append('Average phase data ({}) not readable'.format(dataset))

   if errors != []:
@@ -155,4 +155,4 @@ for name in filenames:
                      homogenization=options.homogenization,comments=header)
   damask.util.croak(geom)

-  geom.to_file(os.path.splitext(name)[0]+'.geom')
+  geom.to_file(os.path.splitext(name)[0]+'.geom',pack=False)
@@ -89,7 +89,4 @@ geom=damask.Geom(microstructure,options.size,
                  comments=[scriptID + ' ' + ' '.join(sys.argv[1:])])
 damask.util.croak(geom)

-if name is None:
-  sys.stdout.write(str(geom.show()))
-else:
-  geom.to_file(name)
+geom.to_file(sys.stdout if name is None else name,pack=False)
@@ -145,7 +145,4 @@ geom = damask.Geom(microstructure.reshape(grid),
                    homogenization=options.homogenization,comments=header)
 damask.util.croak(geom)

-if name is None:
-  sys.stdout.write(str(geom.show()))
-else:
-  geom.to_file(name)
+geom.to_file(sys.stdout if name is None else name,pack=False)
@@ -63,7 +63,4 @@ geom = damask.Geom(microstructure=np.full(options.grid,options.fill,dtype=dtype),
                    comments=scriptID + ' ' + ' '.join(sys.argv[1:]))
 damask.util.croak(geom)

-if name is None:
-  sys.stdout.write(str(geom.show()))
-else:
-  geom.to_file(name)
+geom.to_file(sys.stdout if name is None else name,pack=False)
@@ -152,7 +152,4 @@ for name in filenames:
                      homogenization=options.homogenization,comments=header)
   damask.util.croak(geom)

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(os.path.splitext(name)[0]+'.geom')
+  geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',pack=False)
@@ -302,7 +302,4 @@ for name in filenames:
                      homogenization=options.homogenization,comments=header)
   damask.util.croak(geom)

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(os.path.splitext(name)[0]+'.geom')
+  geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',pack=False)
@@ -172,7 +172,4 @@ for name in filenames:
   damask.util.croak(geom.update(microstructure[0:grid_original[0],0:grid_original[1],0:grid_original[2]]))
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)
@@ -42,11 +42,6 @@ for name in filenames:
   damask.util.report(scriptName,name)

   geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
-
   damask.util.croak(geom.mirror(options.directions,options.reflect))
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
-
-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)
@@ -34,41 +34,4 @@ for name in filenames:
   damask.util.croak(geom)
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

-  compressType = None
-  former = start = -1
-  reps = 0
-
-  if name is None:
-    f = sys.stdout
-  else:
-    f= open(name,'w')
-
-  for current in geom.microstructure.flatten('F'):
-    if abs(current - former) == 1 and (start - current) == reps*(former - current):
-      compressType = 'to'
-      reps += 1
-    elif current == former and start == former:
-      compressType = 'of'
-      reps += 1
-    else:
-      if compressType is None:
-        f.write('\n'.join(geom.get_header())+'\n')
-      elif compressType == '.':
-        f.write('{}\n'.format(former))
-      elif compressType == 'to':
-        f.write('{} to {}\n'.format(start,former))
-      elif compressType == 'of':
-        f.write('{} of {}\n'.format(reps,former))
-
-      compressType = '.'
-      start = current
-      reps = 1
-
-    former = current
-
-  if compressType == '.':
-    f.write('{}\n'.format(former))
-  elif compressType == 'to':
-    f.write('{} to {}\n'.format(start,former))
-  elif compressType == 'of':
-    f.write('{} of {}\n'.format(reps,former))
+  geom.to_file(sys.stdout if name is None else name,pack=True)
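The block removed above is the writer for the packed geom format: microstructure indices are flattened in Fortran order and emitted as run-length tokens, 'n of x' for n repetitions of the same index and 'a to b' for a run of consecutive indices. That behaviour now lives in Geom.to_file(...,pack=True) (see the library hunk further down). A self-contained sketch of the same encoding rule applied to a small 1-D sequence (the helper name is illustrative, not part of the damask API):

    def pack_runs(values):
        """Encode a sequence as plain numbers, 'a to b' ranges, and 'n of x' repetitions."""
        out = []
        compressType = None
        former = start = -1
        reps = 0
        for current in values:
            if abs(current - former) == 1 and (start - current) == reps*(former - current):
                compressType = 'to'                        # consecutive run, e.g. 1 2 3 4
                reps += 1
            elif current == former and start == former:
                compressType = 'of'                        # repetition, e.g. 7 7 7
                reps += 1
            else:
                if   compressType == '.' : out.append('{}'.format(former))
                elif compressType == 'to': out.append('{} to {}'.format(start,former))
                elif compressType == 'of': out.append('{} of {}'.format(reps,former))
                compressType = '.'
                start = current
                reps = 1
            former = current
        if   compressType == '.' : out.append('{}'.format(former))
        elif compressType == 'to': out.append('{} to {}'.format(start,former))
        elif compressType == 'of': out.append('{} of {}'.format(reps,former))
        return out

    print(pack_runs([1,2,3,4,7,7,7,9]))                    # ['1 to 4', '3 of 7', '9']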
@@ -5,8 +5,6 @@ import sys
 from io import StringIO
 from optparse import OptionParser

-import numpy as np
-
 import damask

@@ -32,15 +30,6 @@ for name in filenames:
   damask.util.report(scriptName,name)

   geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
-
-  renumbered = np.empty(geom.get_grid(),dtype=geom.microstructure.dtype)
-  for i, oldID in enumerate(np.unique(geom.microstructure)):
-    renumbered = np.where(geom.microstructure == oldID, i+1, renumbered)
-
-  damask.util.croak(geom.update(renumbered))
+  damask.util.croak(geom.renumber())
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
-
-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)
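The inline renumbering dropped from this script (now provided by Geom.renumber(), added in the library hunk near the end) maps the sorted set of microstructure indices onto 1,...,N. A standalone NumPy sketch of that mapping on a toy array:

    import numpy as np

    microstructure = np.array([[4,4,9],
                               [9,2,2]])                        # hypothetical indices 2, 4, 9

    renumbered = np.empty(microstructure.shape,dtype=microstructure.dtype)
    for i, oldID in enumerate(np.unique(microstructure)):       # unique() returns sorted IDs
        renumbered = np.where(microstructure == oldID, i+1, renumbered)

    print(renumbered)                                           # [[2 2 3]
                                                                 #  [3 1 1]]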
@@ -2,11 +2,10 @@
 
 import os
 import sys
-import numpy as np
 
 from io import StringIO
 from optparse import OptionParser
-from scipy import ndimage
+import numpy as np
 
 import damask
 
@@ -55,20 +54,7 @@ for name in filenames:
              np.array([o*float(n.lower().replace('x','')) if n.lower().endswith('x') \
                        else float(n) for o,n in zip(size,options.size)],dtype=float)

-  damask.util.croak(geom.update(microstructure =
-                                ndimage.interpolation.zoom(
-                                    geom.microstructure,
-                                    new_grid/grid,
-                                    output=geom.microstructure.dtype,
-                                    order=0,
-                                    mode='nearest',
-                                    prefilter=False,
-                                ) if np.any(new_grid != grid) \
-                                  else None,
-                                size = new_size))
+  geom.scale(new_grid)
+  damask.util.croak(geom.update(microstructure = None,size = new_size))
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
-
-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)
@@ -95,7 +95,4 @@ for name in filenames:
   damask.util.croak(geom.update(microstructure,origin=origin-(np.asarray(microstructure.shape)-grid)/2*size/grid,rescale=True))
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)
@@ -58,7 +58,4 @@ for name in filenames:
   damask.util.croak(geom.update(substituted,origin=geom.get_origin()+options.origin))
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)
@@ -34,7 +34,4 @@ for name in filenames:
   damask.util.croak(geom)
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)
@@ -82,7 +82,4 @@ for name in filenames:
                                 geom.microstructure + offset,geom.microstructure)))
   geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

-  if name is None:
-    sys.stdout.write(str(geom.show()))
-  else:
-    geom.to_file(name)
+  geom.to_file(sys.stdout if name is None else name,pack=False)
@@ -250,11 +250,15 @@ class Geom():
         geometry file to read.

     """
-    with (open(fname) if isinstance(fname,str) else fname) as f:
-      f.seek(0)
-      header_length,keyword = f.readline().split()[:2]
-      header_length = int(header_length)
-      content = f.readlines()
+    try:
+      f = open(fname)
+    except TypeError:
+      f = fname
+
+    f.seek(0)
+    header_length,keyword = f.readline().split()[:2]
+    header_length = int(header_length)
+    content = f.readlines()

     if not keyword.startswith('head') or header_length < 3:
       raise TypeError('Header length information missing or invalid')
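The rewritten from_file no longer uses a context manager; it first assumes fname is a path and falls back to treating it as an already-open file handle when open() raises TypeError. The same duck-typing idiom in isolation (function and contents are illustrative):

    from io import StringIO

    def read_first_line(fname):
        try:
            f = open(fname)            # works when fname is a path
        except TypeError:
            f = fname                  # otherwise assume an open file-like object
        f.seek(0)
        return f.readline().rstrip()

    print(read_first_line(StringIO('3 header\ngrid a 2 b 2 c 2\n')))   # '3 header'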
@@ -299,7 +303,7 @@ class Geom():
     return cls(microstructure.reshape(grid),size,origin,homogenization,comments)


-  def to_file(self,fname):
+  def to_file(self,fname,pack=None):
     """
     Writes a geom file.

@@ -307,15 +311,63 @@ class Geom():
     ----------
     fname : str or file handle
       geometry file to write.
+    pack : bool, optional
+      compress geometry with 'x of y' and 'a to b'.

     """
     header = self.get_header()
     grid = self.get_grid()
-    format_string = '%g' if self.microstructure in np.sctypes['float'] else \
-                    '%{}i'.format(1+int(np.floor(np.log10(np.nanmax(self.microstructure)))))
-    np.savetxt(fname,
-               self.microstructure.reshape([grid[0],np.prod(grid[1:])],order='F').T,
-               header='\n'.join(header), fmt=format_string, comments='')
+
+    if pack is None:
+      plain = grid.prod()/np.unique(self.microstructure).size < 250
+    else:
+      plain = not pack

+    if plain:
+      format_string = '%g' if self.microstructure.dtype in np.sctypes['float'] else \
+                      '%{}i'.format(1+int(np.floor(np.log10(np.nanmax(self.microstructure)))))
+      np.savetxt(fname,
+                 self.microstructure.reshape([grid[0],np.prod(grid[1:])],order='F').T,
+                 header='\n'.join(header), fmt=format_string, comments='')
+    else:
+      try:
+        f = open(fname,'w')
+      except TypeError:
+        f = fname
+
+      compressType = None
+      former = start = -1
+      reps = 0
+      for current in self.microstructure.flatten('F'):
+        if abs(current - former) == 1 and (start - current) == reps*(former - current):
+          compressType = 'to'
+          reps += 1
+        elif current == former and start == former:
+          compressType = 'of'
+          reps += 1
+        else:
+          if compressType is None:
+            f.write('\n'.join(self.get_header())+'\n')
+          elif compressType == '.':
+            f.write('{}\n'.format(former))
+          elif compressType == 'to':
+            f.write('{} to {}\n'.format(start,former))
+          elif compressType == 'of':
+            f.write('{} of {}\n'.format(reps,former))
+
+          compressType = '.'
+          start = current
+          reps = 1
+
+        former = current
+
+      if compressType == '.':
+        f.write('{}\n'.format(former))
+      elif compressType == 'to':
+        f.write('{} to {}\n'.format(start,former))
+      elif compressType == 'of':
+        f.write('{} of {}\n'.format(reps,former))


   def to_vtk(self,fname=None):
     """
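When pack is left at None, to_file decides by itself: it writes the plain one-value-per-entry format only when the grid has fewer than 250 points per distinct microstructure index, and switches to the compressed 'a to b' / 'x of y' form otherwise. A quick numeric illustration of that threshold (grid and microstructure are made up):

    import numpy as np

    grid = np.array([64,64,64])                        # hypothetical grid
    microstructure = np.ones(grid.prod(),dtype=int)    # single-index geometry, ideal for packing

    points_per_index = grid.prod()/np.unique(microstructure).size
    plain = points_per_index < 250                     # same rule as to_file(pack=None)

    print(points_per_index, plain)                     # 262144.0 False -> packed output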
@@ -419,6 +471,29 @@ class Geom():
     #self.add_comments('tbd')


+  def scale(self,grid):
+    """
+    Scale microstructure to new grid.
+
+    Parameters
+    ----------
+    grid : iterable of int
+      new grid dimension
+
+    """
+    return self.update(
+                       ndimage.interpolation.zoom(
+                                                  self.microstructure,
+                                                  grid/self.get_grid(),
+                                                  output=self.microstructure.dtype,
+                                                  order=0,
+                                                  mode='nearest',
+                                                  prefilter=False
+                                                 )
+                      )
+    #self.add_comments('tbd')
+
+
   def clean(self,stencil=3):
     """
     Smooth microstructure by selecting most frequent index within given stencil at each location.
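Geom.scale wraps scipy's nearest-neighbour zoom so that integer microstructure indices are replicated rather than interpolated when the grid changes. A minimal standalone sketch of the same call on a tiny 2-D label array (shapes are illustrative):

    import numpy as np
    from scipy import ndimage

    microstructure = np.array([[1,2],
                               [3,4]])                          # 2x2 label grid
    new_grid = np.array([4,4])                                  # target grid

    scaled = ndimage.zoom(microstructure,
                          new_grid/np.array(microstructure.shape),
                          output=microstructure.dtype,
                          order=0,                              # nearest neighbour keeps labels integer
                          mode='nearest',
                          prefilter=False)

    print(scaled.shape)                                         # (4, 4)
    print(np.unique(scaled))                                    # [1 2 3 4]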
@@ -433,7 +508,20 @@ class Geom():
       unique, inverse = np.unique(arr, return_inverse=True)
       return unique[np.argmax(np.bincount(inverse))]

-    return self.update(ndimage.filters.generic_filter(self.microstructure,
-                                                      mostFrequent,
-                                                      size=(stencil,)*3).astype(self.microstructure.dtype))
+    return self.update(ndimage.filters.generic_filter(
+                                                      self.microstructure,
+                                                      mostFrequent,
+                                                      size=(stencil,)*3
+                                                     ).astype(self.microstructure.dtype)
+                      )
+    #self.add_comments('tbd')
+
+
+  def renumber(self):
+    """Renumber sorted microstructure indices to 1,...,N."""
+    renumbered = np.empty(self.get_grid(),dtype=self.microstructure.dtype)
+    for i, oldID in enumerate(np.unique(self.microstructure)):
+      renumbered = np.where(self.microstructure == oldID, i+1, renumbered)
+
+    return self.update(renumbered)
     #self.add_comments('tbd')
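Geom.clean keeps its behaviour: generic_filter sweeps a stencil^3 window over the grid and replaces each point by the most frequent index inside that window, which removes isolated single-voxel islands. The same filter on a 2-D toy array (window reduced to the 2-D case for brevity):

    import numpy as np
    from scipy import ndimage

    def mostFrequent(arr):
        unique, inverse = np.unique(arr, return_inverse=True)
        return unique[np.argmax(np.bincount(inverse))]

    microstructure = np.ones((5,5),dtype=int)
    microstructure[2,2] = 9                                     # single-voxel outlier

    cleaned = ndimage.generic_filter(microstructure,
                                     mostFrequent,
                                     size=(3,)*2).astype(microstructure.dtype)

    print(cleaned[2,2])                                         # 1 -> the outlier is voted away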
@@ -592,15 +592,13 @@ subroutine materialpoint_postResults
     thePos, &
     theSize, &
     myNgrains, &
-    myCrystallite, &
     g, &                 !< grain number
     i, &                 !< integration point number
     e                    !< element number

-  !$OMP PARALLEL DO PRIVATE(myNgrains,myCrystallite,thePos,theSize)
+  !$OMP PARALLEL DO PRIVATE(myNgrains,thePos,theSize)
   elementLooping: do e = FEsolving_execElem(1),FEsolving_execElem(2)
     myNgrains = homogenization_Ngrains(material_homogenizationAt(e))
-    myCrystallite = microstructure_crystallite(discretization_microstructureAt(e))
     IpLooping: do i = FEsolving_execIP(1,e),FEsolving_execIP(2,e)
       thePos = 0

@@ -120,8 +120,7 @@ module material
    homogenization_Noutput, &            !< number of '(output)' items per homogenization
    homogenization_typeInstance, &       !< instance of particular type of each homogenization
    thermal_typeInstance, &              !< instance of particular type of each thermal transport
-   damage_typeInstance, &               !< instance of particular type of each nonlocal damage
-   microstructure_crystallite           !< crystallite setting ID of each microstructure ! DEPRECATED !!!!
+   damage_typeInstance                  !< instance of particular type of each nonlocal damage

  real(pReal), dimension(:), allocatable, public, protected :: &
    thermal_initialT, &                  !< initial temperature per each homogenization
@@ -273,9 +272,6 @@ subroutine material_init
  allocate(temperatureRate (material_Nhomogenization))

  do m = 1,size(config_microstructure)
-   if(microstructure_crystallite(m) < 1 .or. &
-      microstructure_crystallite(m) > size(config_crystallite)) &
-     call IO_error(150,m,ext_msg='crystallite')
    if(minval(microstructure_phase(1:microstructure_Nconstituents(m),m)) < 1 .or. &
       maxval(microstructure_phase(1:microstructure_Nconstituents(m),m)) > size(config_phase)) &
      call IO_error(150,m,ext_msg='phase')
@@ -294,9 +290,8 @@ subroutine material_init
  enddo
  write(6,'(/,a14,18x,1x,a11,1x,a12,1x,a13)') 'microstructure','crystallite','constituents'
  do m = 1,size(config_microstructure)
-   write(6,'(1x,a32,1x,i11,1x,i12)') config_name_microstructure(m), &
-                                     microstructure_crystallite(m), &
-                                     microstructure_Nconstituents(m)
+   write(6,'(1x,a32,1x,i12)') config_name_microstructure(m), &
+                              microstructure_Nconstituents(m)
    if (microstructure_Nconstituents(m) > 0) then
      do c = 1,microstructure_Nconstituents(m)
        write(6,'(a1,1x,a32,1x,a32,1x,f7.4)') '>',config_name_phase(microstructure_phase(c,m)),&
@@ -496,7 +491,6 @@ subroutine material_parseMicrostructure
  character(len=65536) :: &
    tag

- allocate(microstructure_crystallite(size(config_microstructure)), source=0)
  allocate(microstructure_Nconstituents(size(config_microstructure)), source=0)
  allocate(microstructure_active(size(config_microstructure)), source=.false.)

@@ -508,7 +502,6 @@ subroutine material_parseMicrostructure

  do m=1, size(config_microstructure)
    microstructure_Nconstituents(m) = config_microstructure(m)%countKeys('(constituent)')
-   microstructure_crystallite(m) = config_microstructure(m)%getInt('crystallite')
  enddo

  microstructure_maxNconstituents = maxval(microstructure_Nconstituents)