Merge branch 'development' into variableName=ParameterName
commit 40d28456af
@@ -25,6 +25,7 @@ before_script:
 fi
 - while [ $(awk "/$CI_PIPELINE_ID/{print NR}" $TESTROOT/GitLabCI.queue) != 1 ];
 do sleep 5m;
+echo -e "Currently queued pipelines:\n$(cat $TESTROOT/GitLabCI.queue)\n";
 done
 - source $DAMASKROOT/env/DAMASK.sh
 - cd $DAMASKROOT/PRIVATE/testing
@@ -87,6 +88,7 @@ checkout:
 - echo $CI_PIPELINE_ID >> $TESTROOT/GitLabCI.queue
 - while [ $(awk "/$CI_PIPELINE_ID/{print NR}" $TESTROOT/GitLabCI.queue) != 1 ];
 do sleep 5m;
+echo -e "Currently queued pipelines:\n$(cat $TESTROOT/GitLabCI.queue)\n";
 done
 script:
 - mkdir -p $DAMASKROOT
@@ -42,11 +42,10 @@ rot_to_TSL = damask.Rotation.from_axis_angle([-1,0,0,.75*np.pi])
 for name in filenames:
 damask.util.report(scriptName,name)

-table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)

 coord = - table.get(options.frame)
 coord[:,2] += table.get(options.depth)[:,0]

-table.add('coord',rot_to_TSL.broadcast_to(coord.shape[0]) @ coord,scriptID+' '+' '.join(sys.argv[1:]))
-table.to_file(sys.stdout if name is None else name)
+table.add('coord',rot_to_TSL.broadcast_to(coord.shape[0]) @ coord,scriptID+' '+' '.join(sys.argv[1:]))\
+     .save((sys.stdout if name is None else name),legacy=True)
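The same migration recurs in every post-processing script touched below: damask.Table.from_ASCII becomes damask.Table.load, and table.to_file becomes table.save(...,legacy=True) so the old ASCII layout is preserved. A minimal sketch of the new call sequence, assuming only the signatures visible in this diff ('data.txt' and the 'frame' column label are hypothetical):

    import sys
    import damask

    table = damask.Table.load('data.txt')          # a file name or an open stream both work
    coord = -table.get('frame')                    # pull a column as a NumPy array
    table.add('coord',coord,'example comment')\
         .save(sys.stdout,legacy=True)             # write back in the legacy ASCII format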
@@ -39,10 +39,10 @@ if options.labels is None:
 for name in filenames:
 damask.util.report(scriptName,name)

-table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
 for label in options.labels:
-table.add('cum_{}({})'.format('prod' if options.product else 'sum',label),
+table = table.add('cum_{}({})'.format('prod' if options.product else 'sum',label),
 np.cumprod(table.get(label),0) if options.product else np.cumsum(table.get(label),0),
 scriptID+' '+' '.join(sys.argv[1:]))

-table.to_file(sys.stdout if name is None else name)
+table.save((sys.stdout if name is None else name),legacy=True)
@@ -38,8 +38,8 @@ for filename in options.filenames:
 N_digits = int(np.floor(np.log10(int(results.increments[-1][3:]))))+1
 N_digits = 5 # hack to keep test intact
 for inc in damask.util.show_progress(results.iterate('increments'),len(results.increments)):
-table = damask.Table(np.ones(np.product(results.grid),dtype=int)*int(inc[3:]),{'inc':(1,)})
-table = table.add('pos',coords.reshape(-1,3))
+table = damask.Table(np.ones(np.product(results.grid),dtype=int)*int(inc[3:]),{'inc':(1,)})\
+              .add('pos',coords.reshape(-1,3))

 results.pick('materialpoints',False)
 results.pick('constituents', True)
@@ -60,4 +60,4 @@ for filename in options.filenames:
 os.mkdir(dirname,0o755)
 file_out = '{}_inc{}.txt'.format(os.path.splitext(os.path.split(filename)[-1])[0],
 inc[3:].zfill(N_digits))
-table.to_file(os.path.join(dirname,file_out))
+table.save(os.path.join(dirname,file_out),legacy=True)
@@ -172,7 +172,7 @@ if filenames == []: filenames = [None]
 for name in filenames:
 damask.util.report(scriptName,name)

-table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
 grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))

 F = table.get(options.defgrad).reshape(tuple(grid)+(-1,),order='F').reshape(tuple(grid)+(3,3))
@@ -191,4 +191,4 @@ for name in filenames:
 volumeMismatch.reshape(-1,1,order='F'),
 scriptID+' '+' '.join(sys.argv[1:]))

-table.to_file(sys.stdout if name is None else name)
+table.save((sys.stdout if name is None else name), legacy=True)
@@ -43,7 +43,7 @@ if options.labels is None: parser.error('no data column specified.')
 for name in filenames:
 damask.util.report(scriptName,name)

-table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
 grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))

 for label in options.labels:
@@ -55,4 +55,4 @@ for name in filenames:
 curl.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape),order='F'),
 scriptID+' '+' '.join(sys.argv[1:]))

-table.to_file(sys.stdout if name is None else name)
+table.save((sys.stdout if name is None else name), legacy=True)
@@ -14,9 +14,9 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID = ' '.join([scriptName,damask.version])

 def derivative(coordinates,what):

 result = np.empty_like(what)

 # use differentiation by interpolation
 # as described in http://www2.math.umd.edu/~dlevy/classes/amsc466/lecture-notes/differentiation-chap.pdf

@@ -31,7 +31,7 @@ def derivative(coordinates,what):
 (coordinates[0] - coordinates[1])
 result[-1,:] = (what[-1,:] - what[-2,:]) / \
 (coordinates[-1] - coordinates[-2])

 return result


@@ -65,10 +65,10 @@ if options.labels is None:
 for name in filenames:
 damask.util.report(scriptName,name)

-table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
 for label in options.labels:
 table = table.add('d({})/d({})'.format(label,options.coordinates),
 derivative(table.get(options.coordinates),table.get(label)),
 scriptID+' '+' '.join(sys.argv[1:]))

-table.to_file(sys.stdout if name is None else name)
+table.save((sys.stdout if name is None else name), legacy=True)
@@ -47,25 +47,25 @@ parser.set_defaults(f = 'f',
 for name in filenames:
 damask.util.report(scriptName,name)

-table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
 grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))

 F = table.get(options.f).reshape(tuple(grid)+(-1,),order='F').reshape(tuple(grid)+(3,3))
 if options.nodal:
-table = damask.Table(damask.grid_filters.node_coord0(grid,size).reshape(-1,3,order='F'),
+damask.Table(damask.grid_filters.node_coord0(grid,size).reshape(-1,3,order='F'),
 {'pos':(3,)})\
 .add('avg({}).{}'.format(options.f,options.pos),
 damask.grid_filters.node_displacement_avg(size,F).reshape(-1,3,order='F'),
 scriptID+' '+' '.join(sys.argv[1:]))\
 .add('fluct({}).{}'.format(options.f,options.pos),
 damask.grid_filters.node_displacement_fluct(size,F).reshape(-1,3,order='F'),
-scriptID+' '+' '.join(sys.argv[1:]))
-table.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt')
+scriptID+' '+' '.join(sys.argv[1:]))\
+.save((sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt'), legacy=True)
 else:
-table = table.add('avg({}).{}'.format(options.f,options.pos),
+table.add('avg({}).{}'.format(options.f,options.pos),
 damask.grid_filters.cell_displacement_avg(size,F).reshape(-1,3,order='F'),
 scriptID+' '+' '.join(sys.argv[1:]))\
 .add('fluct({}).{}'.format(options.f,options.pos),
 damask.grid_filters.cell_displacement_fluct(size,F).reshape(-1,3,order='F'),
-scriptID+' '+' '.join(sys.argv[1:]))
-table.to_file(sys.stdout if name is None else name)
+scriptID+' '+' '.join(sys.argv[1:]))\
+.save((sys.stdout if name is None else name), legacy=True)
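The displacement script also shows the functional style the new Table API encourages: add returns a Table, so columns can either be rebound (table = table.add(...)) or chained straight into save. A short sketch under that assumption; the labels, the eight-point array, and 'displacement.txt' are invented for illustration:

    import numpy as np
    import damask

    displ = np.zeros((8,3))                                   # hypothetical per-point vectors
    table = damask.Table(np.arange(8),{'id':(1,)})            # data plus a {label: shape} mapping

    table.add('avg(f).pos',displ,'example comment')\
         .add('fluct(f).pos',displ,'example comment')\
         .save('displacement.txt',legacy=True)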
@@ -43,7 +43,7 @@ if options.labels is None: parser.error('no data column specified.')
 for name in filenames:
 damask.util.report(scriptName,name)

-table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
 grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))

 for label in options.labels:
@@ -55,4 +55,4 @@ for name in filenames:
 div.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)//3,order='F'),
 scriptID+' '+' '.join(sys.argv[1:]))

-table.to_file(sys.stdout if name is None else name)
+table.save((sys.stdout if name is None else name), legacy=True)
@@ -142,7 +142,7 @@ for i,feature in enumerate(features):
 for name in filenames:
 damask.util.report(scriptName,name)

-table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
 grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))

 neighborhood = neighborhoods[options.neighborhood]
@@ -158,7 +158,7 @@ for name in filenames:
 diffToNeighbor[:,:,:,i] = ndimage.convolve(microstructure,stencil) # compare ID at each point...
 # ...to every one in the specified neighborhood
 # for same IDs at both locations ==> 0

 diffToNeighbor = np.sort(diffToNeighbor) # sort diff such that number of changes in diff (steps)...
 # ...reflects number of unique neighbors
 uniques = np.where(diffToNeighbor[1:-1,1:-1,1:-1,0] != 0, 1,0) # initialize unique value counter (exclude myself [= 0])
@@ -184,4 +184,4 @@ for name in filenames:
 distance[i,:],
 scriptID+' '+' '.join(sys.argv[1:]))

-table.to_file(sys.stdout if name is None else name)
+table.save((sys.stdout if name is None else name), legacy=True)
@@ -63,7 +63,7 @@ if options.labels is None: parser.error('no data column specified.')
 for name in filenames:
 damask.util.report(scriptName,name)

-table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
 damask.grid_filters.coord0_check(table.get(options.pos))

 for label in options.labels:
@@ -73,4 +73,4 @@ for name in filenames:
 mode = 'wrap' if options.periodic else 'nearest'),
 scriptID+' '+' '.join(sys.argv[1:]))

-table.to_file(sys.stdout if name is None else name)
+table.save((sys.stdout if name is None else name), legacy=True)
@@ -43,7 +43,7 @@ if options.labels is None: parser.error('no data column specified.')
 for name in filenames:
 damask.util.report(scriptName,name)

-table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
 grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))

 for label in options.labels:
@@ -55,4 +55,4 @@ for name in filenames:
 grad.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)*3,order='F'),
 scriptID+' '+' '.join(sys.argv[1:]))

-table.to_file(sys.stdout if name is None else name)
+table.save((sys.stdout if name is None else name), legacy=True)
@@ -110,7 +110,7 @@ R = damask.Rotation.from_axis_angle(np.array(options.labrotation),options.degree
 for name in filenames:
 damask.util.report(scriptName,name)

-table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)

 if options.eulers is not None:
 label = options.eulers
@@ -147,4 +147,4 @@ for name in filenames:
 if 'axisangle' in options.output:
 table = table.add('om({})'.format(label),o.as_axisangle(options.degrees), scriptID+' '+' '.join(sys.argv[1:]))

-table.to_file(sys.stdout if name is None else name)
+table.save((sys.stdout if name is None else name), legacy=True)
@@ -175,7 +175,7 @@ labels = ['S[{direction[0]:.1g}_{direction[1]:.1g}_{direction[2]:.1g}]'
 for name in filenames:
 damask.util.report(scriptName,name)

-table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)

 o = damask.Rotation.from_quaternion(table.get(options.quaternion))

@@ -189,4 +189,4 @@ for name in filenames:
 for i,label in enumerate(labels):
 table = table.add(label,S[:,i],scriptID+' '+' '.join(sys.argv[1:]))

-table.to_file(sys.stdout if name is None else name)
+table.save((sys.stdout if name is None else name), legacy=True)
@@ -27,7 +27,7 @@ def sortingList(labels,whitelistitems):
 else:
 indices.append(0)
 names.append(label)

 return [indices,names,whitelistitems]


@@ -72,11 +72,11 @@ for name in filenames:
 continue
 damask.util.report(scriptName,name)

 # ------------------------------------------ assemble info ---------------------------------------

 table.head_read()

 # ------------------------------------------ process data ---------------------------------------

 specials = { \
 '_row_': 0,
@@ -103,12 +103,12 @@ for name in filenames:
 else np.lexsort(sortingList(labels,whitelistitem)) # reorder if unique, i.e. no "-1" in whitelistitem
 else:
 order = range(len(labels)) # maintain original order of labels

 # --------------------------------------- evaluate condition ---------------------------------------
 if options.condition is not None:
 condition = options.condition # copy per file, since might be altered inline
 breaker = False

 for position,(all,marker,column) in enumerate(set(re.findall(r'#(([s]#)?(.+?))#',condition))): # find three groups
 idx = table.label_index(column)
 dim = table.label_dimension(column)
@@ -123,11 +123,11 @@ for name in filenames:
 's#':'str'}[marker],idx) # take float or string value of data column
 elif dim > 1: # multidimensional input (vector, tensor, etc.)
 replacement = 'np.array(table.data[{}:{}],dtype=float)'.format(idx,idx+dim) # use (flat) array representation

 condition = condition.replace('#'+all+'#',replacement)

 if breaker: continue # found mistake in condition evaluation --> next file

 # ------------------------------------------ assemble header ---------------------------------------

 table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
@@ -138,7 +138,7 @@ for name in filenames:
 # ------------------------------------------ process and output data ------------------------------------------

 positions = np.array(positions)[order]

 atOnce = options.condition is None
 if atOnce: # read full array and filter columns
 try:
@@ -47,7 +47,7 @@ if filenames == []: filenames = [None]
 for name in filenames:
 damask.util.report(scriptName,name)

-table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)

 randomSeed = int(os.urandom(4).hex(), 16) if options.randomSeed is None else options.randomSeed # random seed per file
 rng = np.random.default_rng(randomSeed)
@@ -58,4 +58,4 @@ for name in filenames:
 rng.shuffle(uniques)
 table = table.set(label,uniques[inverse], scriptID+' '+' '.join(sys.argv[1:]))

-table.to_file(sys.stdout if name is None else name)
+table.save((sys.stdout if name is None else name), legacy=True)
@@ -52,16 +52,11 @@ parser.add_option('-q', '--quaternion',
 type = 'string',
 metavar='string',
 help = 'name of the dataset containing pointwise/average orientation as quaternion [%default]')
-parser.add_option('--homogenization',
-dest = 'homogenization',
-type = 'int', metavar = 'int',
-help = 'homogenization index to be used [%default]')

 parser.set_defaults(pointwise = 'CellData',
 quaternion = 'Quats',
 phase = 'Phases',
 microstructure = 'FeatureIds',
-homogenization = 1,
 )

 (options, filenames) = parser.parse_args()
@@ -150,8 +145,7 @@ for name in filenames:

 header = [scriptID + ' ' + ' '.join(sys.argv[1:])]\
 + config_header
-geom = damask.Geom(microstructure,size,origin,
-homogenization=options.homogenization,comments=header)
+geom = damask.Geom(microstructure,size,origin,comments=header)
 damask.util.croak(geom)

-geom.to_file(os.path.splitext(name)[0]+'.geom',format='ASCII',pack=False)
+geom.save_ASCII(os.path.splitext(name)[0]+'.geom',compress=False)
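The geometry-generation scripts follow a parallel scheme: the --homogenization option and the homogenization keyword disappear from the Geom constructor, and geom.to_file(...,format='ASCII',pack=False) becomes geom.save_ASCII(...,compress=False). A minimal sketch assuming exactly the constructor and calls shown in this hunk (the index field, sizes, and 'example.geom' are made up):

    import numpy as np
    import damask

    microstructure = np.ones((16,16,16),dtype=int)       # hypothetical material index field
    geom = damask.Geom(microstructure,(1.0,1.0,1.0),(0.0,0.0,0.0),
                       comments=['example header line'])
    damask.util.croak(geom)                               # report the geometry, as the scripts do
    geom.save_ASCII('example.geom',compress=False)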
@@ -52,10 +52,6 @@ parser.add_option('-p', '--periods',
 dest = 'periods',
 type = 'int', metavar = 'int',
 help = 'number of repetitions of unit cell [%default]')
-parser.add_option('--homogenization',
-dest = 'homogenization',
-type = 'int', metavar = 'int',
-help = 'homogenization index to be used [%default]')
 parser.add_option('--m',
 dest = 'microstructure',
 type = 'int', nargs = 2, metavar = 'int int',
@@ -66,7 +62,6 @@ parser.set_defaults(type = minimal_surfaces[0],
 periods = 1,
 grid = (16,16,16),
 size = (1.0,1.0,1.0),
-homogenization = 1,
 microstructure = (1,2),
 )

@@ -85,8 +80,7 @@ microstructure = np.where(options.threshold < surface[options.type](x,y,z),
 options.microstructure[1],options.microstructure[0])

 geom=damask.Geom(microstructure,options.size,
-homogenization=options.homogenization,
 comments=[scriptID + ' ' + ' '.join(sys.argv[1:])])
 damask.util.croak(geom)

-geom.to_file(sys.stdout if name is None else name,format='ASCII',pack=False)
+geom.save_ASCII(sys.stdout if name is None else name,compress=False)
@@ -57,10 +57,6 @@ parser.add_option('-w', '--omega',
 dest='omega',
 type='float', metavar = 'float',
 help='rotation angle around normal of osteon [%default]')
-parser.add_option( '--homogenization',
-dest='homogenization',
-type='int', metavar = 'int',
-help='homogenization index to be used [%default]')

 parser.set_defaults(canal = 25e-6,
 osteon = 100e-6,
@@ -70,7 +66,7 @@ parser.set_defaults(canal = 25e-6,
 amplitude = 60,
 size = (300e-6,300e-6),
 grid = (512,512),
-homogenization = 1)
+)

 (options,filename) = parser.parse_args()

@@ -139,7 +135,7 @@ header = [scriptID + ' ' + ' '.join(sys.argv[1:])]\
 + config_header
 geom = damask.Geom(microstructure.reshape(grid),
 size,-size/2,
-homogenization=options.homogenization,comments=header)
+comments=header)
 damask.util.croak(geom)

-geom.to_file(sys.stdout if name is None else name,format='ASCII',pack=False)
+geom.save_ASCII(sys.stdout if name is None else name,compress=False)
@@ -44,14 +44,9 @@ parser.add_option('--axes',
 dest = 'axes',
 type = 'string', nargs = 3, metavar = ' '.join(['string']*3),
 help = 'orientation coordinate frame in terms of position coordinate frame [+x +y +z]')
-parser.add_option('--homogenization',
-dest = 'homogenization',
-type = 'int', metavar = 'int',
-help = 'homogenization index to be used [%default]')


-parser.set_defaults(homogenization = 1,
-pos = 'pos',
+parser.set_defaults(pos = 'pos',
 )

 (options,filenames) = parser.parse_args()
@@ -68,7 +63,7 @@ if options.axes is not None and not set(options.axes).issubset(set(['x','+x','-x
 for name in filenames:
 damask.util.report(scriptName,name)

-table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
 table.sort_by(['{}_{}'.format(i,options.pos) for i in range(3,0,-1)]) # x fast, y slow
 grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))

@@ -102,8 +97,7 @@ for name in filenames:
 header = [scriptID + ' ' + ' '.join(sys.argv[1:])]\
 + config_header
 geom = damask.Geom(microstructure,size,origin,
-homogenization=options.homogenization,comments=header)
+comments=header)
 damask.util.croak(geom)

-geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',
-format='ASCII',pack=False)
+geom.save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',compress=False)
@@ -142,10 +142,6 @@ group.add_option('--without-config',
 dest = 'config',
 action = 'store_false',
 help = 'omit material configuration header')
-group.add_option('--homogenization',
-dest = 'homogenization',
-type = 'int', metavar = 'int',
-help = 'homogenization index to be used [%default]')
 group.add_option('--phase',
 dest = 'phase',
 type = 'int', metavar = 'int',
@@ -157,7 +153,6 @@ parser.set_defaults(pos = 'pos',
 weight = 'weight',
 microstructure = 'microstructure',
 eulers = 'euler',
-homogenization = 1,
 phase = 1,
 cpus = 2,
 laguerre = False,
@@ -171,7 +166,7 @@ if filenames == []: filenames = [None]
 for name in filenames:
 damask.util.report(scriptName,name)

-table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)

 size = np.ones(3)
 origin = np.zeros(3)
@@ -225,8 +220,7 @@ for name in filenames:
 header = [scriptID + ' ' + ' '.join(sys.argv[1:])]\
 + config_header
 geom = damask.Geom(indices.reshape(grid),size,origin,
-homogenization=options.homogenization,comments=header)
+comments=header)
 damask.util.croak(geom)

-geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',
-format='ASCII',pack=False)
+geom.save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',compress=False)
@@ -41,7 +41,7 @@ parser.add_option('-N', '--iterations',
 help = 'curvature flow iterations [%default]')
 parser.add_option('-i', '--immutable',
 action = 'extend', dest = 'immutable', metavar = '<int LIST>',
-help = 'list of immutable microstructure indices')
+help = 'list of immutable material indices')
 parser.add_option('--ndimage',
 dest = 'ndimage', action='store_true',
 help = 'use ndimage.gaussian_filter in lieu of explicit FFT')
@@ -62,17 +62,17 @@ if filenames == []: filenames = [None]
 for name in filenames:
 damask.util.report(scriptName,name)

-geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
+geom = damask.Geom.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)

-grid_original = geom.get_grid()
+grid_original = geom.grid
 damask.util.croak(geom)
-microstructure = np.tile(geom.microstructure,np.where(grid_original == 1, 2,1)) # make one copy along dimensions with grid == 1
-grid = np.array(microstructure.shape)
+material = np.tile(geom.material,np.where(grid_original == 1, 2,1)) # make one copy along dimensions with grid == 1
+grid = np.array(material.shape)

 # --- initialize support data ---------------------------------------------------------------------

-# store a copy the initial microstructure to find locations of immutable indices
-microstructure_original = np.copy(microstructure)
+# store a copy of the initial material indices to find locations of immutable indices
+material_original = np.copy(material)

 if not options.ndimage:
 X,Y,Z = np.mgrid[0:grid[0],0:grid[1],0:grid[2]]
@@ -88,14 +88,14 @@ for name in filenames:

 for smoothIter in range(options.N):

-interfaceEnergy = np.zeros(microstructure.shape,dtype=np.float32)
+interfaceEnergy = np.zeros(material.shape,dtype=np.float32)
 for i in (-1,0,1):
 for j in (-1,0,1):
 for k in (-1,0,1):
 # assign interfacial energy to all voxels that have a differing neighbor (in Moore neighborhood)
 interfaceEnergy = np.maximum(interfaceEnergy,
-getInterfaceEnergy(microstructure,np.roll(np.roll(np.roll(
-microstructure,i,axis=0), j,axis=1), k,axis=2)))
+getInterfaceEnergy(material,np.roll(np.roll(np.roll(
+material,i,axis=0), j,axis=1), k,axis=2)))

 # periodically extend interfacial energy array by half a grid size in positive and negative directions
 periodic_interfaceEnergy = np.tile(interfaceEnergy,(3,3,3))[grid[0]//2:-grid[0]//2,
@@ -129,13 +129,13 @@ for name in filenames:
 iterations = int(round(options.d*2.))-1),# fat boundary
 periodic_bulkEnergy[grid[0]//2:-grid[0]//2, # retain filled energy on fat boundary...
 grid[1]//2:-grid[1]//2,
 grid[2]//2:-grid[2]//2], # ...and zero everywhere else
 0.)).astype(np.complex64) *
 gauss).astype(np.float32)

 periodic_diffusedEnergy = np.tile(diffusedEnergy,(3,3,3))[grid[0]//2:-grid[0]//2,
 grid[1]//2:-grid[1]//2,
 grid[2]//2:-grid[2]//2] # periodically extend the smoothed bulk energy


 # transform voxels close to interface region
@@ -143,33 +143,35 @@ for name in filenames:
 return_distances = False,
 return_indices = True) # want index of closest bulk grain

-periodic_microstructure = np.tile(microstructure,(3,3,3))[grid[0]//2:-grid[0]//2,
+periodic_material = np.tile(material,(3,3,3))[grid[0]//2:-grid[0]//2,
 grid[1]//2:-grid[1]//2,
-grid[2]//2:-grid[2]//2] # periodically extend the microstructure
+grid[2]//2:-grid[2]//2] # periodically extend the geometry

-microstructure = periodic_microstructure[index[0],
+material = periodic_material[index[0],
 index[1],
 index[2]].reshape(2*grid)[grid[0]//2:-grid[0]//2,
 grid[1]//2:-grid[1]//2,
 grid[2]//2:-grid[2]//2] # extent grains into interface region

-# replace immutable microstructures with closest mutable ones
-index = ndimage.morphology.distance_transform_edt(np.in1d(microstructure,options.immutable).reshape(grid),
+# replace immutable materials with closest mutable ones
+index = ndimage.morphology.distance_transform_edt(np.in1d(material,options.immutable).reshape(grid),
 return_distances = False,
 return_indices = True)
-microstructure = microstructure[index[0],
+material = material[index[0],
 index[1],
 index[2]]

-immutable = np.zeros(microstructure.shape, dtype=np.bool)
-# find locations where immutable microstructures have been in original structure
+immutable = np.zeros(material.shape, dtype=np.bool)
+# find locations where immutable materials have been in original structure
 for micro in options.immutable:
-immutable += microstructure_original == micro
+immutable += material_original == micro

-# undo any changes involving immutable microstructures
-microstructure = np.where(immutable, microstructure_original,microstructure)
+# undo any changes involving immutable materials
+material = np.where(immutable, material_original,material)

-geom=geom.duplicate(microstructure[0:grid_original[0],0:grid_original[1],0:grid_original[2]])
-geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
-geom.to_file(sys.stdout if name is None else name,format='ASCII',pack=False)
+damask.Geom(material = material[0:grid_original[0],0:grid_original[1],0:grid_original[2]],
+size = geom.size,
+origin = geom.origin,
+comments = geom.comments + [scriptID + ' ' + ' '.join(sys.argv[1:])],
+)\
+.save_ASCII(sys.stdout if name is None else name,compress=False)
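This hunk shows the other half of the Geom migration: reading with Geom.load_ASCII instead of Geom.from_file, using grid, material, size, origin, and comments as plain attributes, and writing by constructing a fresh Geom with keyword arguments and chaining save_ASCII onto it. A round-trip sketch under those assumptions ('in.geom' and 'out.geom' are hypothetical names):

    import damask

    geom      = damask.Geom.load_ASCII('in.geom')
    relabeled = geom.material.copy()                     # modify the index field as needed

    damask.Geom(material = relabeled,
                size     = geom.size,
                origin   = geom.origin,
                comments = geom.comments + ['example note'],
               ).save_ASCII('out.geom',compress=False)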
@@ -31,7 +31,7 @@ def binAsBins(bin,intervals):
 bins[1] = (bin//intervals[2]) % intervals[1]
 bins[2] = bin % intervals[2]
 return bins

 def binsAsBin(bins,intervals):
 """Implode 3D bins into compound bin."""
 return (bins[0]*intervals[1] + bins[1])*intervals[2] + bins[2]
@@ -95,7 +95,7 @@ def directInversion (ODF,nSamples):
 float(nInvSamples)/nOptSamples-1.0,
 scale,nSamples))
 repetition = [None]*ODF['nBins'] # preallocate and clear

 for bin in range(ODF['nBins']): # loop over bins
 repetition[bin] = int(round(ODF['dV_V'][bin]*scale)) # calc repetition

@@ -105,7 +105,7 @@ def directInversion (ODF,nSamples):
 for bin in range(ODF['nBins']):
 set[i:i+repetition[bin]] = [bin]*repetition[bin] # fill set with bin, i.e. orientation
 i += repetition[bin] # advance set counter

 orientations = np.zeros((nSamples,3),'f')
 reconstructedODF = np.zeros(ODF['nBins'],'f')
 unitInc = 1.0/nSamples
@@ -117,7 +117,7 @@ def directInversion (ODF,nSamples):
 orientations[j] = np.degrees(Eulers)
 reconstructedODF[bin] += unitInc
 set[ex] = set[j] # exchange orientations

 return orientations, reconstructedODF


@@ -130,7 +130,7 @@ def MonteCarloEulers (ODF,nSamples):
 orientations = np.zeros((nSamples,3),'f')
 reconstructedODF = np.zeros(ODF['nBins'],'f')
 unitInc = 1.0/nSamples

 for j in range(nSamples):
 MC = maxdV_V*2.0
 bin = 0
@@ -153,7 +153,7 @@ def MonteCarloBins (ODF,nSamples):
 orientations = np.zeros((nSamples,3),'f')
 reconstructedODF = np.zeros(ODF['nBins'],'f')
 unitInc = 1.0/nSamples

 for j in range(nSamples):
 MC = maxdV_V*2.0
 bin = 0
@@ -173,14 +173,14 @@ def TothVanHoutteSTAT (ODF,nSamples):
 orientations = np.zeros((nSamples,3),'f')
 reconstructedODF = np.zeros(ODF['nBins'],'f')
 unitInc = 1.0/nSamples

 selectors = [random.random() for i in range(nSamples)]
 selectors.sort()
 indexSelector = 0

 cumdV_V = 0.0
 countSamples = 0

 for bin in range(ODF['nBins']) :
 cumdV_V += ODF['dV_V'][bin]
 while indexSelector < nSamples and selectors[indexSelector] < cumdV_V:
@@ -191,7 +191,7 @@ def TothVanHoutteSTAT (ODF,nSamples):
 indexSelector += 1

 damask.util.croak('created set of %i when asked to deliver %i'%(countSamples,nSamples))

 return orientations, reconstructedODF


@@ -233,8 +233,8 @@ if filenames == []: filenames = [None]

 for name in filenames:
 damask.util.report(scriptName,name)

-table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)

 randomSeed = int(os.urandom(4).hex(),16) if options.randomSeed is None else options.randomSeed # random seed per file
 random.seed(randomSeed)
@@ -253,7 +253,7 @@ for name in filenames:
 if eulers.shape[0] != ODF['nBins']:
 damask.util.croak('expecting %i values but got %i'%(ODF['nBins'],eulers.shape[0]))
 continue

 # ----- build binnedODF array and normalize ------------------------------------------------------
 sumdV_V = 0.0
 ODF['dV_V'] = [None]*ODF['nBins']
@@ -267,7 +267,7 @@ for name in filenames:
 if ODF['dV_V'][b] > 0.0:
 sumdV_V += ODF['dV_V'][b]
 ODF['nNonZero'] += 1

 for b in range(ODF['nBins']):
 ODF['dV_V'][b] /= sumdV_V # normalize dV/V

@@ -277,19 +277,19 @@ for name in filenames:
 'Volume integral of ODF: %12.11f\n'%sumdV_V,
 'Reference Integral: %12.11f\n'%(ODF['limit'][0]*ODF['limit'][2]*(1-math.cos(ODF['limit'][1]))),
 ])

 Functions = {'IA': 'directInversion', 'STAT': 'TothVanHoutteSTAT', 'MC': 'MonteCarloBins'}
 method = Functions[options.algorithm]

 Orientations, ReconstructedODF = (globals()[method])(ODF,options.number)

 # calculate accuracy of sample
 squaredDiff = {'orig':0.0,method:0.0}
 squaredRelDiff = {'orig':0.0,method:0.0}
 mutualProd = {'orig':0.0,method:0.0}
 indivSum = {'orig':0.0,method:0.0}
 indivSquaredSum = {'orig':0.0,method:0.0}

 for bin in range(ODF['nBins']):
 squaredDiff[method] += (ODF['dV_V'][bin] - ReconstructedODF[bin])**2
 if ODF['dV_V'][bin] > 0.0:
@@ -299,7 +299,7 @@ for name in filenames:
 indivSquaredSum[method] += ReconstructedODF[bin]**2
 indivSum['orig'] += ODF['dV_V'][bin]
 indivSquaredSum['orig'] += ODF['dV_V'][bin]**2

 damask.util.croak(['sqrt(N*)RMSD of ODFs:\t %12.11f'% math.sqrt(options.number*squaredDiff[method]),
 'RMSrD of ODFs:\t %12.11f'%math.sqrt(squaredRelDiff[method]),
 'rMSD of ODFs:\t %12.11f'%(squaredDiff[method]/indivSquaredSum['orig']),
@@ -311,10 +311,10 @@ for name in filenames:
 (ODF['nNonZero']*math.sqrt((indivSquaredSum['orig']/ODF['nNonZero']-(indivSum['orig']/ODF['nNonZero'])**2)*\
 (indivSquaredSum[method]/ODF['nNonZero']-(indivSum[method]/ODF['nNonZero'])**2)))),
 ])

 if method == 'IA' and options.number < ODF['nNonZero']:
 strOpt = '(%i)'%ODF['nNonZero']

 formatwidth = 1+int(math.log10(options.number))

 materialConfig = [
@@ -324,12 +324,12 @@ for name in filenames:
 '<microstructure>',
 '#-------------------#',
 ]

 for i,ID in enumerate(range(options.number)):
 materialConfig += ['[Grain%s]'%(str(ID+1).zfill(formatwidth)),
 '(constituent) phase %i texture %s fraction 1.0'%(options.phase,str(ID+1).rjust(formatwidth)),
 ]

 materialConfig += [
 '#-------------------#',
 '<texture>',
@@ -338,12 +338,12 @@ for name in filenames:

 for ID in range(options.number):
 eulers = Orientations[ID]

 materialConfig += ['[Grain%s]'%(str(ID+1).zfill(formatwidth)),
 '(gauss) phi1 {} Phi {} phi2 {} scatter 0.0 fraction 1.0'.format(*eulers),
 ]

 #--- output finalization --------------------------------------------------------------------------

 with (open(os.path.splitext(name)[0]+'_'+method+'_'+str(options.number)+'_material.config','w')) as outfile:
 outfile.write('\n'.join(materialConfig)+'\n')
@@ -42,7 +42,7 @@ def output(cmds,locals,dest):
 else:
 outFile(str(cmd),locals,dest)


 #-------------------------------------------------------------------------------------------------
 def init():
 return [
@@ -100,7 +100,7 @@ def mesh(r,d):


 #-------------------------------------------------------------------------------------------------
-def material():
+def materials():
 return [\
 "*new_mater standard",
 "*mater_option general:state:solid",
@@ -114,7 +114,7 @@ def material():
 "*add_geometry_elements",
 "all_existing",
 ]


 #-------------------------------------------------------------------------------------------------
 def geometry():
@@ -127,14 +127,14 @@ def geometry():
 "*element_type 7",
 "all_existing",
 ]


 #-------------------------------------------------------------------------------------------------
-def initial_conditions(microstructures):
+def initial_conditions(material):
 elements = []
 element = 0
-for id in microstructures:
+for id in material:
 element += 1
 if len(elements) < id:
 for i in range(id-len(elements)):
 elements.append([])
@@ -153,7 +153,7 @@ def initial_conditions(microstructures):
 for grain,elementList in enumerate(elements):
 cmds.append([\
 "*new_icond",
-"*icond_name microstructure_%i"%(grain+1),
+"*icond_name material_%i"%(grain+1),
 "*icond_type state_variable",
 "*icond_param_value state_var_id 2",
 "*icond_dof_value var %i"%(grain+1),
@@ -195,22 +195,22 @@ if filenames == []: filenames = [None]

 for name in filenames:
 damask.util.report(scriptName,name)

-geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
-microstructure = geom.get_microstructure().flatten(order='F')
+geom = damask.Geom.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+material = geom.material.flatten(order='F')

 cmds = [\
 init(),
 mesh(geom.grid,geom.size),
-material(),
+materials(),
 geometry(),
-initial_conditions(microstructure),
+initial_conditions(material),
 '*identify_sets',
 '*show_model',
 '*redraw',
 '*draw_automatic',
 ]

 outputLocals = {}
 if options.port:
 py_mentat.py_connect('',options.port)
@@ -30,7 +30,7 @@ class myThread (threading.Thread):
 def run(self):
 global bestSeedsUpdate
 global bestSeedsVFile
-global nMicrostructures
+global nMaterials
 global delta
 global points
 global target
@@ -70,7 +70,7 @@ class myThread (threading.Thread):
 selectedMs = []
 direction = []
 for i in range(NmoveGrains):
-selectedMs.append(random.randrange(1,nMicrostructures))
+selectedMs.append(random.randrange(1,nMaterials))

 direction.append((np.random.random()-0.5)*delta)

@@ -78,7 +78,7 @@ class myThread (threading.Thread):
 perturbedSeedsVFile = StringIO()
 myBestSeedsVFile.seek(0)

-perturbedSeedsTable = damask.Table.from_ASCII(myBestSeedsVFile)
+perturbedSeedsTable = damask.Table.load(myBestSeedsVFile)
 coords = perturbedSeedsTable.get('pos')
 i = 0
 for ms,coord in enumerate(coords):
@@ -89,8 +89,7 @@ class myThread (threading.Thread):
 coords[i]=newCoords
 direction[i]*=2.
 i+= 1
-perturbedSeedsTable.set('pos',coords)
-perturbedSeedsTable.to_file(perturbedSeedsVFile)
+perturbedSeedsTable.set('pos',coords).save(perturbedSeedsVFile,legacy=True)

 #--- do tesselation with perturbed seed file ------------------------------------------------------
 perturbedGeomVFile.close()
@@ -101,12 +100,12 @@ class myThread (threading.Thread):
 perturbedGeomVFile.seek(0)

 #--- evaluate current seeds file ------------------------------------------------------------------
-perturbedGeom = damask.Geom.from_file(perturbedGeomVFile)
-myNmicrostructures = len(np.unique(perturbedGeom.microstructure))
-currentData=np.bincount(perturbedGeom.microstructure.ravel())[1:]/points
+perturbedGeom = damask.Geom.load_ASCII(perturbedGeomVFile)
+myNmaterials = len(np.unique(perturbedGeom.material))
+currentData = np.bincount(perturbedGeom.material.ravel())[1:]/points
 currentError=[]
 currentHist=[]
-for i in range(nMicrostructures): # calculate the deviation in all bins per histogram
+for i in range(nMaterials): # calculate the deviation in all bins per histogram
 currentHist.append(np.histogram(currentData,bins=target[i]['bins'])[0])
 currentError.append(np.sqrt(np.square(np.array(target[i]['histogram']-currentHist[i])).sum()))

@@ -118,12 +117,12 @@ class myThread (threading.Thread):
 bestMatch = match
 #--- count bin classes with no mismatch ----------------------------------------------------------------------
 myMatch=0
-for i in range(nMicrostructures):
+for i in range(nMaterials):
 if currentError[i] > 0.0: break
 myMatch = i+1

-if myNmicrostructures == nMicrostructures:
-for i in range(min(nMicrostructures,myMatch+options.bins)):
+if myNmaterials == nMaterials:
+for i in range(min(nMaterials,myMatch+options.bins)):
 if currentError[i] > target[i]['error']: # worse fitting, next try
 randReset = True
 break
@@ -142,25 +141,25 @@ class myThread (threading.Thread):
 for line in perturbedSeedsVFile:
 currentSeedsFile.write(line)
 bestSeedsVFile.write(line)
-for j in range(nMicrostructures): # save new errors for all bins
+for j in range(nMaterials): # save new errors for all bins
 target[j]['error'] = currentError[j]
 if myMatch > match: # one or more new bins have no deviation
 damask.util.croak( 'Stage {:d} cleared'.format(myMatch))
 match=myMatch
 sys.stdout.flush()
 break
-if i == min(nMicrostructures,myMatch+options.bins)-1: # same quality as before: take it to keep on moving
+if i == min(nMaterials,myMatch+options.bins)-1: # same quality as before: take it to keep on moving
 bestSeedsUpdate = time.time()
 perturbedSeedsVFile.seek(0)
 bestSeedsVFile.close()
 bestSeedsVFile = StringIO()
 bestSeedsVFile.writelines(perturbedSeedsVFile.readlines())
-for j in range(nMicrostructures):
+for j in range(nMaterials):
 target[j]['error'] = currentError[j]
 randReset = True
 else: #--- not all grains are tessellated
-damask.util.croak('Thread {:d}: Microstructure mismatch ({:d} microstructures mapped)'\
-.format(self.threadID,myNmicrostructures))
+damask.util.croak('Thread {:d}: Material mismatch ({:d} material indices mapped)'\
+.format(self.threadID,myNmaterials))
 randReset = True


@@ -213,15 +212,15 @@ if options.randomSeed is None:
 options.randomSeed = int(os.urandom(4).hex(),16)
 damask.util.croak(options.randomSeed)
 delta = options.scale/np.array(options.grid)
-baseFile=os.path.splitext(os.path.basename(options.seedFile))[0]
+baseFile = os.path.splitext(os.path.basename(options.seedFile))[0]
 points = np.array(options.grid).prod().astype('float')

 # ----------- calculate target distribution and bin edges
-targetGeom = damask.Geom.from_file(os.path.splitext(os.path.basename(options.target))[0]+'.geom')
-nMicrostructures = len(np.unique(targetGeom.microstructure))
-targetVolFrac = np.bincount(targetGeom.microstructure.flatten())/targetGeom.grid.prod().astype(np.float)
-target=[]
-for i in range(1,nMicrostructures+1):
+targetGeom = damask.Geom.load_ASCII(os.path.splitext(os.path.basename(options.target))[0]+'.geom')
+nMaterials = len(np.unique(targetGeom.material))
+targetVolFrac = np.bincount(targetGeom.material.flatten())/targetGeom.grid.prod().astype(np.float)
+target = []
+for i in range(1,nMaterials+1):
 targetHist,targetBins = np.histogram(targetVolFrac,bins=i) #bin boundaries
 target.append({'histogram':targetHist,'bins':targetBins})

@@ -234,7 +233,7 @@ else:
 bestSeedsVFile.write(damask.util.execute('seeds_fromRandom'+\
 ' -g '+' '.join(list(map(str, options.grid)))+\
 ' -r {:d}'.format(options.randomSeed)+\
-' -N '+str(nMicrostructures))[0])
+' -N '+str(nMaterials))[0])
 bestSeedsUpdate = time.time()

 # ----------- tessellate initial seed file to get and evaluate geom file
@@ -243,13 +242,13 @@ initialGeomVFile = StringIO()
 initialGeomVFile.write(damask.util.execute('geom_fromVoronoiTessellation '+
 ' -g '+' '.join(list(map(str, options.grid))),bestSeedsVFile)[0])
 initialGeomVFile.seek(0)
-initialGeom = damask.Geom.from_file(initialGeomVFile)
+initialGeom = damask.Geom.load_ASCII(initialGeomVFile)

-if len(np.unique(targetGeom.microstructure)) != nMicrostructures:
-damask.util.croak('error. Microstructure count mismatch')
+if len(np.unique(targetGeom.material)) != nMaterials:
+damask.util.croak('error. Material count mismatch')

-initialData = np.bincount(initialGeom.microstructure.flatten())/points
-for i in range(nMicrostructures):
+initialData = np.bincount(initialGeom.material.flatten())/points
+for i in range(nMaterials):
 initialHist = np.histogram(initialData,bins=target[i]['bins'])[0]
 target[i]['error']=np.sqrt(np.square(np.array(target[i]['histogram']-initialHist)).sum())

@@ -258,13 +257,13 @@ if target[0]['error'] > 0.0:
 target[0]['error'] *=((target[0]['bins'][0]-np.min(initialData))**2.0+
 (target[0]['bins'][1]-np.max(initialData))**2.0)**0.5
 match=0
-for i in range(nMicrostructures):
+for i in range(nMaterials):
 if target[i]['error'] > 0.0: break
 match = i+1


 if options.maxseeds < 1:
-maxSeeds = len(np.unique(initialGeom.microstructure))
+maxSeeds = len(np.unique(initialGeom.material))
 else:
 maxSeeds = options.maxseeds

@@ -273,8 +272,8 @@ sys.stdout.flush()
 initialGeomVFile.close()

 # start mulithreaded monte carlo simulation
-threads=[]
-s=threading.Semaphore(1)
+threads = []
+s = threading.Semaphore(1)

 for i in range(options.threads):
 threads.append(myThread(i))
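For orientation when reviewing the Monte Carlo loop above: the acceptance criterion compares per-bin volume-fraction histograms of the tessellated geometry against the target. A minimal sketch of that error metric with made-up volume fractions (NumPy only, not DAMASK code):

    import numpy as np

    target_volfrac  = np.array([0.25, 0.25, 0.30, 0.20])   # hypothetical reference fractions
    current_volfrac = np.array([0.24, 0.27, 0.29, 0.20])   # hypothetical current fractions

    # target[i] holds a histogram with i+1 bins, mirroring the script's structure
    target = []
    for i in range(1, len(target_volfrac)+1):
        hist, bins = np.histogram(target_volfrac, bins=i)
        target.append({'histogram': hist, 'bins': bins})

    currentError = []
    for i in range(len(target)):
        currentHist = np.histogram(current_volfrac, bins=target[i]['bins'])[0]
        currentError.append(np.sqrt(np.square(target[i]['histogram'] - currentHist).sum()))
    print(currentError)   # per-histogram L2 deviation of bin counts, as used in the loop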
@@ -17,7 +17,7 @@ scriptID = ' '.join([scriptName,damask.version])
 #--------------------------------------------------------------------------------------------------

 parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
-Create seed file taking microstructure indices from given geom file.
+Create seed file taking material indices from given geom file.
 Indices can be black-listed or white-listed.

 """, version = scriptID)
@@ -46,12 +46,12 @@ options.blacklist = [int(i) for i in options.blacklist]
 for name in filenames:
 damask.util.report(scriptName,name)

-geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
-microstructure = geom.get_microstructure().reshape((-1,1),order='F')
+geom = damask.Geom.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+material = geom.material.reshape((-1,1),order='F')

-mask = np.logical_and(np.in1d(microstructure,options.whitelist,invert=False) if options.whitelist else \
+mask = np.logical_and(np.in1d(material,options.whitelist,invert=False) if options.whitelist else \
 np.full(geom.grid.prod(),True,dtype=bool),
-np.in1d(microstructure,options.blacklist,invert=True) if options.blacklist else \
+np.in1d(material,options.blacklist,invert=True) if options.blacklist else \
 np.full(geom.grid.prod(),True,dtype=bool))

 seeds = damask.grid_filters.cell_coord0(geom.grid,geom.size).reshape(-1,3,order='F')
@@ -61,8 +61,8 @@ for name in filenames:
 'grid\ta {}\tb {}\tc {}'.format(*geom.grid),
 'size\tx {}\ty {}\tz {}'.format(*geom.size),
 'origin\tx {}\ty {}\tz {}'.format(*geom.origin),
-'homogenization\t{}'.format(geom.homogenization)]
+]

-table = damask.Table(seeds[mask],{'pos':(3,)},comments)
-table = table.add('microstructure',microstructure[mask])
-table.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.seeds')
+damask.Table(seeds[mask],{'pos':(3,)},comments)\
+.add('material',material[mask].astype(int))\
+.save(sys.stdout if name is None else os.path.splitext(name)[0]+'.seeds',legacy=True)
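The seeds_fromGeom filter above keeps or drops cells purely by membership tests on the (renamed) material IDs. A small sketch with invented IDs and lists, mirroring the np.in1d logic:

    import numpy as np

    material  = np.array([1, 1, 2, 3, 3, 4])   # hypothetical per-cell material indices
    whitelist = [1, 3]                         # keep only these IDs (if given)
    blacklist = [3]                            # drop these IDs (if given)

    mask = np.logical_and(
        np.in1d(material, whitelist) if whitelist else np.full(material.size, True, dtype=bool),
        np.in1d(material, blacklist, invert=True) if blacklist else np.full(material.size, True, dtype=bool))
    print(material[mask])                      # -> [1 1]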
@@ -52,7 +52,7 @@ options.box = np.array(options.box).reshape(3,2)

 for name in filenames:
 damask.util.report(scriptName,name)
-geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
+geom = damask.Geom.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)

 offset =(np.amin(options.box, axis=1)*geom.grid/geom.size).astype(int)
 box = np.amax(options.box, axis=1) \
@@ -76,7 +76,7 @@ for name in filenames:
 g[2] = k + offset[2]
 g %= geom.grid
 seeds[n,0:3] = (g+0.5)/geom.grid # normalize coordinates to box
-seeds[n, 3] = geom.microstructure[g[0],g[1],g[2]]
+seeds[n, 3] = geom.material[g[0],g[1],g[2]]
 if options.x: g[0] += 1
 if options.y: g[1] += 1
 n += 1
@@ -88,9 +88,9 @@ for name in filenames:
 'grid\ta {}\tb {}\tc {}'.format(*geom.grid),
 'size\tx {}\ty {}\tz {}'.format(*geom.size),
 'origin\tx {}\ty {}\tz {}'.format(*geom.origin),
-'homogenization\t{}'.format(geom.homogenization)]
+]

-table = damask.Table(seeds,{'pos':(3,),'microstructure':(1,)},comments)
-table.set('microstructure',table.get('microstructure').astype(np.int))
-table.to_file(sys.stdout if name is None else \
-os.path.splitext(name)[0]+f'_poked_{options.N}.seeds')
+table = damask.Table(seeds,{'pos':(3,),'material':(1,)},comments)
+table.set('material',table.get('material').astype(np.int))\
+.save(sys.stdout if name is None else \
+os.path.splitext(name)[0]+f'_poked_{options.N}.seeds',legacy=True)
@@ -154,12 +154,12 @@ for name in filenames:
 'randomSeed\t{}'.format(options.randomSeed),
 ]

-table = damask.Table(np.hstack((seeds,eulers)),{'pos':(3,),'euler':(3,)},comments)
-table = table.add('microstructure',np.arange(options.microstructure,options.microstructure + options.N,dtype=int))
+table = damask.Table(np.hstack((seeds,eulers)),{'pos':(3,),'euler':(3,)},comments)\
+.add('microstructure',np.arange(options.microstructure,options.microstructure + options.N,dtype=int))

 if options.weights:
 weights = np.random.uniform(low = 0, high = options.max, size = options.N) if options.max > 0.0 \
 else np.random.normal(loc = options.mean, scale = options.sigma, size = options.N)
 table = table.add('weight',weights)

-table.to_file(sys.stdout if name is None else name)
+table.save(sys.stdout if name is None else name,legacy=True)
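The seed scripts now rely on Table methods returning new objects, so construction, column addition and writing can be chained. A throw-away sketch (random data; assumes the damask package is importable):

    import numpy as np
    import damask

    seeds  = np.random.rand(5, 3)
    eulers = np.random.rand(5, 3) * 360.0
    damask.Table(np.hstack((seeds, eulers)), {'pos': (3,), 'euler': (3,)}, ['illustrative seed file'])\
          .add('microstructure', np.arange(1, 6, dtype=int))\
          .save('example.seeds', legacy=True)   # legacy=True keeps the old "N header" layout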
@@ -235,100 +235,128 @@ class Colormap(mpl.colors.ListedColormap):
 return Colormap(np.array(rev.colors),rev.name[:-4] if rev.name.endswith('_r_r') else rev.name)


-def to_file(self,fname=None,format='ParaView'):
+def save_paraview(self,fname=None):
 """
-Export colormap to file for use in external programs.
+Write colormap to JSON file for Paraview.

 Parameters
 ----------
 fname : file, str, or pathlib.Path, optional.
 Filename to store results. If not given, the filename will
-consist of the name of the colormap and an extension that
-depends on the file format.
-format : {'ParaView', 'ASCII', 'GOM', 'gmsh'}, optional
-File format, defaults to 'ParaView'. Available formats are:
-- ParaView: JSON file, extension '.json'.
-- ASCII: Plain text file, extension '.txt'.
-- GOM: Aramis GOM (DIC), extension '.legend'.
-- Gmsh: Gmsh FEM mesh-generator, extension '.msh'.
+consist of the name of the colormap and extension '.json'.

 """
 if fname is not None:
 try:
-f = open(fname,'w')
+fhandle = open(fname,'w')
 except TypeError:
-f = fname
+fhandle = fname
 else:
-f = None
+fhandle = None

-if format.lower() == 'paraview':
-Colormap._export_paraview(self,f)
-elif format.lower() == 'ascii':
-Colormap._export_ASCII(self,f)
-elif format.lower() == 'gom':
-Colormap._export_GOM(self,f)
-elif format.lower() == 'gmsh':
-Colormap._export_gmsh(self,f)
-else:
-raise ValueError('Unknown output format: {format}.')

-@staticmethod
-def _export_paraview(colormap,fhandle=None):
-"""Write colormap to JSON file for Paraview."""
 colors = []
-for i,c in enumerate(np.round(colormap.colors,6).tolist()):
+for i,c in enumerate(np.round(self.colors,6).tolist()):
 colors+=[i]+c

 out = [{
 'Creator':util.execution_stamp('Colormap'),
 'ColorSpace':'RGB',
-'Name':colormap.name,
+'Name':self.name,
 'DefaultMap':True,
 'RGBPoints':colors
 }]
 if fhandle is None:
-with open(colormap.name.replace(' ','_')+'.json', 'w') as f:
+with open(self.name.replace(' ','_')+'.json', 'w') as f:
 json.dump(out, f,indent=4)
 else:
 json.dump(out,fhandle,indent=4)

-@staticmethod
-def _export_ASCII(colormap,fhandle=None):
-"""Write colormap to ASCII table."""
-labels = {'RGBA':4} if colormap.colors.shape[1] == 4 else {'RGB': 3}
-t = Table(colormap.colors,labels,f'Creator: {util.execution_stamp("Colormap")}')
+def save_ASCII(self,fname=None):
+"""
+Write colormap to ASCII table.
+
+Parameters
+----------
+fname : file, str, or pathlib.Path, optional.
+Filename to store results. If not given, the filename will
+consist of the name of the colormap and extension '.txt'.
+
+"""
+if fname is not None:
+try:
+fhandle = open(fname,'w')
+except TypeError:
+fhandle = fname
+else:
+fhandle = None
+
+labels = {'RGBA':4} if self.colors.shape[1] == 4 else {'RGB': 3}
+t = Table(self.colors,labels,f'Creator: {util.execution_stamp("Colormap")}')

 if fhandle is None:
-with open(colormap.name.replace(' ','_')+'.txt', 'w') as f:
+with open(self.name.replace(' ','_')+'.txt', 'w') as f:
-t.to_file(f,new_style=True)
+t.save(f)
 else:
-t.to_file(fhandle,new_style=True)
+t.save(fhandle)

-@staticmethod
-def _export_GOM(colormap,fhandle=None):
-"""Write colormap to GOM Aramis compatible format."""
+def save_GOM(self,fname=None):
+"""
+Write colormap to GOM Aramis compatible format.
+
+Parameters
+----------
+fname : file, str, or pathlib.Path, optional.
+Filename to store results. If not given, the filename will
+consist of the name of the colormap and extension '.legend'.
+
+"""
+if fname is not None:
+try:
+fhandle = open(fname,'w')
+except TypeError:
+fhandle = fname
+else:
+fhandle = None
 # ToDo: test in GOM
-GOM_str = f'1 1 {colormap.name.replace(" ","_")} 9 {colormap.name.replace(" ","_")} ' \
+GOM_str = '1 1 {name} 9 {name} '.format(name=self.name.replace(" ","_")) \
 + '0 1 0 3 0 0 -1 9 \\ 0 0 0 255 255 255 0 0 255 ' \
-+ f'30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 {len(colormap.colors)}' \
++ f'30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 {len(self.colors)}' \
-+ ' '.join([f' 0 {c[0]} {c[1]} {c[2]} 255 1' for c in reversed((colormap.colors*255).astype(int))]) \
++ ' '.join([f' 0 {c[0]} {c[1]} {c[2]} 255 1' for c in reversed((self.colors*255).astype(int))]) \
 + '\n'
 if fhandle is None:
-with open(colormap.name.replace(' ','_')+'.legend', 'w') as f:
+with open(self.name.replace(' ','_')+'.legend', 'w') as f:
 f.write(GOM_str)
 else:
 fhandle.write(GOM_str)


-@staticmethod
-def _export_gmsh(colormap,fhandle=None):
-"""Write colormap to Gmsh compatible format."""
+def save_gmsh(self,fname=None):
+"""
+Write colormap to Gmsh compatible format.
+
+Parameters
+----------
+fname : file, str, or pathlib.Path, optional.
+Filename to store results. If not given, the filename will
+consist of the name of the colormap and extension '.msh'.
+
+"""
+if fname is not None:
+try:
+fhandle = open(fname,'w')
+except TypeError:
+fhandle = fname
+else:
+fhandle = None
 # ToDo: test in gmsh
 gmsh_str = 'View.ColorTable = {\n' \
-+'\n'.join([f'{c[0]},{c[1]},{c[2]},' for c in colormap.colors[:,:3]*255]) \
++'\n'.join([f'{c[0]},{c[1]},{c[2]},' for c in self.colors[:,:3]*255]) \
 +'\n}\n'
 if fhandle is None:
-with open(colormap.name.replace(' ','_')+'.msh', 'w') as f:
+with open(self.name.replace(' ','_')+'.msh', 'w') as f:
 f.write(gmsh_str)
 else:
 fhandle.write(gmsh_str)
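Usage sketch for the per-format writers that replace Colormap.to_file(format=...). The construction via from_range is an assumption about this module version; the file names simply follow from the colormap name:

    import damask

    cmap = damask.Colormap.from_range([0, 0, 0], [1, 1, 1], name='gray_example')
    cmap.save_paraview()   # -> gray_example.json
    cmap.save_ASCII()      # -> gray_example.txt
    cmap.save_GOM()        # -> gray_example.legend
    cmap.save_gmsh()       # -> gray_example.msh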
[File diff suppressed because it is too large]
@@ -1100,7 +1100,7 @@ class Result:
 pool.join()


-def write_XDMF(self):
+def save_XDMF(self):
 """
 Write XDMF file to directly visualize data in DADF5 file.

@@ -1196,7 +1196,7 @@ class Result:
 f.write(xml.dom.minidom.parseString(ET.tostring(xdmf).decode()).toprettyxml())


-def to_vtk(self,labels=[],mode='cell'):
+def save_vtk(self,labels=[],mode='cell'):
 """
 Export to vtk cell/point data.

@@ -1268,4 +1268,4 @@ class Result:
 u = self.read_dataset(self.get_dataset_location('u_n' if mode.lower() == 'cell' else 'u_p'))
 v.add(u,'u')

-v.to_file(f'{self.fname.stem}_inc{inc[3:].zfill(N_digits)}')
+v.save(f'{self.fname.stem}_inc{inc[3:].zfill(N_digits)}')
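For the renamed Result exporters, a minimal call sequence; the HDF5 file name and dataset labels are placeholders:

    import damask

    result = damask.Result('simulation.hdf5')   # hypothetical DADF5 file
    result.save_XDMF()                          # formerly write_XDMF()
    result.save_vtk(labels=['F', 'P'])          # formerly to_vtk()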
@@ -27,8 +27,11 @@ class Table:
 self.comments = [] if comments_ is None else [c for c in comments_]
 self.data = pd.DataFrame(data=data)
 self.shapes = { k:(v,) if isinstance(v,(np.int,int)) else v for k,v in shapes.items() }
-self._label_condensed()
+self._label_uniform()

+def __repr__(self):
+"""Brief overview."""
+return util.srepr(self.comments)+'\n'+self.data.__repr__()
+
 def __copy__(self):
 """Copy Table."""
@@ -39,7 +42,7 @@ class Table:
 return self.__copy__()


-def _label_flat(self):
+def _label_discrete(self):
 """Label data individually, e.g. v v v ==> 1_v 2_v 3_v."""
 labels = []
 for label,shape in self.shapes.items():
@@ -48,8 +51,8 @@ class Table:
 self.data.columns = labels


-def _label_condensed(self):
-"""Label data condensed, e.g. 1_v 2_v 3_v ==> v v v."""
+def _label_uniform(self):
+"""Label data uniformly, e.g. 1_v 2_v 3_v ==> v v v."""
 labels = []
 for label,shape in self.shapes.items():
 labels += [label] * int(np.prod(shape))
@@ -64,12 +67,15 @@ class Table:


 @staticmethod
-def from_ASCII(fname):
+def load(fname):
 """
-Create table from ASCII file.
+Load ASCII table file.

-The first line can indicate the number of subsequent header lines as 'n header',
-alternatively first line is the header and comments are marked by '#' ('new style').
+In legacy style, the first line indicates the number of
+subsequent header lines as "N header", with the last header line being
+interpreted as column labels.
+Alternatively, initial comments are marked by '#', with the first non-comment line
+containing the column labels.
 Vector data column labels are indicated by '1_v, 2_v, ..., n_v'.
 Tensor data column labels are indicated by '3x3:1_T, 3x3:2_T, ..., 3x3:9_T'.

@@ -119,9 +125,9 @@ class Table:
 return Table(data,shapes,comments)

 @staticmethod
-def from_ang(fname):
+def load_ang(fname):
 """
-Create table from TSL ang file.
+Load ang file.

 A valid TSL ang file needs to contains the following columns:
 * Euler angles (Bunge notation) in radians, 3 floats, label 'eu'.
@@ -289,9 +295,9 @@ class Table:

 """
 dup = self.copy()
-dup._label_flat()
+dup._label_discrete()
 dup.data.sort_values(labels,axis=0,inplace=True,ascending=ascending)
-dup._label_condensed()
+dup._label_uniform()
 dup.comments.append(f'sorted {"ascending" if ascending else "descending"} by {labels}')
 return dup

@@ -338,59 +344,38 @@ class Table:
 return dup


-def to_file(self,fname,format='ASCII',new_style=False):
+def save(self,fname,legacy=False):
 """
-Store as plain text file.
+Save as plain text file.

 Parameters
 ----------
 fname : file, str, or pathlib.Path
 Filename or file for writing.
-format : {ASCII'}, optional
-File format, defaults to 'ASCII'. Available formats are:
-- ASCII: Plain text file, extension '.txt'.
-new_style : Boolean, optional
-Write table in new style, indicating header lines by comment sign ('#') only.
+legacy : Boolean, optional
+Write table in legacy style, indicating header lines by "N header"
+in contrast to using comment sign ('#') at beginning of lines.

 """
-def _to_ASCII(table,fname,new_style=False):
-"""
-Store as plain text file.
-
-Parameters
-----------
-table : Table object
-Table to write.
-fname : file, str, or pathlib.Path
-Filename or file for writing.
-new_style : Boolean, optional
-Write table in new style, indicating header lines by comment sign ('#') only.
-
-"""
-seen = set()
-labels = []
-for l in [x for x in table.data.columns if not (x in seen or seen.add(x))]:
-if table.shapes[l] == (1,):
-labels.append(f'{l}')
-elif len(table.shapes[l]) == 1:
-labels += [f'{i+1}_{l}' \
-for i in range(table.shapes[l][0])]
-else:
-labels += [f'{util.srepr(table.shapes[l],"x")}:{i+1}_{l}' \
-for i in range(np.prod(table.shapes[l]))]
-
-header = [f'# {comment}' for comment in table.comments] if new_style else \
-[f'{len(table.comments)+1} header'] + table.comments
-
-try:
-f = open(fname,'w')
-except TypeError:
-f = fname
-
-for line in header + [' '.join(labels)]: f.write(line+'\n')
-table.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False)
-
-if format.lower() == 'ascii':
-return _to_ASCII(self,fname,new_style)
-else:
-raise TypeError(f'Unknown format {format}.')
+seen = set()
+labels = []
+for l in [x for x in self.data.columns if not (x in seen or seen.add(x))]:
+if self.shapes[l] == (1,):
+labels.append(f'{l}')
+elif len(self.shapes[l]) == 1:
+labels += [f'{i+1}_{l}' \
+for i in range(self.shapes[l][0])]
+else:
+labels += [f'{util.srepr(self.shapes[l],"x")}:{i+1}_{l}' \
+for i in range(np.prod(self.shapes[l]))]
+
+header = ([f'{len(self.comments)+1} header'] + self.comments) if legacy else \
+[f'# {comment}' for comment in self.comments]
+
+try:
+fhandle = open(fname,'w')
+except TypeError:
+fhandle = fname
+
+for line in header + [' '.join(labels)]: fhandle.write(line+'\n')
+self.data.to_csv(fhandle,sep=' ',na_rep='nan',index=False,header=False)
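To make the renamed Table I/O and the legacy flag concrete: a throw-away sketch exercising both header conventions described in Table.load above (column names and values are illustrative):

    from io import StringIO
    import numpy as np
    import damask

    legacy_style = StringIO('2 header\n'
                            'some comment\n'
                            '1_pos 2_pos 3_pos microstructure\n'
                            '0.0 0.0 0.0 1\n')
    comment_style = StringIO('# some comment\n'
                             '1_pos 2_pos 3_pos microstructure\n'
                             '0.0 0.0 0.0 1\n')
    for src in (legacy_style, comment_style):
        print(damask.Table.load(src).get('pos'))     # same content either way

    tbl = damask.Table(np.zeros((2, 3)), {'pos': (3,)}, ['illustrative comment'])
    tbl.save('new_style.txt')                  # header lines marked with '#'
    tbl.save('legacy_style.txt', legacy=True)  # first line reads '2 header'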
@@ -228,7 +228,7 @@ class Test:

 def copy_Base2Current(self,sourceDir,sourcefiles=[],targetfiles=[]):

-source=os.path.normpath(os.path.join(self.dirBase,'../../..',sourceDir))
+source = os.path.normpath(os.path.join(self.dirBase,'../../..',sourceDir))
 if len(targetfiles) == 0: targetfiles = sourcefiles
 for i,f in enumerate(sourcefiles):
 try:
@@ -287,30 +287,30 @@ class Test:

 import numpy as np
 logging.info('\n '.join(['comparing',File1,File2]))
-table = damask.Table.from_ASCII(File1)
-len1=len(table.comments)+2
-table = damask.Table.from_ASCII(File2)
-len2=len(table.comments)+2
+table = damask.Table.load(File1)
+len1 = len(table.comments)+2
+table = damask.Table.load(File2)
+len2 = len(table.comments)+2

 refArray = np.nan_to_num(np.genfromtxt(File1,missing_values='n/a',skip_header = len1,autostrip=True))
 curArray = np.nan_to_num(np.genfromtxt(File2,missing_values='n/a',skip_header = len2,autostrip=True))

 if len(curArray) == len(refArray):
 refArrayNonZero = refArray[refArray.nonzero()]
 curArray = curArray[refArray.nonzero()]
-max_err=np.max(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.))
-max_loc=np.argmax(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.))
+max_err = np. max(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.))
+max_loc = np.argmax(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.))
 refArrayNonZero = refArrayNonZero[curArray.nonzero()]
 curArray = curArray[curArray.nonzero()]
 print(f' ********\n * maximum relative error {max_err} between {refArrayNonZero[max_loc]} and {curArray[max_loc]}\n ********')
 return max_err
 else:
-raise Exception('mismatch in array size to compare')
+raise Exception(f'mismatch in array sizes ({len(refArray)} and {len(curArray)}) to compare')


 def compare_ArrayRefCur(self,ref,cur=''):

-if cur =='': cur = ref
+if cur == '': cur = ref
 refName = self.fileInReference(ref)
 curName = self.fileInCurrent(cur)
 return self.compare_Array(refName,curName)
@@ -331,7 +331,7 @@ class Test:
 logging.info('\n '.join(['comparing ASCII Tables',file0,file1]))
 if normHeadings == '': normHeadings = headings0

-# check if comparison is possible and determine lenght of columns
+# check if comparison is possible and determine length of columns
 if len(headings0) == len(headings1) == len(normHeadings):
 dataLength = len(headings0)
 length = [1 for i in range(dataLength)]
@@ -399,10 +399,8 @@ class Test:
 if any(norm[i]) == 0.0 or absTol[i]:
 norm[i] = [1.0 for j in range(line0-len(skipLines))]
 absTol[i] = True
-if perLine:
-logging.warning(f"At least one norm of \"{headings0[i]['label']}\" in first table is 0.0, using absolute tolerance")
-else:
-logging.warning(f"Maximum norm of \"{headings0[i]['label']}\" in first table is 0.0, using absolute tolerance")
+logging.warning(f'''{"At least one" if perLine else "Maximum"} norm of
+"{headings0[i]['label']}" in first table is 0.0, using absolute tolerance''')

 line1 = 0
 while table1.data_read(): # read next data line of ASCII table
@@ -418,20 +416,18 @@ class Test:

 logging.info(' ********')
 for i in range(dataLength):
-if absTol[i]:
-logging.info(f" * maximum absolute error {maxError[i]} between {headings0[i]['label']} and {headings1[i]['label']}")
-else:
-logging.info(f" * maximum relative error {maxError[i]} between {headings0[i]['label']} and {headings1[i]['label']}")
+logging.info(f''' * maximum {'absolute' if absTol[i] else 'relative'} error {maxError[i]}
+between {headings0[i]['label']} and {headings1[i]['label']}''')
 logging.info(' ********')
 return maxError


 def compare_TablesStatistically(self,
 files = [None,None], # list of file names
 columns = [None], # list of list of column labels (per file)
 meanTol = 1.0e-4,
 stdTol = 1.0e-6,
 preFilter = 1.0e-9):
 """
 Calculate statistics of tables.

@@ -440,9 +436,9 @@ class Test:
 if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested
 files = [str(files)]

-tables = [damask.Table.from_ASCII(filename) for filename in files]
+tables = [damask.Table.load(filename) for filename in files]
 for table in tables:
-table._label_flat()
+table._label_discrete()

 columns += [columns[0]]*(len(files)-len(columns)) # extend to same length as files
 columns = columns[:len(files)] # truncate to same length as files
@@ -462,7 +458,7 @@ class Test:

 data = []
 for table,labels in zip(tables,columns):
-table._label_condensed()
+table._label_uniform()
 data.append(np.hstack(list(table.get(label) for label in labels)))


@@ -471,12 +467,11 @@ class Test:
 normBy = (np.abs(data[i]) + np.abs(data[i-1]))*0.5
 normedDelta = np.where(normBy>preFilter,delta/normBy,0.0)
 mean = np.amax(np.abs(np.mean(normedDelta,0)))
 std = np.amax(np.std(normedDelta,0))
 logging.info(f'mean: {mean:f}')
 logging.info(f'std: {std:f}')

-return (mean<meanTol) & (std < stdTol)
+return (mean < meanTol) & (std < stdTol)


 def compare_Tables(self,
@@ -491,7 +486,7 @@ class Test:

 if len(files) < 2: return True # single table is always close to itself...

-tables = [damask.Table.from_ASCII(filename) for filename in files]
+tables = [damask.Table.load(filename) for filename in files]

 columns += [columns[0]]*(len(files)-len(columns)) # extend to same length as files
 columns = columns[:len(files)] # truncate to same length as files
@@ -580,7 +575,7 @@ class Test:

 if culprit == 0:
 count = len(self.variants) if self.options.select is None else len(self.options.select)
-msg = 'Test passed.' if count == 1 else f'All {count} tests passed.'
+msg = ('Test passed.' if count == 1 else f'All {count} tests passed.') + '\a\a\a'
 elif culprit == -1:
 msg = 'Warning: could not start test...'
 ret = 0
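The Test comparisons above switch between the renamed private label helpers. As an illustration only (not the library code), the relabeling they perform on a made-up shapes dict:

    import numpy as np

    shapes = {'v': (3,), 's': (1,)}

    # _label_discrete: one label per column, e.g. v v v -> 1_v 2_v 3_v
    discrete = [f'{i+1}_{label}' if np.prod(shape) > 1 else label
                for label, shape in shapes.items()
                for i in range(int(np.prod(shape)))]
    print(discrete)   # ['1_v', '2_v', '3_v', 's']

    # _label_uniform: repeat the bare label, e.g. 1_v 2_v 3_v -> v v v
    uniform = [label
               for label, shape in shapes.items()
               for _ in range(int(np.prod(shape)))]
    print(uniform)    # ['v', 'v', 'v', 's']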
@@ -118,7 +118,7 @@ class VTK:


 @staticmethod
-def from_file(fname,dataset_type=None):
+def load(fname,dataset_type=None):
 """
 Create VTK from file.

@@ -168,7 +168,7 @@ class VTK:
 def _write(writer):
 """Wrapper for parallel writing."""
 writer.Write()
-def to_file(self,fname,parallel=True,compress=True):
+def save(self,fname,parallel=True,compress=True):
 """
 Write to file.

@@ -178,6 +178,8 @@ class VTK:
 Filename for writing.
 parallel : boolean, optional
 Write data in parallel background process. Defaults to True.
+compress : bool, optional
+Compress with zlib algorithm. Defaults to True.

 """
 if isinstance(self.vtk_data,vtk.vtkRectilinearGrid):
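Round-trip sketch for the renamed VTK I/O; the file name is a placeholder and compression is the newly documented default:

    import damask

    v = damask.VTK.load('geom.vtr')           # formerly VTK.from_file()
    v.save('geom_copy.vtr', compress=True)    # formerly to_file()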
@@ -117,6 +117,7 @@ def execute(cmd,
 initialPath = os.getcwd()
 myEnv = os.environ if env is None else env
 os.chdir(wd)
+print(f"executing '{cmd}' in '{wd}'")
 process = subprocess.Popen(shlex.split(cmd),
 stdout = subprocess.PIPE,
 stderr = subprocess.PIPE,
@@ -128,7 +129,7 @@ def execute(cmd,
 stdout = stdout.decode('utf-8').replace('\x08','')
 stderr = stderr.decode('utf-8').replace('\x08','')
 if process.returncode != 0:
-raise RuntimeError(f'{cmd} failed with returncode {process.returncode}')
+raise RuntimeError(f"'{cmd}' failed with returncode {process.returncode}")
 return stdout, stderr


@@ -172,8 +173,9 @@ def scale_to_coprime(v):
 m = (np.array(v) * reduce(lcm, map(lambda x: int(get_square_denominator(x)),v)) ** 0.5).astype(np.int)
 m = m//reduce(np.gcd,m)

-if not np.allclose(v[v.nonzero()]/m[v.nonzero()],v[v.nonzero()][0]/m[m.nonzero()][0]):
-raise ValueError(f'Invalid result {m} for input {v}. Insufficient precision?')
+with np.errstate(invalid='ignore'):
+if not np.allclose(np.ma.masked_invalid(v/m),v[np.argmax(abs(v))]/m[np.argmax(abs(v))]):
+raise ValueError(f'Invalid result {m} for input {v}. Insufficient precision?')

 return m

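The relaxed consistency check in scale_to_coprime now masks the divisions that involve zero components instead of indexing around them. Expected behaviour on made-up inputs (stated as expectations, not verified here):

    from damask import util

    print(util.scale_to_coprime([0.5, 0.5, 1.0]))  # expected [1 1 2]
    print(util.scale_to_coprime([0.0, 2.0, 4.0]))  # a zero component no longer trips the check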
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATgAAAA==eF4FwUEKgCAUBNCO4rIWX8ZJsbxA5/iUFqQVBJ2/9zZt+p52yXeza816mW+0sBCtz6HCGGSPE1wJjMX0BCGYhTQuJLrkKfDA0P0d3xK6
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||||
AQAAAACAAAAABQAAZwAAAA==eF7t0rcOgmAAhVEgNmyo2AuoWN//BR04EwsJcfzvcvabL47qxcFOJg177HPAIUdMOeaEU844Z8YFl1wx55obbrnjngceeeKZFxYseeWNd1Z88MkX3/zwy+Z/wf8YOqzX1uEPlgwHCA==
|
AQAAAACAAAAABQAAZwAAAA==eF7t0rcOgmAAhVEgNmyo2AuoWN//BR04EwsJcfzvcvabL47qxcFOJg177HPAIUdMOeaEU844Z8YFl1wx55obbrnjngceeeKZFxYseeWNd1Z88MkX3/zwy+Z/wf8YOqzX1uEPlgwHCA==
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMoM9Yz0DPQTcwpyEjUNTI31U03tzAwTDM1Mk9T0DAyMDLQNbDUNTJSMDS1MjK0MgFyTQwMNBkAHc8SuA==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||||
AQAAAACAAAAABQAAagAAAA==eF7t0rkOglAARFExLrgCKuKuqLj8/w9acCoSY7B+05x+cqNOvSj4l92GPfY54JAxRxxzwilnnDNhyowLLrlizjULbrjljnseeOSJZ15Y8sob76z44JMvvtn8L9jObz2GDuv96vADk5QHBg==
|
AQAAAACAAAAABQAAagAAAA==eF7t0rkOglAARFExLrgCKuKuqLj8/w9acCoSY7B+05x+cqNOvSj4l92GPfY54JAxRxxzwilnnDNhyowLLrlizjULbrjljnseeOSJZ15Y8sob76z44JMvvtn8L9jObz2GDuv96vADk5QHBg==
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF4FwdEJgDAMBUBH6ad+JLzElmoXcI6grYKtCoLze7dZs/fkJd+N15rtct/IYJDV5zDSGGiPE6QEjcX1CgVhJlUnIakkLwQPDN0PHdcSuQ==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||||
AQAAAACAAAAABQAAZAAAAA==eF7t0scRglAAQEEBAyZUMCuomPtv0ANbgMNw/O+yDbyo1xQFWxkzYZ8DDjliyjEnnHLGOTMuuOSKOQuuueGWO+554JEnnlmy4oVX3ljzzgeffPHND7+Mg50aPmz698MfmvQHCg==
|
AQAAAACAAAAABQAAZAAAAA==eF7t0scRglAAQEEBAyZUMCuomPtv0ANbgMNw/O+yDbyo1xQFWxkzYZ8DDjliyjEnnHLGOTMuuOSKOQuuueGWO+554JEnnlmy4oVX3ljzzgeffPHND7+Mg50aPmz698MfmvQHCg==
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF4FwVEKgCAQBcCO4md97PJcE9MLdI6ltCCtIOj8zuza9Lt4zU/jrWa9ze8YDNL6nkoSPB1hgS1eQjGjQECIJGKsT2KTi4QZmIYOHg4SwA==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||||
AQAAAACAAAAABQAAYwAAAA==eF7t0scBgkAAAEHBgBEwgDmBsf8GfTANCN/bzzSwUa8pCrYyZp8DDjliwjEnnHLGORdMmTHnkiuuuWHBklvuuOeBR5545oVX3nhnxZoPPvnimx9+GQc7GT5sqvjvhz+ZtAcJ
|
AQAAAACAAAAABQAAYwAAAA==eF7t0scBgkAAAEHBgBEwgDmBsf8GfTANCN/bzzSwUa8pCrYyZp8DDjliwjEnnHLGORdMmTHnkiuuuWHBklvuuOeBR5545oVX3nhnxZoPPvnimx9+GQc7GT5sqvjvhz+ZtAcJ
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF4FwdEJgDAMBUBH6ad+JLzElmoXcI6grYKtCoLze7dZs/fkJd+N15rtct/IYJDV5zDSGGiPE6QEjcX1CgVhJlUnIakkLwQPDN0PHdcSuQ==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="2">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="2">
|
||||||
AQAAAACAAAAABQAAGwAAAA==eF5jZIAAxlF6lB4AmmmUpogeDUfKaAD7jwDw
|
AQAAAACAAAAABQAAGwAAAA==eF5jZIAAxlF6lB4AmmmUpogeDUfKaAD7jwDw
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF4FwVEKgCAQBcCO4md97PJcE9MLdI6ltCCtIOj8zuza9Lt4zU/jrWa9ze8YDNL6nkoSPB1hgS1eQjGjQECIJGKsT2KTi4QZmIYOHg4SwA==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="2">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="2">
|
||||||
AQAAAACAAAAABQAAGQAAAA==eF5jZIAAxlF6lB4AmmmUHqUHkAYA/M8A8Q==
|
AQAAAACAAAAABQAAGQAAAA==eF5jZIAAxlF6lB4AmmmUHqUHkAYA/M8A8Q==
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATgAAAA==eF4FwUEKgCAUBNCO4rIWX8ZJsbxA5/iUFqQVBJ2/9zZt+p52yXeza816mW+0sBCtz6HCGGSPE1wJjMX0BCGYhTQuJLrkKfDA0P0d3xK6
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||||
AQAAAACAAAAABQAAZwAAAA==eF7t0rcOgmAAhVEgNmyo2AuoWN//BR04EwsJcfzvcvabL47qxcFOJg177HPAIUdMOeaEU844Z8YFl1wx55obbrnjngceeeKZFxYseeWNd1Z88MkX3/zwy+Z/wf8YOqzX1uEPlgwHCA==
|
AQAAAACAAAAABQAAZwAAAA==eF7t0rcOgmAAhVEgNmyo2AuoWN//BR04EwsJcfzvcvabL47qxcFOJg177HPAIUdMOeaEU844Z8YFl1wx55obbrnjngceeeKZFxYseeWNd1Z88MkX3/zwy+Z/wf8YOqzX1uEPlgwHCA==
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMoM9Yz0DPQTcwpyEjUNTI31U03tzAwTDM1Mk9T0DAyMDLQNbDUNTJSMDS1MjK0MgFyTQwMNBkAHc8SuA==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||||
AQAAAACAAAAABQAAagAAAA==eF7t0rkOglAARFExLrgCKuKuqLj8/w9acCoSY7B+05x+cqNOvSj4l92GPfY54JAxRxxzwilnnDNhyowLLrlizjULbrjljnseeOSJZ15Y8sob76z44JMvvtn8L9jObz2GDuv96vADk5QHBg==
|
AQAAAACAAAAABQAAagAAAA==eF7t0rkOglAARFExLrgCKuKuqLj8/w9acCoSY7B+05x+cqNOvSj4l92GPfY54JAxRxxzwilnnDNhyowLLrlizjULbrjljnseeOSJZ15Y8sob76z44JMvvtn8L9jObz2GDuv96vADk5QHBg==
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF4FwdEJgDAMBUBH6ad+JLzElmoXcI6grYKtCoLze7dZs/fkJd+N15rtct/IYJDV5zDSGGiPE6QEjcX1CgVhJlUnIakkLwQPDN0PHdcSuQ==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||||
AQAAAACAAAAABQAAZAAAAA==eF7t0scRglAAQEEBAyZUMCuomPtv0ANbgMNw/O+yDbyo1xQFWxkzYZ8DDjliyjEnnHLGOTMuuOSKOQuuueGWO+554JEnnlmy4oVX3ljzzgeffPHND7+Mg50aPmz698MfmvQHCg==
|
AQAAAACAAAAABQAAZAAAAA==eF7t0scRglAAQEEBAyZUMCuomPtv0ANbgMNw/O+yDbyo1xQFWxkzYZ8DDjliyjEnnHLGOTMuuOSKOQuuueGWO+554JEnnlmy4oVX3ljzzgeffPHND7+Mg50aPmz698MfmvQHCg==
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMoM9Yz0DPQTcwpyEjUNTI31U03tzAwTDM1Mk9T0DAyMDLQNbDUNTJSMDS1MjK0MgFyTQwMNBkAHc8SuA==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||||
AQAAAACAAAAABQAAYwAAAA==eF7t0scBgkAAAEHBgBEwgDmBsf8GfTANCN/bzzSwUa8pCrYyZp8DDjliwjEnnHLGORdMmTHnkiuuuWHBklvuuOeBR5545oVX3nhnxZoPPvnimx9+GQc7GT5sqvjvhz+ZtAcJ
|
AQAAAACAAAAABQAAYwAAAA==eF7t0scBgkAAAEHBgBEwgDmBsf8GfTANCN/bzzSwUa8pCrYyZp8DDjliwjEnnHLGORdMmTHnkiuuuWHBklvuuOeBR5545oVX3nhnxZoPPvnimx9+GQc7GT5sqvjvhz+ZtAcJ
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF4FwdEJgDAMBUBH6ad+JLzElmoXcI6grYKtCoLze7dZs/fkJd+N15rtct/IYJDV5zDSGGiPE6QEjcX1CgVhJlUnIakkLwQPDN0PHdcSuQ==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="2">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="2">
|
||||||
AQAAAACAAAAABQAAIgAAAA==eF5jZIAAxlGaLJoJjSakntr6hzqN7v9RepSmJw0AC04A9Q==
|
AQAAAACAAAAABQAAIgAAAA==eF5jZIAAxlGaLJoJjSakntr6hzqN7v9RepSmJw0AC04A9Q==
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF4FwVEKgCAQBcCO4md97PJcE9MLdI6ltCCtIOj8zuza9Lt4zU/jrWa9ze8YDNL6nkoSPB1hgS1eQjGjQECIJGKsT2KTi4QZmIYOHg4SwA==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="2">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="2">
|
||||||
AQAAAACAAAAABQAALwAAAA==eF5jZIAAxlGaLJoJjSakHpc+cvUTUkdrmlL3j9KU0dROF5TqH2iaVPcDAALOANU=
|
AQAAAACAAAAABQAALwAAAA==eF5jZIAAxlGaLJoJjSakHpc+cvUTUkdrmlL3j9KU0dROF5TqH2iaVPcDAALOANU=
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATgAAAA==eF4FwUEKgCAUBNCO4rIWX8ZJsbxA5/iUFqQVBJ2/9zZt+p52yXeza816mW+0sBCtz6HCGGSPE1wJjMX0BCGYhTQuJLrkKfDA0P0d3xK6
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||||
AQAAAACAAAAABQAAcQAAAA==eF7t0rkOglAUBFAxKu6igvsKrv//gxYcm9fQGEPBNKe6yc1kolaZqPEndthljzH7HHDIEceccMoZE8654JIpM6645oZb7rjngUeeeOaFV+YseOOdDz754pthf+3Aqr7rdv9vw3+/NjssU7XDD0/8BuQ=
|
AQAAAACAAAAABQAAcQAAAA==eF7t0rkOglAUBFAxKu6igvsKrv//gxYcm9fQGEPBNKe6yc1kolaZqPEndthljzH7HHDIEceccMoZE8654JIpM6645oZb7rjngUeeeOaFV+YseOOdDz754pthf+3Aqr7rdv9vw3+/NjssU7XDD0/8BuQ=
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMoM9Yz0DPQTcwpyEjUNTI31U03tzAwTDM1Mk9T0DAyMDLQNbDUNTJSMDS1MjK0MgFyTQwMNBkAHc8SuA==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||||
AQAAAACAAAAABQAAYQAAAA==eF7t0scVglAAAEHgqZgBA2ZExdR/gx6YCpDj38s0sEnUlgR7ccAhR0w55oRTzjjngktmzFlwxTU33LLkjnseeOSJZ15Y8cqaN975YMMnX3zzwy/j4F+GD9u6fvgD+gwHCA==
|
AQAAAACAAAAABQAAYQAAAA==eF7t0scVglAAAEHgqZgBA2ZExdR/gx6YCpDj38s0sEnUlgR7ccAhR0w55oRTzjjngktmzFlwxTU33LLkjnseeOSJZ15Y8cqaN975YMMnX3zzwy/j4F+GD9u6fvgD+gwHCA==
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATgAAAA==eF4FwUEKgCAUBNCO4rIWX8ZJsbxA5/iUFqQVBJ2/9zZt+p52yXeza816mW+0sBCtz6HCGGSPE1wJjMX0BCGYhTQuJLrkKfDA0P0d3xK6
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||||
AQAAAACAAAAABQAAZAAAAA==eF7t0scRglAAQEEBAyZUMCuomPtv0ANbgMNw/O+yDbyo1xQFWxkzYZ8DDjliyjEnnHLGOTMuuOSKOQuuueGWO+554JEnnlmy4oVX3ljzzgeffPHND7+Mg50aPmz698MfmvQHCg==
|
AQAAAACAAAAABQAAZAAAAA==eF7t0scRglAAQEEBAyZUMCuomPtv0ANbgMNw/O+yDbyo1xQFWxkzYZ8DDjliyjEnnHLGOTMuuOSKOQuuueGWO+554JEnnlmy4oVX3ljzzgeffPHND7+Mg50aPmz698MfmvQHCg==
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMoM9Yz0DPQTcwpyEjUNTI31U03tzAwTDM1Mk9T0DAyMDLQNbDUNTJSMDS1MjK0MgFyTQwMNBkAHc8SuA==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="2" RangeMax="41">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="2" RangeMax="41">
|
||||||
AQAAAACAAAAABQAAZAAAAA==eF7t0rcSglAARFEHE0bAgBkE8///oAWnF8b2bXP6nRv1mkXBv+xzwCFHHDPmhFPOOOeCSyZMmXHFNTfcMueOex545IlnXliw5JUVa95454NPvvjmh79+DXYzdNisbYdfSqMHMg==
|
AQAAAACAAAAABQAAZAAAAA==eF7t0rcSglAARFEHE0bAgBkE8///oAWnF8b2bXP6nRv1mkXBv+xzwCFHHDPmhFPOOOeCSyZMmXHFNTfcMueOex545IlnXliw5JUVa95454NPvvjmh79+DXYzdNisbYdfSqMHMg==
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF4FwdEJgDAMBUBH6ad+JLzElmoXcI6grYKtCoLze7dZs/fkJd+N15rtct/IYJDV5zDSGGiPE6QEjcX1CgVhJlUnIakkLwQPDN0PHdcSuQ==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="2">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="2">
|
||||||
AQAAAACAAAAABQAAIAAAAA==eF5jZIAAxlF6lB4AmokAPdj1DzRNyP2jNH4aAMufANU=
|
AQAAAACAAAAABQAAIAAAAA==eF5jZIAAxlF6lB4AmokAPdj1DzRNyP2jNH4aAMufANU=
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF4FwVEKgCAQBcCO4md97PJcE9MLdI6ltCCtIOj8zuza9Lt4zU/jrWa9ze8YDNL6nkoSPB1hgS1eQjGjQECIJGKsT2KTi4QZmIYOHg4SwA==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="2">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="2">
|
||||||
AQAAAACAAAAABQAAMAAAAA==eF5jYoAAJhw0IwEalz566aeUptT+oa6fUppS+4e6fkppSu0f6voppSm1HwBAngDh
|
AQAAAACAAAAABQAAMAAAAA==eF5jYoAAJhw0IwEalz566aeUptT+oa6fUppS+4e6fkppSu0f6voppSm1HwBAngDh
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -0,0 +1,276 @@
|
||||||
|
{
|
||||||
|
"cells": [
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 1,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"import numpy as np\n",
|
||||||
|
"import damask\n",
|
||||||
|
"\n",
|
||||||
|
"from pathlib import Path"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 2,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"orientations,rODF = damask.Rotation.from_ODF('hybridIA_ODF.txt',\n",
|
||||||
|
" 2**14,\n",
|
||||||
|
" degrees=True,\n",
|
||||||
|
" reconstruct=True,\n",
|
||||||
|
" fractions=True,\n",
|
||||||
|
" seed=0)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 3,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"VTK = damask.VTK.from_rectilinearGrid([36,36,36],[90,90,90])\n",
|
||||||
|
"VTK.add(damask.Table.from_ASCII('hybridIA_ODF.txt').get('intensity'),'intensity')\n",
|
||||||
|
"VTK.add(rODF.flatten(order='F'),'rODF')\n",
|
||||||
|
"VTK.to_file('hybridIA_ODF.vtr')"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 16,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "stdout",
|
||||||
|
"output_type": "stream",
|
||||||
|
"text": [
|
||||||
|
"Help on class VTK in module damask._vtk:\n",
|
||||||
|
"\n",
|
||||||
|
"class VTK(builtins.object)\n",
|
||||||
|
" | VTK(geom)\n",
|
||||||
|
" | \n",
|
||||||
|
" | Spatial visualization (and potentially manipulation).\n",
|
||||||
|
" | \n",
|
||||||
|
" | High-level interface to VTK.\n",
|
||||||
|
" | \n",
|
||||||
|
" | Methods defined here:\n",
|
||||||
|
" | \n",
|
||||||
|
" | __init__(self, geom)\n",
|
||||||
|
" | Set geometry and topology.\n",
|
||||||
|
" | \n",
|
||||||
|
" | Parameters\n",
|
||||||
|
" | ----------\n",
|
||||||
|
" | geom : subclass of vtk.vtkDataSet\n",
|
||||||
|
" | Description of geometry and topology. Valid types are vtk.vtkRectilinearGrid,\n",
|
||||||
|
" | vtk.vtkUnstructuredGrid, or vtk.vtkPolyData.\n",
|
||||||
|
" | \n",
|
||||||
|
" | __repr__(self)\n",
|
||||||
|
" | ASCII representation of the VTK data.\n",
|
||||||
|
" | \n",
|
||||||
|
" | add(self, data, label=None)\n",
|
||||||
|
" | Add data to either cells or points.\n",
|
||||||
|
" | \n",
|
||||||
|
" | Parameters\n",
|
||||||
|
" | ----------\n",
|
||||||
|
" | data : numpy.ndarray\n",
|
||||||
|
" | Data to add. First dimension need to match either\n",
|
||||||
|
" | number of cells or number of points\n",
|
||||||
|
" | label : str\n",
|
||||||
|
" | Data label.\n",
|
||||||
|
" | \n",
|
||||||
|
" | add_comments(self, comments)\n",
|
||||||
|
" | Add Comments.\n",
|
||||||
|
" | \n",
|
||||||
|
" | Parameters\n",
|
||||||
|
" | ----------\n",
|
||||||
|
" | comments : str or list of str\n",
|
||||||
|
" | Comments to add.\n",
|
||||||
|
" | \n",
|
||||||
|
" | get(self, label)\n",
|
||||||
|
" | Get either cell or point data.\n",
|
||||||
|
" | \n",
|
||||||
|
" | Cell data takes precedence over point data, i.e. this\n",
|
||||||
|
" | function assumes that labels are unique among cell and\n",
|
||||||
|
" | point data.\n",
|
||||||
|
" | \n",
|
||||||
|
" | Parameters\n",
|
||||||
|
" | ----------\n",
|
||||||
|
" | label : str\n",
|
||||||
|
" | Data label.\n",
|
||||||
|
" | \n",
|
||||||
|
" | get_comments(self)\n",
|
||||||
|
" | Return the comments.\n",
|
||||||
|
" | \n",
|
||||||
|
" | set_comments(self, comments)\n",
|
||||||
|
" | Set Comments.\n",
|
||||||
|
" | \n",
|
||||||
|
" | Parameters\n",
|
||||||
|
" | ----------\n",
|
||||||
|
" | comments : str or list of str\n",
|
||||||
|
" | Comments.\n",
|
||||||
|
" | \n",
|
||||||
|
" | show(self)\n",
|
||||||
|
" | Render.\n",
|
||||||
|
" | \n",
|
||||||
|
" | See http://compilatrix.com/article/vtk-1 for further ideas.\n",
|
||||||
|
" | \n",
|
||||||
|
" | write(self, fname, parallel=True)\n",
|
||||||
|
" | Write to file.\n",
|
||||||
|
" | \n",
|
||||||
|
" | Parameters\n",
|
||||||
|
" | ----------\n",
|
||||||
|
" | fname : str or pathlib.Path\n",
|
||||||
|
" | Filename for writing.\n",
|
||||||
|
" | parallel : boolean, optional\n",
|
||||||
|
" | Write data in parallel background process. Defaults to True.\n",
|
||||||
|
" | \n",
|
||||||
|
" | ----------------------------------------------------------------------\n",
|
||||||
|
" | Static methods defined here:\n",
|
||||||
|
" | \n",
|
||||||
|
" | from_file(fname, dataset_type=None)\n",
|
||||||
|
" | Create VTK from file.\n",
|
||||||
|
" | \n",
|
||||||
|
" | Parameters\n",
|
||||||
|
" | ----------\n",
|
||||||
|
" | fname : str or pathlib.Path\n",
|
||||||
|
" | Filename for reading. Valid extensions are .vtr, .vtu, .vtp, and .vtk.\n",
|
||||||
|
" | dataset_type : str, optional\n",
|
||||||
|
" | Name of the vtk.vtkDataSet subclass when opening an .vtk file. Valid types are vtkRectilinearGrid,\n",
|
||||||
|
" | vtkUnstructuredGrid, and vtkPolyData.\n",
|
||||||
|
" | \n",
|
||||||
|
" | from_polyData(points)\n",
|
||||||
|
" | Create VTK of type vtk.polyData.\n",
|
||||||
|
" | \n",
|
||||||
|
" | This is the common type for point-wise data.\n",
|
||||||
|
" | \n",
|
||||||
|
" | Parameters\n",
|
||||||
|
" | ----------\n",
|
||||||
|
" | points : numpy.ndarray of shape (:,3)\n",
|
||||||
|
" | Spatial position of the points.\n",
|
||||||
|
" | \n",
|
||||||
|
" | from_rectilinearGrid(grid, size, origin=array([0., 0., 0.]))\n",
|
||||||
|
" | Create VTK of type vtk.vtkRectilinearGrid.\n",
|
||||||
|
" | \n",
|
||||||
|
" | This is the common type for results from the grid solver.\n",
|
||||||
|
" | \n",
|
||||||
|
" | Parameters\n",
|
||||||
|
" | ----------\n",
|
||||||
|
" | grid : numpy.ndarray of shape (3) of np.dtype = int\n",
|
||||||
|
" | Number of cells.\n",
|
||||||
|
" | size : numpy.ndarray of shape (3)\n",
|
||||||
|
" | Physical length.\n",
|
||||||
|
" | origin : numpy.ndarray of shape (3), optional\n",
|
||||||
|
" | Spatial origin.\n",
|
||||||
|
" | \n",
|
||||||
|
" | from_unstructuredGrid(nodes, connectivity, cell_type)\n",
|
||||||
|
" | Create VTK of type vtk.vtkUnstructuredGrid.\n",
|
||||||
|
" | \n",
|
||||||
|
" | This is the common type for results from FEM solvers.\n",
|
||||||
|
" | \n",
|
||||||
|
" | Parameters\n",
|
||||||
|
" | ----------\n",
|
||||||
|
" | nodes : numpy.ndarray of shape (:,3)\n",
|
||||||
|
" | Spatial position of the nodes.\n",
|
||||||
|
" | connectivity : numpy.ndarray of np.dtype = int\n",
|
||||||
|
" | Cell connectivity (0-based), first dimension determines #Cells, second dimension determines #Nodes/Cell.\n",
|
||||||
|
" | cell_type : str\n",
|
||||||
|
" | Name of the vtk.vtkCell subclass. Tested for TRIANGLE, QUAD, TETRA, and HEXAHEDRON.\n",
|
||||||
|
" | \n",
|
||||||
|
" | ----------------------------------------------------------------------\n",
|
||||||
|
" | Data descriptors defined here:\n",
|
||||||
|
" | \n",
|
||||||
|
" | __dict__\n",
|
||||||
|
" | dictionary for instance variables (if defined)\n",
|
||||||
|
" | \n",
|
||||||
|
" | __weakref__\n",
|
||||||
|
" | list of weak references to the object (if defined)\n",
|
||||||
|
"\n"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"help(damask.VTK)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 18,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"a,b=np.radians(([90,90],[45,45]))"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 19,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"array([1.57079633, 1.57079633])"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"execution_count": 19,
|
||||||
|
"metadata": {},
|
||||||
|
"output_type": "execute_result"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"a"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 20,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"array([0.78539816, 0.78539816])"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"execution_count": 20,
|
||||||
|
"metadata": {},
|
||||||
|
"output_type": "execute_result"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"b"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": []
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"kernelspec": {
|
||||||
|
"display_name": "Python 3",
|
||||||
|
"language": "python",
|
||||||
|
"name": "python3"
|
||||||
|
},
|
||||||
|
"language_info": {
|
||||||
|
"codemirror_mode": {
|
||||||
|
"name": "ipython",
|
||||||
|
"version": 3
|
||||||
|
},
|
||||||
|
"file_extension": ".py",
|
||||||
|
"mimetype": "text/x-python",
|
||||||
|
"name": "python",
|
||||||
|
"nbconvert_exporter": "python",
|
||||||
|
"pygments_lexer": "ipython3",
|
||||||
|
"version": "3.8.5"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nbformat": 4,
|
||||||
|
"nbformat_minor": 4
|
||||||
|
}
|
|
@ -75,41 +75,36 @@ class TestColormap:
|
||||||
assert np.allclose(Colormap._xyz2msh(xyz),msh,atol=1.e-6,rtol=0)
|
assert np.allclose(Colormap._xyz2msh(xyz),msh,atol=1.e-6,rtol=0)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('format',['ASCII','paraview','GOM','Gmsh'])
|
@pytest.mark.parametrize('format',['ASCII','paraview','GOM','gmsh'])
|
||||||
@pytest.mark.parametrize('model',['rgb','hsv','hsl','xyz','lab','msh'])
|
@pytest.mark.parametrize('model',['rgb','hsv','hsl','xyz','lab','msh'])
|
||||||
def test_from_range(self,model,format,tmpdir):
|
def test_from_range(self,model,format,tmpdir):
|
||||||
N = np.random.randint(2,256)
|
N = np.random.randint(2,256)
|
||||||
c = Colormap.from_range(np.random.rand(3),np.random.rand(3),model=model,N=N)
|
c = Colormap.from_range(np.random.rand(3),np.random.rand(3),model=model,N=N) # noqa
|
||||||
c.to_file(tmpdir/'color_out',format=format)
|
eval(f'c.save_{format}(tmpdir/"color_out")')
|
||||||
|
|
||||||
@pytest.mark.parametrize('format',['ASCII','paraview','GOM','Gmsh'])
|
@pytest.mark.parametrize('format',['ASCII','paraview','GOM','gmsh'])
|
||||||
@pytest.mark.parametrize('name',['strain','gnuplot','Greys','PRGn','viridis'])
|
@pytest.mark.parametrize('name',['strain','gnuplot','Greys','PRGn','viridis'])
|
||||||
def test_from_predefined(self,name,format,tmpdir):
|
def test_from_predefined(self,name,format,tmpdir):
|
||||||
N = np.random.randint(2,256)
|
N = np.random.randint(2,256)
|
||||||
c = Colormap.from_predefined(name,N)
|
c = Colormap.from_predefined(name,N) # noqa
|
||||||
os.chdir(tmpdir)
|
os.chdir(tmpdir)
|
||||||
c.to_file(format=format)
|
eval(f'c.save_{format}()')
|
||||||
|
|
||||||
@pytest.mark.parametrize('format,name',[('ASCII','test.txt'),
|
@pytest.mark.parametrize('format,name',[('ASCII','test.txt'),
|
||||||
('paraview','test.json'),
|
('paraview','test.json'),
|
||||||
('GOM','test.legend'),
|
('GOM','test.legend'),
|
||||||
('Gmsh','test.msh')
|
('gmsh','test.msh')
|
||||||
])
|
])
|
||||||
def test_write_filehandle(self,format,name,tmpdir):
|
def test_write_filehandle(self,format,name,tmpdir):
|
||||||
c = Colormap.from_predefined('Dark2')
|
c = Colormap.from_predefined('Dark2') # noqa
|
||||||
fname = tmpdir/name
|
fname = tmpdir/name
|
||||||
with open(fname,'w') as f:
|
with open(fname,'w') as f: # noqa
|
||||||
c.to_file(f,format=format)
|
eval(f'c.save_{format}(f)')
|
||||||
for i in range(10):
|
for i in range(10):
|
||||||
if fname.exists(): return
|
if fname.exists(): return
|
||||||
time.sleep(.5)
|
time.sleep(.5)
|
||||||
assert False
|
assert False
|
||||||
|
|
||||||
def test_write_invalid_format(self):
|
|
||||||
c = Colormap.from_predefined('Dark2')
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
c.to_file(format='invalid')
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('model',['rgb','hsv','hsl','lab','invalid'])
|
@pytest.mark.parametrize('model',['rgb','hsv','hsl','lab','invalid'])
|
||||||
def test_invalid_color(self,model):
|
def test_invalid_color(self,model):
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
|
@ -119,13 +114,13 @@ class TestColormap:
|
||||||
c_1 = Colormap.from_predefined('stress')
|
c_1 = Colormap.from_predefined('stress')
|
||||||
c_2 = c_1.reversed()
|
c_2 = c_1.reversed()
|
||||||
assert (not np.allclose(c_1.colors,c_2.colors)) and \
|
assert (not np.allclose(c_1.colors,c_2.colors)) and \
|
||||||
np.allclose(c_1.colors,c_2.reversed().colors)
|
np.allclose(c_1.colors,c_2.reversed().colors)
|
||||||
|
|
||||||
def test_invert(self):
|
def test_invert(self):
|
||||||
c_1 = Colormap.from_predefined('strain')
|
c_1 = Colormap.from_predefined('strain')
|
||||||
c_2 = ~c_1
|
c_2 = ~c_1
|
||||||
assert (not np.allclose(c_1.colors,c_2.colors)) and \
|
assert (not np.allclose(c_1.colors, c_2.colors)) and \
|
||||||
np.allclose(c_1.colors,(~c_2).colors)
|
np.allclose(c_1.colors,(~c_2).colors)
|
||||||
|
|
||||||
def test_add(self):
|
def test_add(self):
|
||||||
c = Colormap.from_predefined('jet')
|
c = Colormap.from_predefined('jet')
|
||||||
|
@ -149,16 +144,16 @@ class TestColormap:
|
||||||
@pytest.mark.parametrize('format,ext',[('ASCII','.txt'),
|
@pytest.mark.parametrize('format,ext',[('ASCII','.txt'),
|
||||||
('paraview','.json'),
|
('paraview','.json'),
|
||||||
('GOM','.legend'),
|
('GOM','.legend'),
|
||||||
('Gmsh','.msh')
|
('gmsh','.msh')
|
||||||
])
|
])
|
||||||
def test_compare_reference(self,format,ext,tmpdir,reference_dir,update):
|
def test_compare_reference(self,format,ext,tmpdir,reference_dir,update):
|
||||||
name = 'binary'
|
name = 'binary'
|
||||||
c = Colormap.from_predefined(name)
|
c = Colormap.from_predefined(name) # noqa
|
||||||
if update:
|
if update:
|
||||||
os.chdir(reference_dir)
|
os.chdir(reference_dir)
|
||||||
c.to_file(format=format)
|
eval(f'c.save_{format}()')
|
||||||
else:
|
else:
|
||||||
os.chdir(tmpdir)
|
os.chdir(tmpdir)
|
||||||
c.to_file(format=format)
|
eval(f'c.save_{format}()')
|
||||||
time.sleep(.5)
|
time.sleep(.5)
|
||||||
assert filecmp.cmp(tmpdir/(name+ext),reference_dir/(name+ext))
|
assert filecmp.cmp(tmpdir/(name+ext),reference_dir/(name+ext))
|
||||||
|
|
|
@ -11,9 +11,9 @@ from damask import util
|
||||||
|
|
||||||
|
|
||||||
def geom_equal(a,b):
|
def geom_equal(a,b):
|
||||||
return np.all(a.get_microstructure() == b.get_microstructure()) and \
|
return np.all(a.material == b.material) and \
|
||||||
np.all(a.get_grid() == b.get_grid()) and \
|
np.all(a.grid == b.grid) and \
|
||||||
np.allclose(a.get_size(), b.get_size()) and \
|
np.allclose(a.size, b.size) and \
|
||||||
str(a.diff(b)) == str(b.diff(a))
|
str(a.diff(b)) == str(b.diff(a))
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
|
@ -33,115 +33,89 @@ def reference_dir(reference_dir_base):
|
||||||
|
|
||||||
class TestGeom:
|
class TestGeom:
|
||||||
|
|
||||||
@pytest.mark.parametrize('flavor',['plain','explicit'])
|
|
||||||
def test_duplicate(self,default,flavor):
|
|
||||||
if flavor == 'plain':
|
|
||||||
modified = default.duplicate()
|
|
||||||
elif flavor == 'explicit':
|
|
||||||
modified = default.duplicate(
|
|
||||||
default.get_microstructure(),
|
|
||||||
default.get_size(),
|
|
||||||
default.get_origin()
|
|
||||||
)
|
|
||||||
print(modified)
|
|
||||||
assert geom_equal(default,modified)
|
|
||||||
|
|
||||||
def test_diff_equal(self,default):
|
def test_diff_equal(self,default):
|
||||||
assert str(default.diff(default)) == ''
|
assert str(default.diff(default)) == ''
|
||||||
|
|
||||||
|
|
||||||
def test_diff_not_equal(self,default):
|
def test_diff_not_equal(self,default):
|
||||||
new = Geom(default.microstructure[1:,1:,1:]+1,default.size*.9,np.ones(3)-default.origin,comments=['modified'])
|
new = Geom(default.material[1:,1:,1:]+1,default.size*.9,np.ones(3)-default.origin,comments=['modified'])
|
||||||
assert str(default.diff(new)) != ''
|
assert str(default.diff(new)) != ''
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('masked',[True,False])
|
|
||||||
def test_set_microstructure(self,default,masked):
|
|
||||||
old = default.get_microstructure()
|
|
||||||
new = np.random.randint(200,size=default.grid)
|
|
||||||
default.set_microstructure(np.ma.MaskedArray(new,np.full_like(new,masked)))
|
|
||||||
assert np.all(default.microstructure==(old if masked else new))
|
|
||||||
|
|
||||||
|
|
||||||
def test_write_read_str(self,default,tmpdir):
|
def test_write_read_str(self,default,tmpdir):
|
||||||
default.to_file(str(tmpdir/'default.geom'),format='ASCII')
|
default.save_ASCII(str(tmpdir/'default.geom'))
|
||||||
new = Geom.from_file(str(tmpdir/'default.geom'))
|
new = Geom.load_ASCII(str(tmpdir/'default.geom'))
|
||||||
assert geom_equal(default,new)
|
assert geom_equal(default,new)
|
||||||
|
|
||||||
|
|
||||||
def test_write_read_file(self,default,tmpdir):
|
def test_write_read_file(self,default,tmpdir):
|
||||||
with open(tmpdir/'default.geom','w') as f:
|
with open(tmpdir/'default.geom','w') as f:
|
||||||
default.to_file(f,format='ASCII',pack=True)
|
default.save_ASCII(f,compress=True)
|
||||||
with open(tmpdir/'default.geom') as f:
|
with open(tmpdir/'default.geom') as f:
|
||||||
new = Geom.from_file(f)
|
new = Geom.load_ASCII(f)
|
||||||
assert geom_equal(default,new)
|
assert geom_equal(default,new)
|
||||||
|
|
||||||
def test_write_as_ASCII(self,default,tmpdir):
|
|
||||||
with open(tmpdir/'str.geom','w') as f:
|
|
||||||
f.write(default.as_ASCII())
|
|
||||||
with open(tmpdir/'str.geom') as f:
|
|
||||||
new = Geom.from_file(f)
|
|
||||||
assert geom_equal(default,new)
|
|
||||||
|
|
||||||
def test_read_write_vtr(self,default,tmpdir):
|
def test_read_write_vtr(self,default,tmpdir):
|
||||||
default.to_file(tmpdir/'default',format='vtr')
|
default.save(tmpdir/'default')
|
||||||
for _ in range(10):
|
for _ in range(10):
|
||||||
time.sleep(.2)
|
time.sleep(.2)
|
||||||
if os.path.exists(tmpdir/'default.vtr'): break
|
if os.path.exists(tmpdir/'default.vtr'): break
|
||||||
|
|
||||||
new = Geom.from_vtr(tmpdir/'default.vtr')
|
new = Geom.load(tmpdir/'default.vtr')
|
||||||
assert geom_equal(new,default)
|
assert geom_equal(new,default)
|
||||||
|
|
||||||
|
|
||||||
def test_invalid_geom(self,tmpdir):
|
def test_invalid_geom(self,tmpdir):
|
||||||
with open('invalid_file','w') as f:
|
with open('invalid_file','w') as f:
|
||||||
f.write('this is not a valid header')
|
f.write('this is not a valid header')
|
||||||
with open('invalid_file','r') as f:
|
with open('invalid_file','r') as f:
|
||||||
with pytest.raises(TypeError):
|
with pytest.raises(TypeError):
|
||||||
Geom.from_file(f)
|
Geom.load_ASCII(f)
|
||||||
|
|
||||||
|
|
||||||
def test_invalid_vtr(self,tmpdir):
|
def test_invalid_vtr(self,tmpdir):
|
||||||
v = VTK.from_rectilinearGrid(np.random.randint(5,10,3)*2,np.random.random(3) + 1.0)
|
v = VTK.from_rectilinearGrid(np.random.randint(5,10,3)*2,np.random.random(3) + 1.0)
|
||||||
v.to_file(tmpdir/'no_materialpoint.vtr')
|
v.save(tmpdir/'no_materialpoint.vtr')
|
||||||
for _ in range(10):
|
for _ in range(10):
|
||||||
time.sleep(.2)
|
time.sleep(.2)
|
||||||
if os.path.exists(tmpdir/'no_materialpoint.vtr'): break
|
if os.path.exists(tmpdir/'no_materialpoint.vtr'): break
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
Geom.from_vtr(tmpdir/'no_materialpoint.vtr')
|
Geom.load(tmpdir/'no_materialpoint.vtr')
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('pack',[True,False])
|
@pytest.mark.parametrize('compress',[True,False])
|
||||||
def test_pack(self,default,tmpdir,pack):
|
def test_compress(self,default,tmpdir,compress):
|
||||||
default.to_file(tmpdir/'default.geom',format='ASCII',pack=pack)
|
default.save_ASCII(tmpdir/'default.geom',compress=compress)
|
||||||
new = Geom.from_file(tmpdir/'default.geom')
|
new = Geom.load_ASCII(tmpdir/'default.geom')
|
||||||
assert geom_equal(new,default)
|
assert geom_equal(new,default)
|
||||||
|
|
||||||
def test_invalid_combination(self,default):
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
default.duplicate(default.microstructure[1:,1:,1:],size=np.ones(3), autosize=True)
|
|
||||||
|
|
||||||
def test_invalid_size(self,default):
|
def test_invalid_size(self,default):
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
default.duplicate(default.microstructure[1:,1:,1:],size=np.ones(2))
|
Geom(default.material[1:,1:,1:],
|
||||||
|
size=np.ones(2))
|
||||||
|
|
||||||
|
|
||||||
def test_invalid_origin(self,default):
|
def test_invalid_origin(self,default):
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
default.duplicate(default.microstructure[1:,1:,1:],origin=np.ones(4))
|
Geom(default.material[1:,1:,1:],
|
||||||
|
size=np.ones(3),
|
||||||
|
origin=np.ones(4))
|
||||||
|
|
||||||
def test_invalid_microstructure_size(self,default):
|
|
||||||
microstructure = np.ones((3,3))
|
def test_invalid_materials_shape(self,default):
|
||||||
|
material = np.ones((3,3))
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
default.duplicate(microstructure)
|
Geom(material,
|
||||||
|
size=np.ones(3))
|
||||||
|
|
||||||
def test_invalid_microstructure_type(self,default):
|
|
||||||
microstructure = np.random.randint(1,300,(3,4,5))==1
|
|
||||||
with pytest.raises(TypeError):
|
|
||||||
default.duplicate(microstructure)
|
|
||||||
|
|
||||||
def test_invalid_homogenization(self,default):
|
def test_invalid_materials_type(self,default):
|
||||||
|
material = np.random.randint(1,300,(3,4,5))==1
|
||||||
with pytest.raises(TypeError):
|
with pytest.raises(TypeError):
|
||||||
default.set_homogenization(homogenization=0)
|
Geom(material)
|
||||||
|
|
||||||
def test_invalid_write_format(self,default):
|
|
||||||
with pytest.raises(TypeError):
|
|
||||||
default.to_file(format='invalid')
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('directions,reflect',[
|
@pytest.mark.parametrize('directions,reflect',[
|
||||||
(['x'], False),
|
(['x'], False),
|
||||||
|
@ -154,10 +128,11 @@ class TestGeom:
|
||||||
modified = default.mirror(directions,reflect)
|
modified = default.mirror(directions,reflect)
|
||||||
tag = f'directions={"-".join(directions)}_reflect={reflect}'
|
tag = f'directions={"-".join(directions)}_reflect={reflect}'
|
||||||
reference = reference_dir/f'mirror_{tag}.geom'
|
reference = reference_dir/f'mirror_{tag}.geom'
|
||||||
if update: modified.to_file(reference)
|
if update: modified.save_ASCII(reference)
|
||||||
assert geom_equal(Geom.from_file(reference),
|
assert geom_equal(Geom.load_ASCII(reference),
|
||||||
modified)
|
modified)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('directions',[(1,2,'y'),('a','b','x'),[1]])
|
@pytest.mark.parametrize('directions',[(1,2,'y'),('a','b','x'),[1]])
|
||||||
def test_mirror_invalid(self,default,directions):
|
def test_mirror_invalid(self,default,directions):
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
|
@ -175,17 +150,20 @@ class TestGeom:
|
||||||
modified = default.flip(directions)
|
modified = default.flip(directions)
|
||||||
tag = f'directions={"-".join(directions)}'
|
tag = f'directions={"-".join(directions)}'
|
||||||
reference = reference_dir/f'flip_{tag}.geom'
|
reference = reference_dir/f'flip_{tag}.geom'
|
||||||
if update: modified.to_file(reference)
|
if update: modified.save_ASCII(reference)
|
||||||
assert geom_equal(Geom.from_file(reference),
|
assert geom_equal(Geom.load_ASCII(reference),
|
||||||
modified)
|
modified)
|
||||||
|
|
||||||
|
|
||||||
def test_flip_invariant(self,default):
|
def test_flip_invariant(self,default):
|
||||||
assert geom_equal(default,default.flip([]))
|
assert geom_equal(default,default.flip([]))
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('direction',[['x'],['x','y']])
|
@pytest.mark.parametrize('direction',[['x'],['x','y']])
|
||||||
def test_flip_double(self,default,direction):
|
def test_flip_double(self,default,direction):
|
||||||
assert geom_equal(default,default.flip(direction).flip(direction))
|
assert geom_equal(default,default.flip(direction).flip(direction))
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('directions',[(1,2,'y'),('a','b','x'),[1]])
|
@pytest.mark.parametrize('directions',[(1,2,'y'),('a','b','x'),[1]])
|
||||||
def test_flip_invalid(self,default,directions):
|
def test_flip_invalid(self,default,directions):
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
|
@ -199,14 +177,15 @@ class TestGeom:
|
||||||
current = default.clean(stencil,selection,periodic)
|
current = default.clean(stencil,selection,periodic)
|
||||||
reference = reference_dir/f'clean_{stencil}_{"+".join(map(str,[None] if selection is None else selection))}_{periodic}'
|
reference = reference_dir/f'clean_{stencil}_{"+".join(map(str,[None] if selection is None else selection))}_{periodic}'
|
||||||
if update and stencil > 1:
|
if update and stencil > 1:
|
||||||
current.to_file(reference,format='vtr')
|
current.save(reference)
|
||||||
for _ in range(10):
|
for _ in range(10):
|
||||||
time.sleep(.2)
|
time.sleep(.2)
|
||||||
if os.path.exists(reference.with_suffix('.vtr')): break
|
if os.path.exists(reference.with_suffix('.vtr')): break
|
||||||
assert geom_equal(Geom.from_vtr(reference) if stencil > 1 else default,
|
assert geom_equal(Geom.load(reference) if stencil > 1 else default,
|
||||||
current
|
current
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('grid',[
|
@pytest.mark.parametrize('grid',[
|
||||||
(10,11,10),
|
(10,11,10),
|
||||||
[10,13,10],
|
[10,13,10],
|
||||||
|
@ -220,26 +199,33 @@ class TestGeom:
|
||||||
modified = default.scale(grid)
|
modified = default.scale(grid)
|
||||||
tag = f'grid={util.srepr(grid,"-")}'
|
tag = f'grid={util.srepr(grid,"-")}'
|
||||||
reference = reference_dir/f'scale_{tag}.geom'
|
reference = reference_dir/f'scale_{tag}.geom'
|
||||||
if update: modified.to_file(reference)
|
if update: modified.save_ASCII(reference)
|
||||||
assert geom_equal(Geom.from_file(reference),
|
assert geom_equal(Geom.load_ASCII(reference),
|
||||||
modified)
|
modified)
|
||||||
|
|
||||||
|
|
||||||
def test_renumber(self,default):
|
def test_renumber(self,default):
|
||||||
microstructure = default.get_microstructure()
|
material = default.material.copy()
|
||||||
for m in np.unique(microstructure):
|
for m in np.unique(material):
|
||||||
microstructure[microstructure==m] = microstructure.max() + np.random.randint(1,30)
|
material[material==m] = material.max() + np.random.randint(1,30)
|
||||||
modified = default.duplicate(microstructure)
|
modified = Geom(material,
|
||||||
|
default.size,
|
||||||
|
default.origin)
|
||||||
assert not geom_equal(modified,default)
|
assert not geom_equal(modified,default)
|
||||||
assert geom_equal(default,
|
assert geom_equal(default,
|
||||||
modified.renumber())
|
modified.renumber())
|
||||||
|
|
||||||
|
|
||||||
def test_substitute(self,default):
|
def test_substitute(self,default):
|
||||||
offset = np.random.randint(1,500)
|
offset = np.random.randint(1,500)
|
||||||
modified = default.duplicate(default.get_microstructure() + offset)
|
modified = Geom(default.material + offset,
|
||||||
|
default.size,
|
||||||
|
default.origin)
|
||||||
assert not geom_equal(modified,default)
|
assert not geom_equal(modified,default)
|
||||||
assert geom_equal(default,
|
assert geom_equal(default,
|
||||||
modified.substitute(np.arange(default.microstructure.max())+1+offset,
|
modified.substitute(np.arange(default.material.max())+1+offset,
|
||||||
np.arange(default.microstructure.max())+1))
|
np.arange(default.material.max())+1))
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('axis_angle',[np.array([1,0,0,86.7]), np.array([0,1,0,90.4]), np.array([0,0,1,90]),
|
@pytest.mark.parametrize('axis_angle',[np.array([1,0,0,86.7]), np.array([0,1,0,90.4]), np.array([0,0,1,90]),
|
||||||
np.array([1,0,0,175]),np.array([0,-1,0,178]),np.array([0,0,1,180])])
|
np.array([1,0,0,175]),np.array([0,-1,0,178]),np.array([0,0,1,180])])
|
||||||
|
@ -249,21 +235,24 @@ class TestGeom:
|
||||||
modified.rotate(Rotation.from_axis_angle(axis_angle,degrees=True))
|
modified.rotate(Rotation.from_axis_angle(axis_angle,degrees=True))
|
||||||
assert geom_equal(default,modified)
|
assert geom_equal(default,modified)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('Eulers',[[32.0,68.0,21.0],
|
@pytest.mark.parametrize('Eulers',[[32.0,68.0,21.0],
|
||||||
[0.0,32.0,240.0]])
|
[0.0,32.0,240.0]])
|
||||||
def test_rotate(self,default,update,reference_dir,Eulers):
|
def test_rotate(self,default,update,reference_dir,Eulers):
|
||||||
modified = default.rotate(Rotation.from_Eulers(Eulers,degrees=True))
|
modified = default.rotate(Rotation.from_Eulers(Eulers,degrees=True))
|
||||||
tag = f'Eulers={util.srepr(Eulers,"-")}'
|
tag = f'Eulers={util.srepr(Eulers,"-")}'
|
||||||
reference = reference_dir/f'rotate_{tag}.geom'
|
reference = reference_dir/f'rotate_{tag}.geom'
|
||||||
if update: modified.to_file(reference)
|
if update: modified.save_ASCII(reference)
|
||||||
assert geom_equal(Geom.from_file(reference),
|
assert geom_equal(Geom.load_ASCII(reference),
|
||||||
modified)
|
modified)
|
||||||
|
|
||||||
|
|
||||||
def test_canvas(self,default):
|
def test_canvas(self,default):
|
||||||
grid = default.grid
|
grid = default.grid
|
||||||
grid_add = np.random.randint(0,30,(3))
|
grid_add = np.random.randint(0,30,(3))
|
||||||
modified = default.canvas(grid + grid_add)
|
modified = default.canvas(grid + grid_add)
|
||||||
assert np.all(modified.microstructure[:grid[0],:grid[1],:grid[2]] == default.microstructure)
|
assert np.all(modified.material[:grid[0],:grid[1],:grid[2]] == default.material)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('center1,center2',[(np.random.random(3)*.5,np.random.random()*8),
|
@pytest.mark.parametrize('center1,center2',[(np.random.random(3)*.5,np.random.random()*8),
|
||||||
(np.random.randint(4,8,(3)),np.random.randint(9,12,(3)))])
|
(np.random.randint(4,8,(3)),np.random.randint(9,12,(3)))])
|
||||||
|
@ -276,13 +265,14 @@ class TestGeom:
|
||||||
np.random.rand()*4,
|
np.random.rand()*4,
|
||||||
np.random.randint(20)])
|
np.random.randint(20)])
|
||||||
def test_add_primitive_shift(self,center1,center2,diameter,exponent):
|
def test_add_primitive_shift(self,center1,center2,diameter,exponent):
|
||||||
"""Same volume fraction for periodic microstructures and different center."""
|
"""Same volume fraction for periodic geometries and different center."""
|
||||||
o = np.random.random(3)-.5
|
o = np.random.random(3)-.5
|
||||||
g = np.random.randint(8,32,(3))
|
g = np.random.randint(8,32,(3))
|
||||||
s = np.random.random(3)+.5
|
s = np.random.random(3)+.5
|
||||||
G_1 = Geom(np.ones(g,'i'),s,o).add_primitive(diameter,center1,exponent)
|
G_1 = Geom(np.ones(g,'i'),s,o).add_primitive(diameter,center1,exponent)
|
||||||
G_2 = Geom(np.ones(g,'i'),s,o).add_primitive(diameter,center2,exponent)
|
G_2 = Geom(np.ones(g,'i'),s,o).add_primitive(diameter,center2,exponent)
|
||||||
assert np.count_nonzero(G_1.microstructure!=2) == np.count_nonzero(G_2.microstructure!=2)
|
assert np.count_nonzero(G_1.material!=2) == np.count_nonzero(G_2.material!=2)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('center',[np.random.randint(4,10,(3)),
|
@pytest.mark.parametrize('center',[np.random.randint(4,10,(3)),
|
||||||
np.random.randint(2,10),
|
np.random.randint(2,10),
|
||||||
|
@ -299,6 +289,7 @@ class TestGeom:
|
||||||
G_2 = Geom(np.ones(g,'i'),[1.,1.,1.]).add_primitive(.3,center,1,fill,Rotation.from_Eulers(eu),inverse,periodic=periodic)
|
G_2 = Geom(np.ones(g,'i'),[1.,1.,1.]).add_primitive(.3,center,1,fill,Rotation.from_Eulers(eu),inverse,periodic=periodic)
|
||||||
assert geom_equal(G_1,G_2)
|
assert geom_equal(G_1,G_2)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('trigger',[[1],[]])
|
@pytest.mark.parametrize('trigger',[[1],[]])
|
||||||
def test_vicinity_offset(self,trigger):
|
def test_vicinity_offset(self,trigger):
|
||||||
offset = np.random.randint(2,4)
|
offset = np.random.randint(2,4)
|
||||||
|
@ -317,13 +308,15 @@ class TestGeom:
|
||||||
|
|
||||||
geom = Geom(m,np.random.rand(3)).vicinity_offset(vicinity,offset,trigger=trigger)
|
geom = Geom(m,np.random.rand(3)).vicinity_offset(vicinity,offset,trigger=trigger)
|
||||||
|
|
||||||
assert np.all(m2==geom.microstructure)
|
assert np.all(m2==geom.material)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('periodic',[True,False])
|
@pytest.mark.parametrize('periodic',[True,False])
|
||||||
def test_vicinity_offset_invariant(self,default,periodic):
|
def test_vicinity_offset_invariant(self,default,periodic):
|
||||||
old = default.get_microstructure()
|
offset = default.vicinity_offset(trigger=[default.material.max()+1,
|
||||||
default.vicinity_offset(trigger=[old.max()+1,old.min()-1])
|
default.material.min()-1])
|
||||||
assert np.all(old==default.microstructure)
|
assert np.all(offset.material==default.material)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('periodic',[True,False])
|
@pytest.mark.parametrize('periodic',[True,False])
|
||||||
def test_tessellation_approaches(self,periodic):
|
def test_tessellation_approaches(self,periodic):
|
||||||
|
@ -335,6 +328,7 @@ class TestGeom:
|
||||||
Laguerre = Geom.from_Laguerre_tessellation(grid,size,seeds,np.ones(N_seeds),periodic)
|
Laguerre = Geom.from_Laguerre_tessellation(grid,size,seeds,np.ones(N_seeds),periodic)
|
||||||
assert geom_equal(Laguerre,Voronoi)
|
assert geom_equal(Laguerre,Voronoi)
|
||||||
|
|
||||||
|
|
||||||
def test_Laguerre_weights(self):
|
def test_Laguerre_weights(self):
|
||||||
grid = np.random.randint(10,20,3)
|
grid = np.random.randint(10,20,3)
|
||||||
size = np.random.random(3) + 1.0
|
size = np.random.random(3) + 1.0
|
||||||
|
@ -344,17 +338,18 @@ class TestGeom:
|
||||||
ms = np.random.randint(1, N_seeds+1)
|
ms = np.random.randint(1, N_seeds+1)
|
||||||
weights[ms-1] = np.random.random()
|
weights[ms-1] = np.random.random()
|
||||||
Laguerre = Geom.from_Laguerre_tessellation(grid,size,seeds,weights,np.random.random()>0.5)
|
Laguerre = Geom.from_Laguerre_tessellation(grid,size,seeds,weights,np.random.random()>0.5)
|
||||||
assert np.all(Laguerre.microstructure == ms)
|
assert np.all(Laguerre.material == ms)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('approach',['Laguerre','Voronoi'])
|
@pytest.mark.parametrize('approach',['Laguerre','Voronoi'])
|
||||||
def test_tessellate_bicrystal(self,approach):
|
def test_tessellate_bicrystal(self,approach):
|
||||||
grid = np.random.randint(5,10,3)*2
|
grid = np.random.randint(5,10,3)*2
|
||||||
size = grid.astype(np.float)
|
size = grid.astype(np.float)
|
||||||
seeds = np.vstack((size*np.array([0.5,0.25,0.5]),size*np.array([0.5,0.75,0.5])))
|
seeds = np.vstack((size*np.array([0.5,0.25,0.5]),size*np.array([0.5,0.75,0.5])))
|
||||||
microstructure = np.ones(grid)
|
material = np.ones(grid)
|
||||||
microstructure[:,grid[1]//2:,:] = 2
|
material[:,grid[1]//2:,:] = 2
|
||||||
if approach == 'Laguerre':
|
if approach == 'Laguerre':
|
||||||
geom = Geom.from_Laguerre_tessellation(grid,size,seeds,np.ones(2),np.random.random()>0.5)
|
geom = Geom.from_Laguerre_tessellation(grid,size,seeds,np.ones(2),np.random.random()>0.5)
|
||||||
elif approach == 'Voronoi':
|
elif approach == 'Voronoi':
|
||||||
geom = Geom.from_Voronoi_tessellation(grid,size,seeds, np.random.random()>0.5)
|
geom = Geom.from_Voronoi_tessellation(grid,size,seeds, np.random.random()>0.5)
|
||||||
assert np.all(geom.microstructure == microstructure)
|
assert np.all(geom.material == material)
|
||||||
|
|
|
@ -106,8 +106,8 @@ class TestOrientation:
|
||||||
coords = np.array([(1,i+1) for i,x in enumerate(eu)])
|
coords = np.array([(1,i+1) for i,x in enumerate(eu)])
|
||||||
table = Table(eu,{'Eulers':(3,)})
|
table = Table(eu,{'Eulers':(3,)})
|
||||||
table = table.add('pos',coords)
|
table = table.add('pos',coords)
|
||||||
table.to_ASCII(reference)
|
table.save(reference)
|
||||||
assert np.allclose(eu,Table.from_ASCII(reference).get('Eulers'))
|
assert np.allclose(eu,Table.load(reference).get('Eulers'))
|
||||||
|
|
||||||
@pytest.mark.parametrize('lattice',Lattice.lattices)
|
@pytest.mark.parametrize('lattice',Lattice.lattices)
|
||||||
def test_disorientation360(self,lattice):
|
def test_disorientation360(self,lattice):
|
||||||
|
@ -129,4 +129,3 @@ class TestOrientation:
|
||||||
eqs = [r for r in R_1.equivalent]
|
eqs = [r for r in R_1.equivalent]
|
||||||
R_2 = Orientation.from_average(eqs)
|
R_2 = Orientation.from_average(eqs)
|
||||||
assert np.allclose(R_1.rotation.quaternion,R_2.rotation.quaternion)
|
assert np.allclose(R_1.rotation.quaternion,R_2.rotation.quaternion)
|
||||||
|
|
||||||
|
|
|
@ -339,8 +339,8 @@ class TestResult:
|
||||||
@pytest.mark.parametrize('output',['F',[],['F','P']])
|
@pytest.mark.parametrize('output',['F',[],['F','P']])
|
||||||
def test_vtk(self,tmp_path,default,output):
|
def test_vtk(self,tmp_path,default,output):
|
||||||
os.chdir(tmp_path)
|
os.chdir(tmp_path)
|
||||||
default.to_vtk(output)
|
default.save_vtk(output)
|
||||||
|
|
||||||
def test_XDMF(self,tmp_path,single_phase):
|
def test_XDMF(self,tmp_path,single_phase):
|
||||||
os.chdir(tmp_path)
|
os.chdir(tmp_path)
|
||||||
single_phase.write_XDMF()
|
single_phase.save_XDMF()
|
||||||
|
|
|
@ -461,7 +461,7 @@ def mul(me, other):
|
||||||
if other.shape == (3,):
|
if other.shape == (3,):
|
||||||
A = me.quaternion[0]**2.0 - np.dot(me.quaternion[1:],me.quaternion[1:])
|
A = me.quaternion[0]**2.0 - np.dot(me.quaternion[1:],me.quaternion[1:])
|
||||||
B = 2.0 * np.dot(me.quaternion[1:],other)
|
B = 2.0 * np.dot(me.quaternion[1:],other)
|
||||||
C = 2.0 * _P*me.quaternion[0]
|
C = 2.0 * _P * me.quaternion[0]
|
||||||
|
|
||||||
return A*other + B*me.quaternion[1:] + C * np.cross(me.quaternion[1:],other)
|
return A*other + B*me.quaternion[1:] + C * np.cross(me.quaternion[1:],other)
|
||||||
|
|
||||||
|
@@ -496,9 +496,8 @@ class TestRotation:
 o = backward(forward(m))
 ok = np.allclose(m,o,atol=atol)
 if np.isclose(rot.as_quaternion()[0],0.0,atol=atol):
-ok = ok or np.allclose(m*-1.,o,atol=atol)
+ok |= np.allclose(m*-1.,o,atol=atol)
-print(m,o,rot.as_quaternion())
-assert ok and np.isclose(np.linalg.norm(o),1.0)
+assert ok and np.isclose(np.linalg.norm(o),1.0), f'{m},{o},{rot.as_quaternion()}'

 @pytest.mark.parametrize('forward,backward',[(Rotation._om2qu,Rotation._qu2om),
 (Rotation._om2eu,Rotation._eu2om),
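The TestRotation hunks below repeatedly fold a bare print followed by a plain assert into a single assert carrying an f-string message, so the diagnostic is only shown when the check fails. A minimal, self-contained sketch of that idiom (the values are illustrative, not taken from the test suite):

    import numpy as np

    def check_roundtrip(m, o, atol=1e-8):
        # old pattern: print(m, o); assert np.allclose(m, o, atol=atol)
        # new pattern: attach the diagnostic to the assert itself
        assert np.allclose(m, o, atol=atol), f'{m},{o}'

    check_roundtrip(np.ones(3), np.ones(3))   # passes silently
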
@@ -512,8 +511,7 @@ class TestRotation:
 m = rot.as_matrix()
 o = backward(forward(m))
 ok = np.allclose(m,o,atol=atol)
-print(m,o,rot.as_quaternion())
-assert ok and np.isclose(np.linalg.det(o),1.0)
+assert ok and np.isclose(np.linalg.det(o),1.0), f'{m},{o},{rot.as_quaternion()}'

 @pytest.mark.parametrize('forward,backward',[(Rotation._eu2qu,Rotation._qu2eu),
 (Rotation._eu2om,Rotation._om2eu),

@@ -531,9 +529,9 @@ class TestRotation:
 ok = ok or np.allclose(np.where(np.isclose(m,u),m-u,m),np.where(np.isclose(o,u),o-u,o),atol=atol)
 if np.isclose(m[1],0.0,atol=atol) or np.isclose(m[1],np.pi,atol=atol):
 sum_phi = np.unwrap([m[0]+m[2],o[0]+o[2]])
-ok = ok or np.isclose(sum_phi[0],sum_phi[1],atol=atol)
+ok |= np.isclose(sum_phi[0],sum_phi[1],atol=atol)
-print(m,o,rot.as_quaternion())
-assert ok and (np.zeros(3)-1.e-9 <= o).all() and (o <= np.array([np.pi*2.,np.pi,np.pi*2.])+1.e-9).all()
+assert ok and (np.zeros(3)-1.e-9 <= o).all() \
+   and (o <= np.array([np.pi*2.,np.pi,np.pi*2.])+1.e-9).all(), f'{m},{o},{rot.as_quaternion()}'

 @pytest.mark.parametrize('forward,backward',[(Rotation._ax2qu,Rotation._qu2ax),
 (Rotation._ax2om,Rotation._om2ax),
@@ -548,9 +546,8 @@ class TestRotation:
 o = backward(forward(m))
 ok = np.allclose(m,o,atol=atol)
 if np.isclose(m[3],np.pi,atol=atol):
-ok = ok or np.allclose(m*np.array([-1.,-1.,-1.,1.]),o,atol=atol)
+ok |= np.allclose(m*np.array([-1.,-1.,-1.,1.]),o,atol=atol)
-print(m,o,rot.as_quaternion())
-assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) and o[3]<=np.pi+1.e-9
+assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) and o[3]<=np.pi+1.e-9, f'{m},{o},{rot.as_quaternion()}'

 @pytest.mark.parametrize('forward,backward',[(Rotation._ro2qu,Rotation._qu2ro),
 #(Rotation._ro2om,Rotation._om2ro),

@@ -566,8 +563,7 @@ class TestRotation:
 o = backward(forward(m))
 ok = np.allclose(np.clip(m,None,cutoff),np.clip(o,None,cutoff),atol=atol)
 ok = ok or np.isclose(m[3],0.0,atol=atol)
-print(m,o,rot.as_quaternion())
-assert ok and np.isclose(np.linalg.norm(o[:3]),1.0)
+assert ok and np.isclose(np.linalg.norm(o[:3]),1.0), f'{m},{o},{rot.as_quaternion()}'

 @pytest.mark.parametrize('forward,backward',[(Rotation._ho2qu,Rotation._qu2ho),
 (Rotation._ho2om,Rotation._om2ho),

@@ -581,8 +577,7 @@ class TestRotation:
 m = rot.as_homochoric()
 o = backward(forward(m))
 ok = np.allclose(m,o,atol=atol)
-print(m,o,rot.as_quaternion())
-assert ok and np.linalg.norm(o) < _R1 + 1.e-9
+assert ok and np.linalg.norm(o) < _R1 + 1.e-9, f'{m},{o},{rot.as_quaternion()}'

 @pytest.mark.parametrize('forward,backward',[(Rotation._cu2qu,Rotation._qu2cu),
 (Rotation._cu2om,Rotation._om2cu),
@@ -598,8 +593,7 @@ class TestRotation:
 ok = np.allclose(m,o,atol=atol)
 if np.count_nonzero(np.isclose(np.abs(o),np.pi**(2./3.)*.5)):
 ok = ok or np.allclose(m*-1.,o,atol=atol)
-print(m,o,rot.as_quaternion())
-assert ok and np.max(np.abs(o)) < np.pi**(2./3.) * 0.5 + 1.e-9
+assert ok and np.max(np.abs(o)) < np.pi**(2./3.) * 0.5 + 1.e-9, f'{m},{o},{rot.as_quaternion()}'

 @pytest.mark.parametrize('vectorized, single',[(Rotation._qu2om,qu2om),
 (Rotation._qu2eu,qu2eu),

@@ -612,8 +606,7 @@ class TestRotation:
 vectorized(qu.reshape(qu.shape[0]//2,-1,4))
 co = vectorized(qu)
 for q,c in zip(qu,co):
-print(q,c)
-assert np.allclose(single(q),c) and np.allclose(single(q),vectorized(q))
+assert np.allclose(single(q),c) and np.allclose(single(q),vectorized(q)), f'{q},{c}'

 @pytest.mark.parametrize('vectorized, single',[(Rotation._om2qu,om2qu),

@@ -625,8 +618,7 @@ class TestRotation:
 vectorized(om.reshape(om.shape[0]//2,-1,3,3))
 co = vectorized(om)
 for o,c in zip(om,co):
-print(o,c)
-assert np.allclose(single(o),c) and np.allclose(single(o),vectorized(o))
+assert np.allclose(single(o),c) and np.allclose(single(o),vectorized(o)), f'{o},{c}'

 @pytest.mark.parametrize('vectorized, single',[(Rotation._eu2qu,eu2qu),
 (Rotation._eu2om,eu2om),
@@ -638,8 +630,7 @@ class TestRotation:
 vectorized(eu.reshape(eu.shape[0]//2,-1,3))
 co = vectorized(eu)
 for e,c in zip(eu,co):
-print(e,c)
-assert np.allclose(single(e),c) and np.allclose(single(e),vectorized(e))
+assert np.allclose(single(e),c) and np.allclose(single(e),vectorized(e)), f'{e},{c}'

 @pytest.mark.parametrize('vectorized, single',[(Rotation._ax2qu,ax2qu),
 (Rotation._ax2om,ax2om),

@@ -651,8 +642,7 @@ class TestRotation:
 vectorized(ax.reshape(ax.shape[0]//2,-1,4))
 co = vectorized(ax)
 for a,c in zip(ax,co):
-print(a,c)
-assert np.allclose(single(a),c) and np.allclose(single(a),vectorized(a))
+assert np.allclose(single(a),c) and np.allclose(single(a),vectorized(a)), f'{a},{c}'

 @pytest.mark.parametrize('vectorized, single',[(Rotation._ro2ax,ro2ax),

@@ -663,8 +653,7 @@ class TestRotation:
 vectorized(ro.reshape(ro.shape[0]//2,-1,4))
 co = vectorized(ro)
 for r,c in zip(ro,co):
-print(r,c)
-assert np.allclose(single(r),c) and np.allclose(single(r),vectorized(r))
+assert np.allclose(single(r),c) and np.allclose(single(r),vectorized(r)), f'{r},{c}'

 @pytest.mark.parametrize('vectorized, single',[(Rotation._ho2ax,ho2ax),
 (Rotation._ho2cu,ho2cu)])
@@ -674,8 +663,7 @@ class TestRotation:
 vectorized(ho.reshape(ho.shape[0]//2,-1,3))
 co = vectorized(ho)
 for h,c in zip(ho,co):
-print(h,c)
-assert np.allclose(single(h),c) and np.allclose(single(h),vectorized(h))
+assert np.allclose(single(h),c) and np.allclose(single(h),vectorized(h)), f'{h},{c}'

 @pytest.mark.parametrize('vectorized, single',[(Rotation._cu2ho,cu2ho)])
 def test_cubochoric_vectorization(self,set_of_rotations,vectorized,single):

@@ -684,8 +672,7 @@ class TestRotation:
 vectorized(cu.reshape(cu.shape[0]//2,-1,3))
 co = vectorized(cu)
 for u,c in zip(cu,co):
-print(u,c)
-assert np.allclose(single(u),c) and np.allclose(single(u),vectorized(u))
+assert np.allclose(single(u),c) and np.allclose(single(u),vectorized(u)), f'{u},{c}'

 @pytest.mark.parametrize('func',[Rotation.from_axis_angle])
 def test_normalization_vectorization(self,func):
@@ -703,9 +690,8 @@ class TestRotation:
 o = Rotation.from_Eulers(rot.as_Eulers(degrees),degrees).as_quaternion()
 ok = np.allclose(m,o,atol=atol)
 if np.isclose(rot.as_quaternion()[0],0.0,atol=atol):
-ok = ok or np.allclose(m*-1.,o,atol=atol)
+ok |= np.allclose(m*-1.,o,atol=atol)
-print(m,o,rot.as_quaternion())
-assert ok and np.isclose(np.linalg.norm(o),1.0)
+assert ok and np.isclose(np.linalg.norm(o),1.0), f'{m},{o},{rot.as_quaternion()}'

 @pytest.mark.parametrize('P',[1,-1])
 @pytest.mark.parametrize('normalize',[True,False])

@@ -717,12 +703,12 @@ class TestRotation:
 o = Rotation.from_axis_angle(rot.as_axis_angle(degrees)*c,degrees,normalize,P).as_Eulers()
 u = np.array([np.pi*2,np.pi,np.pi*2])
 ok = np.allclose(m,o,atol=atol)
-ok = ok or np.allclose(np.where(np.isclose(m,u),m-u,m),np.where(np.isclose(o,u),o-u,o),atol=atol)
+ok |= np.allclose(np.where(np.isclose(m,u),m-u,m),np.where(np.isclose(o,u),o-u,o),atol=atol)
 if np.isclose(m[1],0.0,atol=atol) or np.isclose(m[1],np.pi,atol=atol):
 sum_phi = np.unwrap([m[0]+m[2],o[0]+o[2]])
-ok = ok or np.isclose(sum_phi[0],sum_phi[1],atol=atol)
+ok |= np.isclose(sum_phi[0],sum_phi[1],atol=atol)
-print(m,o,rot.as_quaternion())
-assert ok and (np.zeros(3)-1.e-9 <= o).all() and (o <= np.array([np.pi*2.,np.pi,np.pi*2.])+1.e-9).all()
+assert ok and (np.zeros(3)-1.e-9 <= o).all() \
+   and (o <= np.array([np.pi*2.,np.pi,np.pi*2.])+1.e-9).all(), f'{m},{o},{rot.as_quaternion()}'

 def test_matrix(self,set_of_rotations):
 for rot in set_of_rotations:
@@ -731,8 +717,8 @@ class TestRotation:
 ok = np.allclose(m,o,atol=atol)
 if np.isclose(m[3],np.pi,atol=atol):
 ok = ok or np.allclose(m*np.array([-1.,-1.,-1.,1.]),o,atol=atol)
-print(m,o,rot.as_quaternion())
-assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) and o[3]<=np.pi+1.e-9
+assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) \
+   and o[3]<=np.pi+1.e-9, f'{m},{o},{rot.as_quaternion()}'

 @pytest.mark.parametrize('P',[1,-1])
 @pytest.mark.parametrize('normalize',[True,False])

@@ -742,8 +728,7 @@ class TestRotation:
 m = rot.as_matrix()
 o = Rotation.from_Rodrigues(rot.as_Rodrigues()*c,normalize,P).as_matrix()
 ok = np.allclose(m,o,atol=atol)
-print(m,o)
-assert ok and np.isclose(np.linalg.det(o),1.0)
+assert ok and np.isclose(np.linalg.det(o),1.0), f'{m},{o}'

 @pytest.mark.parametrize('P',[1,-1])
 def test_homochoric(self,set_of_rotations,P):

@@ -753,8 +738,7 @@ class TestRotation:
 o = Rotation.from_homochoric(rot.as_homochoric()*P*-1,P).as_Rodrigues()
 ok = np.allclose(np.clip(m,None,cutoff),np.clip(o,None,cutoff),atol=atol)
 ok = ok or np.isclose(m[3],0.0,atol=atol)
-print(m,o,rot.as_quaternion())
-assert ok and np.isclose(np.linalg.norm(o[:3]),1.0)
+assert ok and np.isclose(np.linalg.norm(o[:3]),1.0), f'{m},{o},{rot.as_quaternion()}'

 @pytest.mark.parametrize('P',[1,-1])
 def test_cubochoric(self,set_of_rotations,P):
@@ -762,8 +746,7 @@ class TestRotation:
 m = rot.as_homochoric()
 o = Rotation.from_cubochoric(rot.as_cubochoric()*P*-1,P).as_homochoric()
 ok = np.allclose(m,o,atol=atol)
-print(m,o,rot.as_quaternion())
-assert ok and np.linalg.norm(o) < (3.*np.pi/4.)**(1./3.) + 1.e-9
+assert ok and np.linalg.norm(o) < (3.*np.pi/4.)**(1./3.) + 1.e-9, f'{m},{o},{rot.as_quaternion()}'

 @pytest.mark.parametrize('P',[1,-1])
 @pytest.mark.parametrize('accept_homomorph',[True,False])

@@ -774,9 +757,8 @@ class TestRotation:
 o = Rotation.from_quaternion(rot.as_quaternion()*c,accept_homomorph,P).as_cubochoric()
 ok = np.allclose(m,o,atol=atol)
 if np.count_nonzero(np.isclose(np.abs(o),np.pi**(2./3.)*.5)):
-ok = ok or np.allclose(m*-1.,o,atol=atol)
+ok |= np.allclose(m*-1.,o,atol=atol)
-print(m,o,rot.as_quaternion())
-assert ok and o.max() < np.pi**(2./3.)*0.5+1.e-9
+assert ok and o.max() < np.pi**(2./3.)*0.5+1.e-9, f'{m},{o},{rot.as_quaternion()}'

 @pytest.mark.parametrize('reciprocal',[True,False])
 def test_basis(self,set_of_rotations,reciprocal):
@@ -858,8 +840,7 @@ class TestRotation:
 for rot in set_of_rotations:
 v = rot.broadcast_to((5,)) @ data
 for i in range(data.shape[0]):
-print(i-data[i])
-assert np.allclose(mul(rot,data[i]),v[i])
+assert np.allclose(mul(rot,data[i]),v[i]), f'{i-data[i]}'

 @pytest.mark.parametrize('data',[np.random.rand(3),
@@ -926,34 +907,39 @@ class TestRotation:
 @pytest.mark.parametrize('sigma',[5,10,15,20])
 @pytest.mark.parametrize('N',[1000,10000,100000])
 def test_spherical_component(self,N,sigma):
-c = Rotation.from_random()
-o = Rotation.from_spherical_component(c,sigma,N)
-_, angles = c.misorientation(o).as_axis_angle(pair=True,degrees=True)
-angles[::2] *= -1 # flip angle for every second to symmetrize distribution
-
-p = stats.normaltest(angles)[1]
+p = []
+for run in range(5):
+    c = Rotation.from_random()
+    o = Rotation.from_spherical_component(c,sigma,N)
+    _, angles = c.misorientation(o).as_axis_angle(pair=True,degrees=True)
+    angles[::2] *= -1 # flip angle for every second to symmetrize distribution
+
+    p.append(stats.normaltest(angles)[1])
+
 sigma_out = np.std(angles)
-print(f'\np: {p}, sigma ratio {sigma/sigma_out}')
-assert (.9 < sigma/sigma_out < 1.1) and p > 0.001
+p = np.average(p)
+assert (.9 < sigma/sigma_out < 1.1) and p > 1e-2, f'{sigma/sigma_out},{p}'

 @pytest.mark.parametrize('sigma',[5,10,15,20])
 @pytest.mark.parametrize('N',[1000,10000,100000])
 def test_from_fiber_component(self,N,sigma):
-"""https://en.wikipedia.org/wiki/Full_width_at_half_maximum."""
-alpha = np.random.random(2)*np.pi
-beta = np.random.random(2)*np.pi
+p = []
+for run in range(5):
+    alpha = np.random.random()*2*np.pi,np.arccos(np.random.random())
+    beta = np.random.random()*2*np.pi,np.arccos(np.random.random())

 f_in_C = np.array([np.sin(alpha[0])*np.cos(alpha[1]), np.sin(alpha[0])*np.sin(alpha[1]), np.cos(alpha[0])])
 f_in_S = np.array([np.sin(beta[0] )*np.cos(beta[1] ), np.sin(beta[0] )*np.sin(beta[1] ), np.cos(beta[0] )])
 ax = np.append(np.cross(f_in_C,f_in_S), - np.arccos(np.dot(f_in_C,f_in_S)))
 n = Rotation.from_axis_angle(ax if ax[3] > 0.0 else ax*-1.0 ,normalize=True) # rotation to align fiber axis in crystal and sample system

 o = Rotation.from_fiber_component(alpha,beta,np.radians(sigma),N,False)
 angles = np.arccos(np.clip(np.dot(o@np.broadcast_to(f_in_S,(N,3)),n@f_in_S),-1,1))
 dist = np.array(angles) * (np.random.randint(0,2,N)*2-1)

-p = stats.normaltest(dist)[1]
+    p.append(stats.normaltest(dist)[1])
+
 sigma_out = np.degrees(np.std(dist))
-print(f'\np: {p}, sigma ratio {sigma/sigma_out}')
-assert (.9 < sigma/sigma_out < 1.1) and p > 0.001
+p = np.average(p)
+assert (.9 < sigma/sigma_out < 1.1) and p > 1e-2, f'{sigma/sigma_out},{p}'
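The reworked sampling tests above draw several independent populations and average the normality-test p-values instead of relying on a single draw, which makes the statistical check less prone to random failures. A minimal sketch of that pattern with plain NumPy data; np.random.normal here only stands in for Rotation.from_spherical_component and is purely illustrative:

    import numpy as np
    from scipy import stats

    sigma = 10.0
    p = []
    for run in range(5):                          # average over several independent draws
        angles = np.random.normal(0.0, sigma, 10000)
        p.append(stats.normaltest(angles)[1])     # p-value of the normality test

    sigma_out = np.std(angles)
    assert .9 < sigma/sigma_out < 1.1 and np.average(p) > 1e-2, f'{sigma/sigma_out},{np.average(p)}'
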
@@ -35,50 +35,50 @@ class TestTable:

 @pytest.mark.parametrize('mode',['str','path'])
 def test_write_read(self,default,tmpdir,mode):
-default.to_file(tmpdir/'default.txt')
+default.save(tmpdir/'default.txt')
 if mode == 'path':
-new = Table.from_ASCII(tmpdir/'default.txt')
+new = Table.load(tmpdir/'default.txt')
 elif mode == 'str':
-new = Table.from_ASCII(str(tmpdir/'default.txt'))
+new = Table.load(str(tmpdir/'default.txt'))
 assert all(default.data==new.data) and default.shapes == new.shapes

 def test_write_read_file(self,default,tmpdir):
 with open(tmpdir/'default.txt','w') as f:
-default.to_file(f)
+default.save(f)
 with open(tmpdir/'default.txt') as f:
-new = Table.from_ASCII(f)
+new = Table.load(f)
 assert all(default.data==new.data) and default.shapes == new.shapes

-def test_write_read_new_style(self,default,tmpdir):
-with open(tmpdir/'new_style.txt','w') as f:
-default.to_file(f,new_style=True)
-with open(tmpdir/'new_style.txt') as f:
-new = Table.from_ASCII(f)
+def test_write_read_legacy_style(self,default,tmpdir):
+with open(tmpdir/'legacy.txt','w') as f:
+default.save(f,legacy=True)
+with open(tmpdir/'legacy.txt') as f:
+new = Table.load(f)
 assert all(default.data==new.data) and default.shapes == new.shapes

 def test_write_invalid_format(self,default,tmpdir):
 with pytest.raises(TypeError):
-default.to_file(tmpdir/'shouldnotbethere.txt',format='invalid')
+default.save(tmpdir/'shouldnotbethere.txt',format='invalid')

 @pytest.mark.parametrize('mode',['str','path'])
 def test_read_ang(self,reference_dir,mode):
 if mode == 'path':
-new = Table.from_ang(reference_dir/'simple.ang')
+new = Table.load_ang(reference_dir/'simple.ang')
 elif mode == 'str':
-new = Table.from_ang(str(reference_dir/'simple.ang'))
+new = Table.load_ang(str(reference_dir/'simple.ang'))
 assert new.data.shape == (4,10) and \
 new.labels == ['eu', 'pos', 'IQ', 'CI', 'ID', 'intensity', 'fit']

 def test_read_ang_file(self,reference_dir):
 f = open(reference_dir/'simple.ang')
-new = Table.from_ang(f)
+new = Table.load_ang(f)
 assert new.data.shape == (4,10) and \
 new.labels == ['eu', 'pos', 'IQ', 'CI', 'ID', 'intensity', 'fit']

 @pytest.mark.parametrize('fname',['datatype-mix.txt','whitespace-mix.txt'])
 def test_read_strange(self,reference_dir,fname):
 with open(reference_dir/fname) as f:
-Table.from_ASCII(f)
+Table.load(f)

 def test_set(self,default):
 d = default.set('F',np.zeros((5,3,3)),'set to zero').get('F')
@@ -166,7 +166,7 @@ class TestTable:
 x = np.random.random((5,12))
 t = Table(x,{'F':(3,3),'v':(3,)},['random test data'])
 unsort = t.get('4_F')
 sort = t.sort_by('4_F').get('4_F')
 assert np.all(np.sort(unsort,0)==sort)

 def test_sort_revert(self):

@@ -179,6 +179,6 @@ class TestTable:
 t = Table(np.array([[0,1,],[2,1,]]),
 {'v':(2,)},
 ['test data'])\
 .add('s',np.array(['b','a']))\
 .sort_by('s')
 assert np.all(t.get('1_v') == np.array([2,0]).reshape(2,1))
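The hunks above consistently rename the Table I/O entry points: from_ASCII becomes load, from_ang becomes load_ang, and to_file becomes save, with the old ASCII layout kept behind a legacy flag. A minimal round-trip sketch using the new names; the column data and file name are made up for illustration:

    import numpy as np
    from damask import Table

    t = Table(np.random.random((5,3)), {'v': (3,)}, ['illustrative data'])
    t.save('example.txt')                 # formerly t.to_file(...)
    back = Table.load('example.txt')      # formerly Table.from_ASCII(...)
    assert np.allclose(t.get('v'), back.get('v'))
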
@@ -32,22 +32,22 @@ class TestVTK:
 origin = np.random.random(3)
 v = VTK.from_rectilinearGrid(grid,size,origin)
 string = v.__repr__()
-v.to_file(tmp_path/'rectilinearGrid',False)
-vtr = VTK.from_file(tmp_path/'rectilinearGrid.vtr')
+v.save(tmp_path/'rectilinearGrid',False)
+vtr = VTK.load(tmp_path/'rectilinearGrid.vtr')
 with open(tmp_path/'rectilinearGrid.vtk','w') as f:
 f.write(string)
-vtk = VTK.from_file(tmp_path/'rectilinearGrid.vtk','VTK_rectilinearGrid')
+vtk = VTK.load(tmp_path/'rectilinearGrid.vtk','VTK_rectilinearGrid')
 assert(string == vtr.__repr__() == vtk.__repr__())

 def test_polyData(self,tmp_path):
 points = np.random.rand(100,3)
 v = VTK.from_polyData(points)
 string = v.__repr__()
-v.to_file(tmp_path/'polyData',False)
-vtp = VTK.from_file(tmp_path/'polyData.vtp')
+v.save(tmp_path/'polyData',False)
+vtp = VTK.load(tmp_path/'polyData.vtp')
 with open(tmp_path/'polyData.vtk','w') as f:
 f.write(string)
-vtk = VTK.from_file(tmp_path/'polyData.vtk','polyData')
+vtk = VTK.load(tmp_path/'polyData.vtk','polyData')
 assert(string == vtp.__repr__() == vtk.__repr__())

 @pytest.mark.parametrize('cell_type,n',[
@@ -62,11 +62,11 @@ class TestVTK:
 connectivity = np.random.choice(np.arange(n),n,False).reshape(-1,n)
 v = VTK.from_unstructuredGrid(nodes,connectivity,cell_type)
 string = v.__repr__()
-v.to_file(tmp_path/'unstructuredGrid',False)
-vtu = VTK.from_file(tmp_path/'unstructuredGrid.vtu')
+v.save(tmp_path/'unstructuredGrid',False)
+vtu = VTK.load(tmp_path/'unstructuredGrid.vtu')
 with open(tmp_path/'unstructuredGrid.vtk','w') as f:
 f.write(string)
-vtk = VTK.from_file(tmp_path/'unstructuredGrid.vtk','unstructuredgrid')
+vtk = VTK.load(tmp_path/'unstructuredGrid.vtk','unstructuredgrid')
 assert(string == vtu.__repr__() == vtk.__repr__())

@@ -75,8 +75,8 @@ class TestVTK:
 v = VTK.from_polyData(points)
 fname_s = tmp_path/'single.vtp'
 fname_p = tmp_path/'parallel.vtp'
-v.to_file(fname_s,False)
-v.to_file(fname_p,True)
+v.save(fname_s,False)
+v.save(fname_p,True)
 for i in range(10):
 if os.path.isfile(fname_p) and filecmp.cmp(fname_s,fname_p):
 assert(True)

@@ -90,11 +90,11 @@ class TestVTK:
 ('this_file_does_not_exist.vtx', None)])
 def test_invalid_dataset_type(self,name,dataset_type):
 with pytest.raises(TypeError):
-VTK.from_file(name,dataset_type)
+VTK.load(name,dataset_type)

 def test_invalid_extension_write(self,default):
 with pytest.raises(ValueError):
-default.to_file('default.txt')
+default.save('default.txt')

 def test_invalid_get(self,default):
 with pytest.raises(ValueError):
@@ -115,8 +115,8 @@ class TestVTK:

 def test_comments(self,tmp_path,default):
 default.add_comments(['this is a comment'])
-default.to_file(tmp_path/'with_comments',parallel=False)
-new = VTK.from_file(tmp_path/'with_comments.vtr')
+default.save(tmp_path/'with_comments',parallel=False)
+new = VTK.load(tmp_path/'with_comments.vtr')
 assert new.get_comments() == ['this is a comment']

 def test_compare_reference_polyData(self,update,reference_dir,tmp_path):

@@ -124,9 +124,9 @@ class TestVTK:
 polyData = VTK.from_polyData(points)
 polyData.add(points,'coordinates')
 if update:
-polyData.to_file(reference_dir/'polyData')
+polyData.save(reference_dir/'polyData')
 else:
-reference = VTK.from_file(reference_dir/'polyData.vtp')
+reference = VTK.load(reference_dir/'polyData.vtp')
 assert polyData.__repr__() == reference.__repr__() and \
 np.allclose(polyData.get('coordinates'),points)

@@ -139,8 +139,8 @@ class TestVTK:
 rectilinearGrid.add(c,'cell')
 rectilinearGrid.add(n,'node')
 if update:
-rectilinearGrid.to_file(reference_dir/'rectilinearGrid')
+rectilinearGrid.save(reference_dir/'rectilinearGrid')
 else:
-reference = VTK.from_file(reference_dir/'rectilinearGrid.vtr')
+reference = VTK.load(reference_dir/'rectilinearGrid.vtr')
 assert rectilinearGrid.__repr__() == reference.__repr__() and \
 np.allclose(rectilinearGrid.get('cell'),c)
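The VTK wrapper follows the same renaming: to_file becomes save and from_file becomes load, with the file extension still chosen from the dataset type. A minimal save/load round trip using the new names; the point cloud and file name are made up for illustration:

    import numpy as np
    from damask import VTK

    points = np.random.rand(10,3)             # illustrative point cloud
    v = VTK.from_polyData(points)
    v.add(points,'coordinates')               # attach the points as a data array
    v.save('cloud',False)                     # formerly v.to_file(...); False disables parallel writing
    back = VTK.load('cloud.vtp')              # formerly VTK.from_file(...)
    assert np.allclose(back.get('coordinates'),points)
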
@@ -18,8 +18,8 @@ class TestUtil:

 @pytest.mark.parametrize('input,output',
 [
-([2,0],[1,0]),
-([0.5,0.5],[1,1]),
+([0,-2],[0,-1]),
+([-0.5,0.5],[-1,1]),
 ([1./2.,1./3.],[3,2]),
 ([2./3.,1./2.,1./3.],[4,3,2]),
 ])

@@ -30,4 +30,4 @@ class TestUtil:

 def test_lackofprecision(self):
 with pytest.raises(ValueError):
-util.scale_to_coprime(np.array([1/3333,1,1]))
+util.scale_to_coprime(np.array([1/333.333,1,1]))
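For context, the parametrized cases above exercise util.scale_to_coprime, which scales a ratio vector to the smallest integer vector with coprime entries and, after this change, also covers negative components. A minimal sketch of the expected behaviour, reusing input/output pairs from the test table above:

    import numpy as np
    from damask import util

    # from the parametrized cases: [2/3, 1/2, 1/3] scales to [4, 3, 2]
    print(util.scale_to_coprime(np.array([2./3., 1./2., 1./3.])))   # -> [4 3 2]
    print(util.scale_to_coprime(np.array([0, -2])))                 # -> [0 -1]
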
@@ -106,7 +106,7 @@ subroutine CPFEM_init
 num_commercialFEM, &
 debug_CPFEM

-print'(/,a)', ' <<<+- CPFEM init -+>>>'; flush(6)
+print'(/,a)', ' <<<+- CPFEM init -+>>>'; flush(IO_STDOUT)

 allocate(CPFEM_cs( 6,discretization_nIP,discretization_nElem), source= 0.0_pReal)
 allocate(CPFEM_dcsdE( 6,6,discretization_nIP,discretization_nElem), source= 0.0_pReal)

@@ -132,7 +132,7 @@ subroutine CPFEM_init
 print'(a32,1x,6(i8,1x))', 'CPFEM_cs: ', shape(CPFEM_cs)
 print'(a32,1x,6(i8,1x))', 'CPFEM_dcsdE: ', shape(CPFEM_dcsdE)
 print'(a32,1x,6(i8,1x),/)', 'CPFEM_dcsdE_knownGood: ', shape(CPFEM_dcsdE_knownGood)
-flush(6)
+flush(IO_STDOUT)
 endif

 end subroutine CPFEM_init

@@ -250,7 +250,7 @@ subroutine CPFEM_general(mode, ffn, ffn1, temperature_inp, dt, elFE, ip, cauchyS
 '<< CPFEM >> stress/MPa at elFE ip ', elFE, ip, CPFEM_cs(1:6,ip,elCP)*1.0e-6_pReal
 print'(a,i8,1x,i2,/,6(12x,6(f10.3,1x)/))', &
 '<< CPFEM >> Jacobian/GPa at elFE ip ', elFE, ip, transpose(CPFEM_dcsdE(1:6,1:6,ip,elCP))*1.0e-9_pReal
-flush(6)
+flush(IO_STDOUT)
 endif

 endif
@@ -76,7 +76,7 @@ end subroutine CPFEM_initAll
 !--------------------------------------------------------------------------------------------------
 subroutine CPFEM_init

-print'(/,a)', ' <<<+- CPFEM init -+>>>'; flush(6)
+print'(/,a)', ' <<<+- CPFEM init -+>>>'; flush(IO_STDOUT)

 if (interface_restartInc > 0) call crystallite_restartRead
@@ -14,7 +14,7 @@
 #define PETSC_MINOR_MAX 13

 module DAMASK_interface
-use, intrinsic :: iso_fortran_env
+use, intrinsic :: ISO_fortran_env

 use PETScSys

@@ -82,7 +82,7 @@ subroutine DAMASK_interface_init

 print'(/,a)', ' <<<+- DAMASK_interface init -+>>>'

-open(6, encoding='UTF-8') ! for special characters in output
+open(OUTPUT_unit, encoding='UTF-8') ! for special characters in output

 ! http://patorjk.com/software/taag/#p=display&f=Lean&t=DAMASK%203
 #ifdef DEBUG

@@ -101,8 +101,8 @@ subroutine DAMASK_interface_init
 #endif
 print*, achar(27)//'[0m'

-print'(a)', ' Roters et al., Computational Materials Science 158:420–478, 2019'
-print'(a)', ' https://doi.org/10.1016/j.commatsci.2018.04.030'
+print*, 'Roters et al., Computational Materials Science 158:420–478, 2019'
+print*, 'https://doi.org/10.1016/j.commatsci.2018.04.030'

 print'(/,a)', ' Version: '//DAMASKVERSION
@@ -373,7 +373,7 @@ function makeRelativePath(a,b)
 a_cleaned = rectifyPath(trim(a)//'/')
 b_cleaned = rectifyPath(b)

-do i = 1, min(1024,len_trim(a_cleaned),len_trim(rectifyPath(b_cleaned)))
+do i = 1, min(len_trim(a_cleaned),len_trim(rectifyPath(b_cleaned)))
 if (a_cleaned(i:i) /= b_cleaned(i:i)) exit
 if (a_cleaned(i:i) == '/') posLastCommonSlash = i
 enddo

@@ -395,7 +395,7 @@ subroutine catchSIGTERM(signal) bind(C)
 integer(C_INT), value :: signal
 interface_SIGTERM = .true.

-print'(a,i2.2,a)', ' received signal ',signal, ', set SIGTERM=TRUE'
+print'(a,i0,a)', ' received signal ',signal, ', set SIGTERM=TRUE'

 end subroutine catchSIGTERM
@@ -420,7 +420,7 @@ subroutine catchSIGUSR1(signal) bind(C)
 integer(C_INT), value :: signal
 interface_SIGUSR1 = .true.

-print'(a,i2.2,a)', ' received signal ',signal, ', set SIGUSR1=TRUE'
+print'(a,i0,a)', ' received signal ',signal, ', set SIGUSR1=TRUE'

 end subroutine catchSIGUSR1

@@ -445,7 +445,7 @@ subroutine catchSIGUSR2(signal) bind(C)
 integer(C_INT), value :: signal
 interface_SIGUSR2 = .true.

-print'(a,i2.2,a)', ' received signal ',signal, ', set SIGUSR2=TRUE'
+print'(a,i0,a)', ' received signal ',signal, ', set SIGUSR2=TRUE'

 end subroutine catchSIGUSR2
@@ -30,7 +30,7 @@
 module DAMASK_interface
 use prec
 #if __INTEL_COMPILER >= 1800
-use, intrinsic :: iso_fortran_env, only: &
+use, intrinsic :: ISO_fortran_env, only: &
 compiler_version, &
 compiler_options
 #endif

src/IO.f90 (61 changed lines)
@@ -6,6 +6,10 @@
 !> @brief input/output functions
 !--------------------------------------------------------------------------------------------------
 module IO
+use, intrinsic :: ISO_fortran_env, only: &
+  IO_STDOUT => OUTPUT_UNIT, &
+  IO_STDERR => ERROR_UNIT
+
 use prec

 implicit none

@@ -16,7 +20,7 @@ module IO
 character, parameter, public :: &
 IO_EOL = new_line('DAMASK'), & !< end of line character
 IO_COMMENT = '#'
-character(len=*), parameter, private :: &
+character(len=*), parameter :: &
 IO_DIVIDER = '───────────────────'//&
 '───────────────────'//&
 '───────────────────'//&

@@ -37,7 +41,8 @@ module IO
 IO_stringAsFloat, &
 IO_stringAsBool, &
 IO_error, &
-IO_warning
+IO_warning, &
+IO_STDOUT

 contains

@@ -47,7 +52,7 @@ contains
 !--------------------------------------------------------------------------------------------------
 subroutine IO_init

-print'(/,a)', ' <<<+- IO init -+>>>'; flush(6)
+print'(/,a)', ' <<<+- IO init -+>>>'; flush(IO_STDOUT)

 call selfTest
@@ -538,29 +543,29 @@ subroutine IO_error(error_ID,el,ip,g,instance,ext_msg)
 end select

 !$OMP CRITICAL (write2out)
-write(0,'(/,a)') ' ┌'//IO_DIVIDER//'┐'
-write(0,'(a,24x,a,40x,a)') ' │','error', '│'
-write(0,'(a,24x,i3,42x,a)') ' │',error_ID, '│'
-write(0,'(a)') ' ├'//IO_DIVIDER//'┤'
+write(IO_STDERR,'(/,a)') ' ┌'//IO_DIVIDER//'┐'
+write(IO_STDERR,'(a,24x,a,40x,a)') ' │','error', '│'
+write(IO_STDERR,'(a,24x,i3,42x,a)') ' │',error_ID, '│'
+write(IO_STDERR,'(a)') ' ├'//IO_DIVIDER//'┤'
 write(formatString,'(a,i6.6,a,i6.6,a)') '(1x,a4,a',max(1,len_trim(msg)),',',&
 max(1,72-len_trim(msg)-4),'x,a)'
-write(0,formatString) '│ ',trim(msg), '│'
+write(IO_STDERR,formatString) '│ ',trim(msg), '│'
 if (present(ext_msg)) then
 write(formatString,'(a,i6.6,a,i6.6,a)') '(1x,a4,a',max(1,len_trim(ext_msg)),',',&
 max(1,72-len_trim(ext_msg)-4),'x,a)'
-write(0,formatString) '│ ',trim(ext_msg), '│'
+write(IO_STDERR,formatString) '│ ',trim(ext_msg), '│'
 endif
 if (present(el)) &
-write(0,'(a19,1x,i9,44x,a3)') ' │ at element ',el, '│'
+write(IO_STDERR,'(a19,1x,i9,44x,a3)') ' │ at element ',el, '│'
 if (present(ip)) &
-write(0,'(a19,1x,i9,44x,a3)') ' │ at IP ',ip, '│'
+write(IO_STDERR,'(a19,1x,i9,44x,a3)') ' │ at IP ',ip, '│'
 if (present(g)) &
-write(0,'(a19,1x,i9,44x,a3)') ' │ at constituent',g, '│'
+write(IO_STDERR,'(a19,1x,i9,44x,a3)') ' │ at constituent',g, '│'
 if (present(instance)) &
-write(0,'(a19,1x,i9,44x,a3)') ' │ at instance ',instance, '│'
-write(0,'(a,69x,a)') ' │', '│'
-write(0,'(a)') ' └'//IO_DIVIDER//'┘'
-flush(0)
+write(IO_STDERR,'(a19,1x,i9,44x,a3)') ' │ at instance ',instance, '│'
+write(IO_STDERR,'(a,69x,a)') ' │', '│'
+write(IO_STDERR,'(a)') ' └'//IO_DIVIDER//'┘'
+flush(IO_STDERR)
 call quit(9000+error_ID)
 !$OMP END CRITICAL (write2out)
@@ -623,27 +628,27 @@ subroutine IO_warning(warning_ID,el,ip,g,ext_msg)
 end select

 !$OMP CRITICAL (write2out)
-write(6,'(/,a)') ' ┌'//IO_DIVIDER//'┐'
-write(6,'(a,24x,a,38x,a)') ' │','warning', '│'
-write(6,'(a,24x,i3,42x,a)') ' │',warning_ID, '│'
-write(6,'(a)') ' ├'//IO_DIVIDER//'┤'
+write(IO_STDERR,'(/,a)') ' ┌'//IO_DIVIDER//'┐'
+write(IO_STDERR,'(a,24x,a,38x,a)') ' │','warning', '│'
+write(IO_STDERR,'(a,24x,i3,42x,a)') ' │',warning_ID, '│'
+write(IO_STDERR,'(a)') ' ├'//IO_DIVIDER//'┤'
 write(formatString,'(a,i6.6,a,i6.6,a)') '(1x,a4,a',max(1,len_trim(msg)),',',&
 max(1,72-len_trim(msg)-4),'x,a)'
-write(6,formatString) '│ ',trim(msg), '│'
+write(IO_STDERR,formatString) '│ ',trim(msg), '│'
 if (present(ext_msg)) then
 write(formatString,'(a,i6.6,a,i6.6,a)') '(1x,a4,a',max(1,len_trim(ext_msg)),',',&
 max(1,72-len_trim(ext_msg)-4),'x,a)'
-write(6,formatString) '│ ',trim(ext_msg), '│'
+write(IO_STDERR,formatString) '│ ',trim(ext_msg), '│'
 endif
 if (present(el)) &
-write(6,'(a19,1x,i9,44x,a3)') ' │ at element ',el, '│'
+write(IO_STDERR,'(a19,1x,i9,44x,a3)') ' │ at element ',el, '│'
 if (present(ip)) &
-write(6,'(a19,1x,i9,44x,a3)') ' │ at IP ',ip, '│'
+write(IO_STDERR,'(a19,1x,i9,44x,a3)') ' │ at IP ',ip, '│'
 if (present(g)) &
-write(6,'(a19,1x,i9,44x,a3)') ' │ at constituent',g, '│'
-write(6,'(a,69x,a)') ' │', '│'
-write(6,'(a)') ' └'//IO_DIVIDER//'┘'
-flush(6)
+write(IO_STDERR,'(a19,1x,i9,44x,a3)') ' │ at constituent',g, '│'
+write(IO_STDERR,'(a,69x,a)') ' │', '│'
+write(IO_STDERR,'(a)') ' └'//IO_DIVIDER//'┘'
+flush(IO_STDERR)
 !$OMP END CRITICAL (write2out)

 end subroutine IO_warning
@@ -27,7 +27,7 @@ contains
 !--------------------------------------------------------------------------------------------------
 subroutine base64_init

-print'(/,a)', ' <<<+- base64 init -+>>>'; flush(6)
+print'(/,a)', ' <<<+- base64 init -+>>>'; flush(IO_STDOUT)

 call selfTest
@@ -35,7 +35,7 @@ contains
 !--------------------------------------------------------------------------------------------------
 subroutine config_init

-print'(/,a)', ' <<<+- config init -+>>>'; flush(6)
+print'(/,a)', ' <<<+- config init -+>>>'; flush(IO_STDOUT)

 call parse_material
 call parse_numerics

@@ -59,7 +59,7 @@ subroutine parse_material
 inquire(file=fname,exist=fileExists)
 if(.not. fileExists) call IO_error(100,ext_msg=fname)
 endif
-print*, 'reading '//fname; flush(6)
+print*, 'reading '//fname; flush(IO_STDOUT)
 config_material => YAML_parse_file(fname)

 end subroutine parse_material

@@ -75,7 +75,7 @@ subroutine parse_numerics
 config_numerics => emptyDict
 inquire(file='numerics.yaml', exist=fexist)
 if (fexist) then
-print*, 'reading numerics.yaml'; flush(6)
+print*, 'reading numerics.yaml'; flush(IO_STDOUT)
 config_numerics => YAML_parse_file('numerics.yaml')
 endif

@@ -92,7 +92,7 @@ subroutine parse_debug
 config_debug => emptyDict
 inquire(file='debug.yaml', exist=fexist)
 fileExists: if (fexist) then
-print*, 'reading debug.yaml'; flush(6)
+print*, 'reading debug.yaml'; flush(IO_STDOUT)
 config_debug => YAML_parse_file('debug.yaml')
 endif fileExists
@@ -446,7 +446,7 @@ subroutine constitutive_init
 call damage_init
 call thermal_init

-print'(/,a)', ' <<<+- constitutive init -+>>>'; flush(6)
+print'(/,a)', ' <<<+- constitutive init -+>>>'; flush(IO_STDOUT)

 constitutive_source_maxSizeDotState = 0
 PhaseLoop2:do p = 1,phases%length
@@ -100,7 +100,7 @@ module function plastic_disloTungsten_init() result(myPlasticity)

 myPlasticity = plastic_active('disloTungsten')
 Ninstance = count(myPlasticity)
-print'(a,i2)', ' # instances: ',Ninstance; flush(6)
+print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT)
 if(Ninstance == 0) return

 print*, 'Cereceda et al., International Journal of Plasticity 78:242–256, 2016'

@@ -147,7 +147,7 @@ module function plastic_dislotwin_init() result(myPlasticity)

 myPlasticity = plastic_active('dislotwin')
 Ninstance = count(myPlasticity)
-print'(a,i2)', ' # instances: ',Ninstance; flush(6)
+print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT)
 if(Ninstance == 0) return

 print*, 'Ma and Roters, Acta Materialia 52(12):3603–3612, 2004'

@@ -71,7 +71,7 @@ module function plastic_isotropic_init() result(myPlasticity)

 myPlasticity = plastic_active('isotropic')
 Ninstance = count(myPlasticity)
-print'(a,i2)', ' # instances: ',Ninstance; flush(6)
+print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT)
 if(Ninstance == 0) return

 print*, 'Maiti and Eisenlohr, Scripta Materialia 145:37–40, 2018'

@@ -83,7 +83,7 @@ module function plastic_kinehardening_init() result(myPlasticity)

 myPlasticity = plastic_active('kinehardening')
 Ninstance = count(myPlasticity)
-print'(a,i2)', ' # instances: ',Ninstance; flush(6)
+print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT)
 if(Ninstance == 0) return

 allocate(param(Ninstance))

@@ -35,7 +35,7 @@ module function plastic_none_init() result(myPlasticity)
 enddo

 Ninstance = count(myPlasticity)
-print'(a,i2)', ' # instances: ',Ninstance; flush(6)
+print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT)
 if(Ninstance == 0) return

 do p = 1, phases%length
@@ -189,7 +189,7 @@ module function plastic_nonlocal_init() result(myPlasticity)

 myPlasticity = plastic_active('nonlocal')
 Ninstance = count(myPlasticity)
-print'(a,i2)', ' # instances: ',Ninstance; flush(6)
+print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT)
 if(Ninstance == 0) then
 call geometry_plastic_nonlocal_disable
 return

@@ -199,7 +199,7 @@ module function plastic_nonlocal_init() result(myPlasticity)
 print*, 'https://doi.org/10.1016/j.actamat.2014.03.012'//IO_EOL

 print*, 'Kords, Dissertation RWTH Aachen, 2014'
-print*, 'http://publications.rwth-aachen.de/record/229993'//IO_EOL
+print*, 'http://publications.rwth-aachen.de/record/229993'

 allocate(param(Ninstance))
 allocate(state(Ninstance))
@ -741,10 +741,10 @@ module subroutine plastic_nonlocal_dependentState(F, Fp, instance, of, ip, el)
|
||||||
if (debugConstitutive%extensive &
|
if (debugConstitutive%extensive &
|
||||||
.and. ((debugConstitutive%element == el .and. debugConstitutive%ip == ip)&
|
.and. ((debugConstitutive%element == el .and. debugConstitutive%ip == ip)&
|
||||||
.or. .not. debugConstitutive%selective)) then
|
.or. .not. debugConstitutive%selective)) then
|
||||||
write(6,'(/,a,i8,1x,i2,1x,i1,/)') '<< CONST >> nonlocal_microstructure at el ip ',el,ip
|
print'(/,a,i8,1x,i2,1x,i1,/)', '<< CONST >> nonlocal_microstructure at el ip ',el,ip
|
||||||
write(6,'(a,/,12x,12(e10.3,1x))') '<< CONST >> rhoForest', stt%rho_forest(:,of)
|
print'(a,/,12x,12(e10.3,1x))', '<< CONST >> rhoForest', stt%rho_forest(:,of)
|
||||||
write(6,'(a,/,12x,12(f10.5,1x))') '<< CONST >> tauThreshold / MPa', dst%tau_pass(:,of)*1e-6
|
print'(a,/,12x,12(f10.5,1x))', '<< CONST >> tauThreshold / MPa', dst%tau_pass(:,of)*1e-6
|
||||||
write(6,'(a,/,12x,12(f10.5,1x),/)') '<< CONST >> tauBack / MPa', dst%tau_back(:,of)*1e-6
|
print'(a,/,12x,12(f10.5,1x),/)', '<< CONST >> tauBack / MPa', dst%tau_back(:,of)*1e-6
|
||||||
endif
|
endif
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
@ -958,8 +958,8 @@ module subroutine plastic_nonlocal_deltaState(Mp,instance,of,ip,el)
|
||||||
if (debugConstitutive%extensive &
|
if (debugConstitutive%extensive &
|
||||||
.and. ((debugConstitutive%element == el .and. debugConstitutive%ip == ip)&
|
.and. ((debugConstitutive%element == el .and. debugConstitutive%ip == ip)&
|
||||||
.or. .not. debugConstitutive%selective)) then
|
.or. .not. debugConstitutive%selective)) then
|
||||||
write(6,'(a,/,8(12x,12(e12.5,1x),/))') '<< CONST >> dislocation remobilization', deltaRhoRemobilization(:,1:8)
|
print'(a,/,8(12x,12(e12.5,1x),/))', '<< CONST >> dislocation remobilization', deltaRhoRemobilization(:,1:8)
|
||||||
write(6,'(a,/,10(12x,12(e12.5,1x),/),/)') '<< CONST >> dipole dissociation by stress increase', deltaRhoDipole2SingleStress
|
print'(a,/,10(12x,12(e12.5,1x),/),/)', '<< CONST >> dipole dissociation by stress increase', deltaRhoDipole2SingleStress
|
||||||
endif
|
endif
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
@ -1047,8 +1047,8 @@ module subroutine plastic_nonlocal_dotState(Mp, F, Fp, Temperature,timestep, &
|
||||||
if (debugConstitutive%basic &
|
if (debugConstitutive%basic &
|
||||||
.and. ((debugConstitutive%element == el .and. debugConstitutive%ip == ip) &
|
.and. ((debugConstitutive%element == el .and. debugConstitutive%ip == ip) &
|
||||||
.or. .not. debugConstitutive%selective)) then
|
.or. .not. debugConstitutive%selective)) then
|
||||||
write(6,'(a,/,10(12x,12(e12.5,1x),/))') '<< CONST >> rho / 1/m^2', rhoSgl, rhoDip
|
print'(a,/,10(12x,12(e12.5,1x),/))', '<< CONST >> rho / 1/m^2', rhoSgl, rhoDip
|
||||||
write(6,'(a,/,4(12x,12(e12.5,1x),/))') '<< CONST >> gdot / 1/s',gdot
|
print'(a,/,4(12x,12(e12.5,1x),/))', '<< CONST >> gdot / 1/s',gdot
|
||||||
endif
|
endif
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
@ -1156,8 +1156,8 @@ module subroutine plastic_nonlocal_dotState(Mp, F, Fp, Temperature,timestep, &
|
||||||
.or. any(rho(:,dip) + rhoDot(:,9:10) * timestep < -prm%atol_rho)) then
|
.or. any(rho(:,dip) + rhoDot(:,9:10) * timestep < -prm%atol_rho)) then
|
||||||
#ifdef DEBUG
|
#ifdef DEBUG
|
||||||
if (debugConstitutive%extensive) then
|
if (debugConstitutive%extensive) then
|
||||||
write(6,'(a,i5,a,i2)') '<< CONST >> evolution rate leads to negative density at el ',el,' ip ',ip
|
print'(a,i5,a,i2)', '<< CONST >> evolution rate leads to negative density at el ',el,' ip ',ip
|
||||||
write(6,'(a)') '<< CONST >> enforcing cutback !!!'
|
print'(a)', '<< CONST >> enforcing cutback !!!'
|
||||||
endif
|
endif
|
||||||
#endif
|
#endif
|
||||||
plasticState(ph)%dotState = IEEE_value(1.0_pReal,IEEE_quiet_NaN)
|
plasticState(ph)%dotState = IEEE_value(1.0_pReal,IEEE_quiet_NaN)
|
||||||
|
@ -1268,8 +1268,8 @@ function rhoDotFlux(F,Fp,timestep, instance,of,ip,el)
|
||||||
> IPvolume(ip,el) / maxval(IParea(:,ip,el)))) then ! ...with velocity above critical value (we use the reference volume and area for simplicity here)
|
> IPvolume(ip,el) / maxval(IParea(:,ip,el)))) then ! ...with velocity above critical value (we use the reference volume and area for simplicity here)
|
||||||
#ifdef DEBUG
|
#ifdef DEBUG
|
||||||
if (debugConstitutive%extensive) then
|
if (debugConstitutive%extensive) then
|
||||||
write(6,'(a,i5,a,i2)') '<< CONST >> CFL condition not fullfilled at el ',el,' ip ',ip
|
print'(a,i5,a,i2)', '<< CONST >> CFL condition not fullfilled at el ',el,' ip ',ip
|
||||||
write(6,'(a,e10.3,a,e10.3)') '<< CONST >> velocity is at ', &
|
print'(a,e10.3,a,e10.3)', '<< CONST >> velocity is at ', &
|
||||||
maxval(abs(v0), abs(gdot) > 0.0_pReal &
|
maxval(abs(v0), abs(gdot) > 0.0_pReal &
|
||||||
.and. prm%f_c * abs(v0) * timestep &
|
.and. prm%f_c * abs(v0) * timestep &
|
||||||
> IPvolume(ip,el) / maxval(IParea(:,ip,el))), &
|
> IPvolume(ip,el) / maxval(IParea(:,ip,el))), &
|
||||||
@@ -92,7 +92,7 @@ module function plastic_phenopowerlaw_init() result(myPlasticity)

 myPlasticity = plastic_active('phenopowerlaw')
 Ninstance = count(myPlasticity)
-print'(a,i2)', ' # instances: ',Ninstance; flush(6)
+print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT)
 if(Ninstance == 0) return

 allocate(param(Ninstance))
@@ -294,7 +294,7 @@ subroutine crystallite_init
 print'(a42,1x,i10)', ' # of elements: ', eMax
 print'(a42,1x,i10)', ' # of integration points/element: ', iMax
 print'(a42,1x,i10)', 'max # of constituents/integration point: ', cMax
-flush(6)
+flush(IO_STDOUT)
 endif

 #endif

@@ -1561,7 +1561,7 @@ subroutine crystallite_restartWrite
 integer(HID_T) :: fileHandle, groupHandle
 character(len=pStringLen) :: fileName, datasetName

-print*, ' writing field and constitutive data required for restart to file';flush(6)
+print*, ' writing field and constitutive data required for restart to file';flush(IO_STDOUT)

 write(fileName,'(a,i0,a)') trim(getSolverJobName())//'_',worldrank,'.hdf5'
 fileHandle = HDF5_openFile(fileName,'a')
@@ -49,7 +49,7 @@ subroutine damage_local_init
 homog, &
 homogDamage

-print'(/,a)', ' <<<+- damage_local init -+>>>'; flush(6)
+print'(/,a)', ' <<<+- damage_local init -+>>>'; flush(IO_STDOUT)

 !----------------------------------------------------------------------------------------------
 ! read numerics parameter and do sanity check
@@ -922,7 +922,7 @@ subroutine tElement_init(self,elemType)

 self%nIPneighbors = size(self%IPneighbor,1)

-print'(/,a)', ' <<<+- element_init -+>>>'; flush(6)
+print'(/,a)', ' <<<+- element_init -+>>>'; flush(IO_STDOUT)

 print*, 'element type: ',self%elemType
 print*, ' geom type: ',self%geomType
@@ -99,10 +99,10 @@ program DAMASK_grid
 ! init DAMASK (all modules)

 call CPFEM_initAll
-write(6,'(/,a)') ' <<<+- DAMASK_spectral init -+>>>'; flush(6)
+print'(/,a)', ' <<<+- DAMASK_spectral init -+>>>'; flush(IO_STDOUT)

-write(6,'(/,a)') ' Shanthraj et al., Handbook of Mechanics of Materials, 2019'
+print*, 'Shanthraj et al., Handbook of Mechanics of Materials, 2019'
-write(6,'(a)') ' https://doi.org/10.1007/978-981-10-6855-3_80'
+print*, 'https://doi.org/10.1007/978-981-10-6855-3_80'

 !--------------------------------------------------------------------------------------------------
 ! initialize field solver information

@@ -263,56 +263,56 @@ program DAMASK_grid

 reportAndCheck: if (worldrank == 0) then
 write (loadcase_string, '(i0)' ) currentLoadCase
-write(6,'(/,1x,a,i0)') 'load case: ', currentLoadCase
+print'(/,a,i0)', ' load case: ', currentLoadCase
 if (.not. newLoadCase%followFormerTrajectory) &
-write(6,'(2x,a)') 'drop guessing along trajectory'
+print*, ' drop guessing along trajectory'
 if (newLoadCase%deformation%myType == 'l') then
 do j = 1, 3
 if (any(newLoadCase%deformation%maskLogical(j,1:3) .eqv. .true.) .and. &
 any(newLoadCase%deformation%maskLogical(j,1:3) .eqv. .false.)) errorID = 832 ! each row should be either fully or not at all defined
 enddo
-write(6,'(2x,a)') 'velocity gradient:'
+print*, ' velocity gradient:'
 else if (newLoadCase%deformation%myType == 'f') then
-write(6,'(2x,a)') 'deformation gradient at end of load case:'
+print*, ' deformation gradient at end of load case:'
 else
-write(6,'(2x,a)') 'deformation gradient rate:'
+print*, ' deformation gradient rate:'
 endif
 do i = 1, 3; do j = 1, 3
 if(newLoadCase%deformation%maskLogical(i,j)) then
-write(6,'(2x,f12.7)',advance='no') newLoadCase%deformation%values(i,j)
+write(IO_STDOUT,'(2x,f12.7)',advance='no') newLoadCase%deformation%values(i,j)
 else
-write(6,'(2x,12a)',advance='no') ' * '
+write(IO_STDOUT,'(2x,12a)',advance='no') ' * '
 endif
-enddo; write(6,'(/)',advance='no')
+enddo; write(IO_STDOUT,'(/)',advance='no')
 enddo
 if (any(newLoadCase%stress%maskLogical .eqv. &
 newLoadCase%deformation%maskLogical)) errorID = 831 ! exclusive or masking only
 if (any(newLoadCase%stress%maskLogical .and. transpose(newLoadCase%stress%maskLogical) &
 .and. (math_I3<1))) errorID = 838 ! no rotation is allowed by stress BC
-write(6,'(2x,a)') 'stress / GPa:'
+print*, ' stress / GPa:'
 do i = 1, 3; do j = 1, 3
 if(newLoadCase%stress%maskLogical(i,j)) then
-write(6,'(2x,f12.7)',advance='no') newLoadCase%stress%values(i,j)*1e-9_pReal
+write(IO_STDOUT,'(2x,f12.7)',advance='no') newLoadCase%stress%values(i,j)*1e-9_pReal
 else
-write(6,'(2x,12a)',advance='no') ' * '
+write(IO_STDOUT,'(2x,12a)',advance='no') ' * '
 endif
-enddo; write(6,'(/)',advance='no')
+enddo; write(IO_STDOUT,'(/)',advance='no')
 enddo
 if (any(abs(matmul(newLoadCase%rot%asMatrix(), &
 transpose(newLoadCase%rot%asMatrix()))-math_I3) > &
 reshape(spread(tol_math_check,1,9),[ 3,3]))) errorID = 846 ! given rotation matrix contains strain
 if (any(dNeq(newLoadCase%rot%asMatrix(), math_I3))) &
-write(6,'(2x,a,/,3(3(3x,f12.7,1x)/))',advance='no') 'rotation of loadframe:',&
+write(IO_STDOUT,'(2x,a,/,3(3(3x,f12.7,1x)/))',advance='no') 'rotation of loadframe:',&
 transpose(newLoadCase%rot%asMatrix())
 if (newLoadCase%time < 0.0_pReal) errorID = 834 ! negative time increment
-write(6,'(2x,a,f0.3)') 'time: ', newLoadCase%time
+print'(a,f0.3)', ' time: ', newLoadCase%time
 if (newLoadCase%incs < 1) errorID = 835 ! non-positive incs count
-write(6,'(2x,a,i0)') 'increments: ', newLoadCase%incs
+print'(a,i0)', ' increments: ', newLoadCase%incs
 if (newLoadCase%outputfrequency < 1) errorID = 836 ! non-positive result frequency
-write(6,'(2x,a,i0)') 'output frequency: ', newLoadCase%outputfrequency
+print'(a,i0)', ' output frequency: ', newLoadCase%outputfrequency
 if (newLoadCase%restartfrequency < 1) errorID = 839 ! non-positive restart frequency
 if (newLoadCase%restartfrequency < huge(0)) &
-write(6,'(2x,a,i0)') 'restart frequency: ', newLoadCase%restartfrequency
+print'(a,i0)', ' restart frequency: ', newLoadCase%restartfrequency
 if (errorID > 0) call IO_error(error_ID = errorID, ext_msg = loadcase_string) ! exit with error message
 endif reportAndCheck
 loadCases = [loadCases,newLoadCase] ! load case is ok, append it

@@ -341,9 +341,8 @@ program DAMASK_grid
 writeHeader: if (interface_restartInc < 1) then
 open(newunit=statUnit,file=trim(getSolverJobName())//'.sta',form='FORMATTED',status='REPLACE')
 write(statUnit,'(a)') 'Increment Time CutbackLevel Converged IterationsNeeded' ! statistics file
-if (debug_grid%contains('basic')) &
-write(6,'(/,a)') ' header of statistics file written out'
-flush(6)
+if (debug_grid%contains('basic')) print'(/,a)', ' header of statistics file written out'
+flush(IO_STDOUT)
 else writeHeader
 open(newunit=statUnit,file=trim(getSolverJobName())//&
 '.sta',form='FORMATTED', position='APPEND', status='OLD')

@@ -351,7 +350,7 @@ program DAMASK_grid
 endif

 writeUndeformed: if (interface_restartInc < 1) then
-write(6,'(1/,a)') ' ... writing initial configuration to file ........................'
+print'(/,a)', ' ... writing initial configuration to file ........................'
 call CPFEM_results(0,0.0_pReal)
 endif writeUndeformed

@@ -397,8 +396,8 @@ program DAMASK_grid

 !--------------------------------------------------------------------------------------------------
 ! report begin of new step
-write(6,'(/,a)') ' ###########################################################################'
+print'(/,a)', ' ###########################################################################'
-write(6,'(1x,a,es12.5,6(a,i0))') &
+print'(1x,a,es12.5,6(a,i0))', &
 'Time', time, &
 's: Increment ', inc,'/',loadCases(currentLoadCase)%incs,&
 '-', stepFraction,'/',subStepFactor**cutBackLevel,&

@@ -406,7 +405,7 @@ program DAMASK_grid
 write(incInfo,'(4(a,i0))') &
 'Increment ',totalIncsCounter,'/',sum(loadCases%incs),&
 '-', stepFraction,'/',subStepFactor**cutBackLevel
-flush(6)
+flush(IO_STDOUT)

 !--------------------------------------------------------------------------------------------------
 ! forward fields

@@ -475,7 +474,7 @@ program DAMASK_grid
 cutBackLevel = cutBackLevel + 1
 time = time - timeinc ! rewind time
 timeinc = timeinc/real(subStepFactor,pReal) ! cut timestep
-write(6,'(/,a)') ' cutting back '
+print'(/,a)', ' cutting back '
 else ! no more options to continue
 call IO_warning(850)
 if (worldrank == 0) close(statUnit)

@@ -487,14 +486,14 @@ program DAMASK_grid
 cutBackLevel = max(0, cutBackLevel - 1) ! try half number of subincs next inc

 if (all(solres(:)%converged)) then
-write(6,'(/,a,i0,a)') ' increment ', totalIncsCounter, ' converged'
+print'(/,a,i0,a)', ' increment ', totalIncsCounter, ' converged'
 else
-write(6,'(/,a,i0,a)') ' increment ', totalIncsCounter, ' NOT converged'
+print'(/,a,i0,a)', ' increment ', totalIncsCounter, ' NOT converged'
-endif; flush(6)
+endif; flush(IO_STDOUT)

 if (mod(inc,loadCases(currentLoadCase)%outputFrequency) == 0) then ! at output frequency
-write(6,'(1/,a)') ' ... writing results to file ......................................'
+print'(1/,a)', ' ... writing results to file ......................................'
-flush(6)
+flush(IO_STDOUT)
 call CPFEM_results(totalIncsCounter,time)
 endif
 if (mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0) then

@@ -510,7 +509,7 @@ program DAMASK_grid

 !--------------------------------------------------------------------------------------------------
 ! report summary of whole calculation
-write(6,'(/,a)') ' ###########################################################################'
+print'(/,a)', ' ###########################################################################'
 if (worldrank == 0) close(statUnit)

 call quit(0) ! no complains ;)
@@ -56,7 +56,7 @@ subroutine discretization_grid_init(restart)
 myGrid !< domain grid of this process

 integer, dimension(:), allocatable :: &
-microstructureAt
+materialAt

 integer :: &
 j, &

@@ -65,12 +65,12 @@ subroutine discretization_grid_init(restart)
 integer(C_INTPTR_T) :: &
 devNull, z, z_offset

-write(6,'(/,a)') ' <<<+- discretization_grid init -+>>>'; flush(6)
+print'(/,a)', ' <<<+- discretization_grid init -+>>>'; flush(IO_STDOUT)

 if(index(interface_geomFile,'.vtr') /= 0) then
-call readVTR(grid,geomSize,origin,microstructureAt)
+call readVTR(grid,geomSize,origin,materialAt)
 else
-call readGeom(grid,geomSize,origin,microstructureAt)
+call readGeom(grid,geomSize,origin,materialAt)
 endif

 print'(/,a,3(i12 ))', ' grid a b c: ', grid

@@ -102,10 +102,9 @@ subroutine discretization_grid_init(restart)

 !--------------------------------------------------------------------------------------------------
 ! general discretization
-microstructureAt = microstructureAt(product(grid(1:2))*grid3Offset+1: &
-product(grid(1:2))*(grid3Offset+grid3)) ! reallocate/shrink in case of MPI
+materialAt = materialAt(product(grid(1:2))*grid3Offset+1:product(grid(1:2))*(grid3Offset+grid3)) ! reallocate/shrink in case of MPI

-call discretization_init(microstructureAt, &
+call discretization_init(materialAt, &
 IPcoordinates0(myGrid,mySize,grid3Offset), &
 Nodes0(myGrid,mySize,grid3Offset),&
 merge((grid(1)+1) * (grid(2)+1) * (grid3+1),& ! write bottom layer

@@ -147,7 +146,7 @@ end subroutine discretization_grid_init
 !> @details important variables have an implicit "save" attribute. Therefore, this function is
 ! supposed to be called only once!
 !--------------------------------------------------------------------------------------------------
-subroutine readGeom(grid,geomSize,origin,microstructure)
+subroutine readGeom(grid,geomSize,origin,material)

 integer, dimension(3), intent(out) :: &
 grid ! grid (across all processes!)

@@ -155,7 +154,7 @@ subroutine readGeom(grid,geomSize,origin,microstructure)
 geomSize, & ! size (across all processes!)
 origin ! origin (across all processes!)
 integer, dimension(:), intent(out), allocatable :: &
-microstructure
+material

 character(len=:), allocatable :: rawData
 character(len=65536) :: line

@@ -167,7 +166,7 @@ subroutine readGeom(grid,geomSize,origin,microstructure)
 startPos, endPos, &
 myStat, &
 l, & !< line counter
-c, & !< counter for # microstructures in line
+c, & !< counter for # materials in line
 o, & !< order of "to" packing
 e, & !< "element", i.e. spectral collocation point
 i, j

@@ -266,7 +265,7 @@ subroutine readGeom(grid,geomSize,origin,microstructure)
 if(any(geomSize < 0.0_pReal)) &
 call IO_error(error_ID = 842, ext_msg='size (readGeom)')

-allocate(microstructure(product(grid)), source = -1) ! too large in case of MPI (shrink later, not very elegant)
+allocate(material(product(grid)), source = -1) ! too large in case of MPI (shrink later, not very elegant)

 !--------------------------------------------------------------------------------------------------
 ! read and interpret content

@@ -281,18 +280,18 @@ subroutine readGeom(grid,geomSize,origin,microstructure)

 noCompression: if (chunkPos(1) /= 3) then
 c = chunkPos(1)
-microstructure(e:e+c-1) = [(IO_intValue(line,chunkPos,i+1), i=0, c-1)]
+material(e:e+c-1) = [(IO_intValue(line,chunkPos,i+1), i=0, c-1)]
 else noCompression
 compression: if (IO_lc(IO_stringValue(line,chunkPos,2)) == 'of') then
 c = IO_intValue(line,chunkPos,1)
-microstructure(e:e+c-1) = [(IO_intValue(line,chunkPos,3),i = 1,IO_intValue(line,chunkPos,1))]
+material(e:e+c-1) = [(IO_intValue(line,chunkPos,3),i = 1,IO_intValue(line,chunkPos,1))]
 else if (IO_lc(IO_stringValue(line,chunkPos,2)) == 'to') then compression
 c = abs(IO_intValue(line,chunkPos,3) - IO_intValue(line,chunkPos,1)) + 1
 o = merge(+1, -1, IO_intValue(line,chunkPos,3) > IO_intValue(line,chunkPos,1))
-microstructure(e:e+c-1) = [(i, i = IO_intValue(line,chunkPos,1),IO_intValue(line,chunkPos,3),o)]
+material(e:e+c-1) = [(i, i = IO_intValue(line,chunkPos,1),IO_intValue(line,chunkPos,3),o)]
 else compression
 c = chunkPos(1)
-microstructure(e:e+c-1) = [(IO_intValue(line,chunkPos,i+1), i=0, c-1)]
+material(e:e+c-1) = [(IO_intValue(line,chunkPos,i+1), i=0, c-1)]
 endif compression
 endif noCompression

@@ -308,7 +307,7 @@ end subroutine readGeom
 !> @brief Parse vtk rectilinear grid (.vtr)
 !> @details https://vtk.org/Wiki/VTK_XML_Formats
 !--------------------------------------------------------------------------------------------------
-subroutine readVTR(grid,geomSize,origin,microstructure)
+subroutine readVTR(grid,geomSize,origin,material)

 integer, dimension(3), intent(out) :: &
 grid ! grid (across all processes!)

@@ -316,7 +315,7 @@ subroutine readVTR(grid,geomSize,origin,microstructure)
 geomSize, & ! size (across all processes!)
 origin ! origin (across all processes!)
 integer, dimension(:), intent(out), allocatable :: &
-microstructure
+material

 character(len=:), allocatable :: fileContent, dataType, headerType
 logical :: inFile,inGrid,gotCoordinates,gotCellData,compressed

@@ -364,11 +363,9 @@ subroutine readVTR(grid,geomSize,origin,microstructure)
 else
 if(index(fileContent(startPos:endPos),'<CellData>',kind=pI64) /= 0_pI64) then
 gotCellData = .true.
-startPos = endPos + 2_pI64
 do while (index(fileContent(startPos:endPos),'</CellData>',kind=pI64) == 0_pI64)
-endPos = startPos + index(fileContent(startPos:),IO_EOL,kind=pI64) - 2_pI64
 if(index(fileContent(startPos:endPos),'<DataArray',kind=pI64) /= 0_pI64 .and. &
-getXMLValue(fileContent(startPos:endPos),'Name') == 'materialpoint' ) then
+getXMLValue(fileContent(startPos:endPos),'Name') == 'material' ) then

 if(getXMLValue(fileContent(startPos:endPos),'format') /= 'binary') &
 call IO_error(error_ID = 844, ext_msg='format (materialpoint)')

@@ -377,10 +374,11 @@ subroutine readVTR(grid,geomSize,origin,microstructure)
 startPos = endPos + 2_pI64
 endPos = startPos + index(fileContent(startPos:),IO_EOL,kind=pI64) - 2_pI64
 s = startPos + verify(fileContent(startPos:endPos),IO_WHITESPACE,kind=pI64) -1_pI64 ! start (no leading whitespace)
-microstructure = as_Int(fileContent(s:endPos),headerType,compressed,dataType)
+material = as_Int(fileContent(s:endPos),headerType,compressed,dataType)
 exit
 endif
 startPos = endPos + 2_pI64
+endPos = startPos + index(fileContent(startPos:),IO_EOL,kind=pI64) - 2_pI64
 enddo
 elseif(index(fileContent(startPos:endPos),'<Coordinates>',kind=pI64) /= 0_pI64) then
 gotCoordinates = .true.

@@ -415,10 +413,10 @@ subroutine readVTR(grid,geomSize,origin,microstructure)

 end do

-if(.not. allocated(microstructure)) call IO_error(error_ID = 844, ext_msg='materialpoint not found')
+if(.not. allocated(material)) call IO_error(error_ID = 844, ext_msg='material data not found')
-if(size(microstructure) /= product(grid)) call IO_error(error_ID = 844, ext_msg='size(materialpoint)')
+if(size(material) /= product(grid)) call IO_error(error_ID = 844, ext_msg='size(material)')
 if(any(geomSize<=0)) call IO_error(error_ID = 844, ext_msg='size')
 if(any(grid<1)) call IO_error(error_ID = 844, ext_msg='grid')

 contains

@@ -22,42 +22,38 @@ module grid_damage_spectral
 implicit none
 private

-type, private :: tNumerics
+type :: tNumerics
 integer :: &
-itmax !< max number of iterations
+itmax !< maximum number of iterations
 real(pReal) :: &
 residualStiffness, & !< non-zero residual damage
 eps_damage_atol, & !< absolute tolerance for damage evolution
 eps_damage_rtol !< relative tolerance for damage evolution
 end type tNumerics

-type(tNumerics), private :: num
+type(tNumerics) :: num
-!--------------------------------------------------------------------------------------------------
-! derived types
-type(tSolutionParams), private :: params

+type(tSolutionParams) :: params
 !--------------------------------------------------------------------------------------------------
 ! PETSc data
-SNES, private :: damage_snes
+SNES :: damage_snes
-Vec, private :: solution_vec
+Vec :: solution_vec
-PetscInt, private :: xstart, xend, ystart, yend, zstart, zend
+PetscInt :: xstart, xend, ystart, yend, zstart, zend
-real(pReal), private, dimension(:,:,:), allocatable :: &
+real(pReal), dimension(:,:,:), allocatable :: &
 phi_current, & !< field of current damage
 phi_lastInc, & !< field of previous damage
 phi_stagInc !< field of staggered damage

 !--------------------------------------------------------------------------------------------------
 ! reference diffusion tensor, mobility etc.
-integer, private :: totalIter = 0 !< total iteration in current increment
+integer :: totalIter = 0 !< total iteration in current increment
-real(pReal), dimension(3,3), private :: K_ref
+real(pReal), dimension(3,3) :: K_ref
-real(pReal), private :: mu_ref
+real(pReal) :: mu_ref

 public :: &
 grid_damage_spectral_init, &
 grid_damage_spectral_solution, &
 grid_damage_spectral_forward
-private :: &
-formResidual

 contains

@@ -77,10 +73,10 @@ subroutine grid_damage_spectral_init
 character(len=pStringLen) :: &
 snes_type

-write(6,'(/,a)') ' <<<+- grid_spectral_damage init -+>>>'
+print'(/,a)', ' <<<+- grid_spectral_damage init -+>>>'

-write(6,'(/,a)') ' Shanthraj et al., Handbook of Mechanics of Materials, 2019'
+print*, 'Shanthraj et al., Handbook of Mechanics of Materials, 2019'
-write(6,'(a)') ' https://doi.org/10.1007/978-981-10-6855-3_80'
+print*, 'https://doi.org/10.1007/978-981-10-6855-3_80'

 !-------------------------------------------------------------------------------------------------
 ! read numerical parameters and do sanity checks

@@ -152,8 +148,6 @@ subroutine grid_damage_spectral_init
 allocate(phi_stagInc(grid(1),grid(2),grid3), source=1.0_pReal)
 call VecSet(solution_vec,1.0_pReal,ierr); CHKERRQ(ierr)

-!--------------------------------------------------------------------------------------------------
-! damage reference diffusion update
 call updateReference

 end subroutine grid_damage_spectral_init

@@ -210,11 +204,11 @@ function grid_damage_spectral_solution(timeinc,timeinc_old) result(solution)
 call VecMin(solution_vec,devNull,phi_min,ierr); CHKERRQ(ierr)
 call VecMax(solution_vec,devNull,phi_max,ierr); CHKERRQ(ierr)
 if (solution%converged) &
-write(6,'(/,a)') ' ... nonlocal damage converged .....................................'
+print'(/,a)', ' ... nonlocal damage converged .....................................'
-write(6,'(/,a,f8.6,2x,f8.6,2x,e11.4,/)',advance='no') ' Minimum|Maximum|Delta Damage = ',&
+write(IO_STDOUT,'(/,a,f8.6,2x,f8.6,2x,e11.4,/)',advance='no') ' Minimum|Maximum|Delta Damage = ',&
 phi_min, phi_max, stagNorm
-write(6,'(/,a)') ' ==========================================================================='
+print'(/,a)', ' ==========================================================================='
-flush(6)
+flush(IO_STDOUT)

 end function grid_damage_spectral_solution

@@ -122,7 +122,7 @@ subroutine grid_mech_FEM_init
 PetscScalar, pointer, dimension(:,:,:,:) :: &
 u_current,u_lastInc

-write(6,'(/,a)') ' <<<+- grid_mech_FEM init -+>>>'; flush(6)
+print'(/,a)', ' <<<+- grid_mech_FEM init -+>>>'; flush(IO_STDOUT)

 !-----------------------------------------------------------------------------------------------
 ! debugging options

@@ -130,13 +130,12 @@ subroutine grid_mech_FEM_init
 debugRotation = debug_grid%contains('rotation')

 !-------------------------------------------------------------------------------------------------
-! read numerical parameter and do sanity checks
+! read numerical parameters and do sanity checks
 num_grid => config_numerics%get('grid',defaultVal=emptyDict)
 num%eps_div_atol = num_grid%get_asFloat ('eps_div_atol', defaultVal=1.0e-4_pReal)
 num%eps_div_rtol = num_grid%get_asFloat ('eps_div_rtol', defaultVal=5.0e-4_pReal)
 num%eps_stress_atol = num_grid%get_asFloat ('eps_stress_atol', defaultVal=1.0e3_pReal)
 num%eps_stress_rtol = num_grid%get_asFloat ('eps_stress_rtol', defaultVal=0.01_pReal)

 num%itmin = num_grid%get_asInt ('itmin',defaultVal=1)
 num%itmax = num_grid%get_asInt ('itmax',defaultVal=250)

@@ -225,7 +224,7 @@ subroutine grid_mech_FEM_init
 !--------------------------------------------------------------------------------------------------
 ! init fields
 restartRead: if (interface_restartInc > 0) then
-write(6,'(/,a,i0,a)') ' reading restart data of increment ', interface_restartInc, ' from file'
+print'(/,a,i0,a)', ' reading restart data of increment ', interface_restartInc, ' from file'

 write(fileName,'(a,a,i0,a)') trim(getSolverJobName()),'_',worldrank,'.hdf5'
 fileHandle = HDF5_openFile(fileName)

@@ -254,7 +253,7 @@ subroutine grid_mech_FEM_init
 CHKERRQ(ierr)

 restartRead2: if (interface_restartInc > 0) then
-write(6,'(/,a,i0,a)') ' reading more restart data of increment ', interface_restartInc, ' from file'
+print'(a,i0,a)', ' reading more restart data of increment ', interface_restartInc, ' from file'
 call HDF5_read(groupHandle,C_volAvg, 'C_volAvg')
 call HDF5_read(groupHandle,C_volAvgLastInc,'C_volAvgLastInc')

@@ -304,11 +303,11 @@ function grid_mech_FEM_solution(incInfoIn,timeinc,timeinc_old,stress_BC,rotation

 !--------------------------------------------------------------------------------------------------
 ! solve BVP
-call SNESsolve(mech_snes,PETSC_NULL_VEC,solution_current,ierr);CHKERRQ(ierr)
+call SNESsolve(mech_snes,PETSC_NULL_VEC,solution_current,ierr); CHKERRQ(ierr)

 !--------------------------------------------------------------------------------------------------
 ! check convergence
-call SNESGetConvergedReason(mech_snes,reason,ierr);CHKERRQ(ierr)
+call SNESGetConvergedReason(mech_snes,reason,ierr); CHKERRQ(ierr)

 solution%converged = reason > 0
 solution%iterationsNeeded = totalIter

@@ -353,7 +352,7 @@ subroutine grid_mech_FEM_forward(cutBack,guess,timeinc,timeinc_old,loadCaseTime,
 F_aimDot = merge(stress_BC%maskFloat*(F_aim-F_aim_lastInc)/timeinc_old, 0.0_pReal, guess)
 F_aim_lastInc = F_aim

-!--------------------------------------------------------------------------------------------------
+!-----------------------------------------------------------------------------------------------
 ! calculate rate for aim
 if (deformation_BC%myType=='l') then ! calculate F_aimDot from given L and current F
 F_aimDot = &

@@ -414,7 +413,7 @@ subroutine grid_mech_FEM_restartWrite
 call DMDAVecGetArrayF90(mech_grid,solution_current,u_current,ierr); CHKERRQ(ierr)
 call DMDAVecGetArrayF90(mech_grid,solution_lastInc,u_lastInc,ierr); CHKERRQ(ierr)

-write(6,'(a)') ' writing solver data required for restart to file'; flush(6)
+print*, 'writing solver data required for restart to file'; flush(IO_STDOUT)

 write(fileName,'(a,a,i0,a)') trim(getSolverJobName()),'_',worldrank,'.hdf5'
 fileHandle = HDF5_openFile(fileName,'w')

@@ -476,13 +475,13 @@ subroutine converged(snes_local,PETScIter,devNull1,devNull2,fnorm,reason,dummy,i

 !--------------------------------------------------------------------------------------------------
 ! report
-write(6,'(1/,a)') ' ... reporting .............................................................'
+print'(1/,a)', ' ... reporting .............................................................'
-write(6,'(1/,a,f12.2,a,es8.2,a,es9.2,a)') ' error divergence = ', &
+print'(1/,a,f12.2,a,es8.2,a,es9.2,a)', ' error divergence = ', &
 err_div/divTol, ' (',err_div,' / m, tol = ',divTol,')'
-write(6,'(a,f12.2,a,es8.2,a,es9.2,a)') ' error stress BC = ', &
+print'(a,f12.2,a,es8.2,a,es9.2,a)', ' error stress BC = ', &
 err_BC/BCTol, ' (',err_BC, ' Pa, tol = ',BCTol,')'
-write(6,'(/,a)') ' ==========================================================================='
+print'(/,a)', ' ==========================================================================='
-flush(6)
+flush(IO_STDOUT)

 end subroutine converged

@@ -516,13 +515,13 @@ subroutine formResidual(da_local,x_local, &
 ! begin of new iteration
 newIteration: if (totalIter <= PETScIter) then
 totalIter = totalIter + 1
-write(6,'(1x,a,3(a,i0))') trim(incInfo), ' @ Iteration ', num%itmin, '≤',totalIter+1, '≤', num%itmax
+print'(1x,a,3(a,i0))', trim(incInfo), ' @ Iteration ', num%itmin, '≤',totalIter+1, '≤', num%itmax
 if (debugRotation) &
-write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
+write(IO_STDOUT,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
 ' deformation gradient aim (lab) =', transpose(params%rotation_BC%rotate(F_aim,active=.true.))
-write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
+write(IO_STDOUT,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
 ' deformation gradient aim =', transpose(F_aim)
-flush(6)
+flush(IO_STDOUT)
 endif newIteration

 !--------------------------------------------------------------------------------------------------

@@ -541,7 +540,7 @@ subroutine formResidual(da_local,x_local, &

 !--------------------------------------------------------------------------------------------------
 ! evaluate constitutive response
-call Utilities_constitutiveResponse(P_current,&
+call utilities_constitutiveResponse(P_current,&
 P_av,C_volAvg,devNull, &
 F,params%timeinc,params%rotation_BC)
 call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,PETSC_COMM_WORLD,ierr)
@@ -42,8 +42,7 @@ module grid_mech_spectral_basic

 type(tNumerics) :: num ! numerics parameters. Better name?

-logical, private:: &
-debugRotation
+logical, private :: debugRotation

 !--------------------------------------------------------------------------------------------------
 ! PETSc data

@@ -110,13 +109,13 @@ subroutine grid_mech_spectral_basic_init
 character(len=pStringLen) :: &
 fileName

-write(6,'(/,a)') ' <<<+- grid_mech_spectral_basic init -+>>>'; flush(6)
+print'(/,a)', ' <<<+- grid_mech_spectral_basic init -+>>>'; flush(IO_STDOUT)

-write(6,'(/,a)') ' Eisenlohr et al., International Journal of Plasticity 46:37–53, 2013'
+print*, 'Eisenlohr et al., International Journal of Plasticity 46:37–53, 2013'
-write(6,'(a)') ' https://doi.org/10.1016/j.ijplas.2012.09.012'
+print*, 'https://doi.org/10.1016/j.ijplas.2012.09.012'//IO_EOL

-write(6,'(/,a)') ' Shanthraj et al., International Journal of Plasticity 66:31–45, 2015'
+print*, 'Shanthraj et al., International Journal of Plasticity 66:31–45, 2015'
-write(6,'(a)') ' https://doi.org/10.1016/j.ijplas.2014.02.006'
+print*, 'https://doi.org/10.1016/j.ijplas.2014.02.006'

 !-------------------------------------------------------------------------------------------------
 ! debugging options

@@ -132,7 +131,6 @@ subroutine grid_mech_spectral_basic_init
 num%eps_div_rtol = num_grid%get_asFloat ('eps_div_rtol', defaultVal=5.0e-4_pReal)
 num%eps_stress_atol = num_grid%get_asFloat ('eps_stress_atol',defaultVal=1.0e3_pReal)
 num%eps_stress_rtol = num_grid%get_asFloat ('eps_stress_rtol',defaultVal=0.01_pReal)

 num%itmin = num_grid%get_asInt ('itmin',defaultVal=1)
 num%itmax = num_grid%get_asInt ('itmax',defaultVal=250)

@@ -186,7 +184,7 @@ subroutine grid_mech_spectral_basic_init
 call DMDAVecGetArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) ! places pointer on PETSc data

 restartRead: if (interface_restartInc > 0) then
-write(6,'(/,a,i0,a)') ' reading restart data of increment ', interface_restartInc, ' from file'
+print'(/,a,i0,a)', ' reading restart data of increment ', interface_restartInc, ' from file'

 write(fileName,'(a,a,i0,a)') trim(getSolverJobName()),'_',worldrank,'.hdf5'
 fileHandle = HDF5_openFile(fileName)

@@ -211,7 +209,7 @@ subroutine grid_mech_spectral_basic_init
 call DMDAVecRestoreArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) ! deassociate pointer

 restartRead2: if (interface_restartInc > 0) then
-write(6,'(/,a,i0,a)') ' reading more restart data of increment ', interface_restartInc, ' from file'
+print'(a,i0,a)', ' reading more restart data of increment ', interface_restartInc, ' from file'
 call HDF5_read(groupHandle,C_volAvg, 'C_volAvg')
 call HDF5_read(groupHandle,C_volAvgLastInc,'C_volAvgLastInc')

@@ -377,7 +375,7 @@ subroutine grid_mech_spectral_basic_restartWrite

 call DMDAVecGetArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr)

-write(6,'(a)') ' writing solver data required for restart to file'; flush(6)
+print'(a)', ' writing solver data required for restart to file'; flush(IO_STDOUT)

 write(fileName,'(a,a,i0,a)') trim(getSolverJobName()),'_',worldrank,'.hdf5'
 fileHandle = HDF5_openFile(fileName,'w')

@@ -437,13 +435,13 @@ subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dumm

 !--------------------------------------------------------------------------------------------------
 ! report
-write(6,'(1/,a)') ' ... reporting .............................................................'
+print'(1/,a)', ' ... reporting .............................................................'
-write(6,'(1/,a,f12.2,a,es8.2,a,es9.2,a)') ' error divergence = ', &
+print'(1/,a,f12.2,a,es8.2,a,es9.2,a)', ' error divergence = ', &
 err_div/divTol, ' (',err_div,' / m, tol = ',divTol,')'
-write(6,'(a,f12.2,a,es8.2,a,es9.2,a)') ' error stress BC = ', &
+print'(a,f12.2,a,es8.2,a,es9.2,a)', ' error stress BC = ', &
 err_BC/BCTol, ' (',err_BC, ' Pa, tol = ',BCTol,')'
-write(6,'(/,a)') ' ==========================================================================='
+print'(/,a)', ' ==========================================================================='
-flush(6)
+flush(IO_STDOUT)

 end subroutine converged

@@ -475,13 +473,13 @@ subroutine formResidual(in, F, &
 ! begin of new iteration
 newIteration: if (totalIter <= PETScIter) then
 totalIter = totalIter + 1
-write(6,'(1x,a,3(a,i0))') trim(incInfo), ' @ Iteration ', num%itmin, '≤',totalIter, '≤', num%itmax
+print'(1x,a,3(a,i0))', trim(incInfo), ' @ Iteration ', num%itmin, '≤',totalIter, '≤', num%itmax
 if (debugRotation) &
-write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
+write(IO_STDOUT,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
 ' deformation gradient aim (lab) =', transpose(params%rotation_BC%rotate(F_aim,active=.true.))
-write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
+write(IO_STDOUT,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
 ' deformation gradient aim =', transpose(F_aim)
-flush(6)
+flush(IO_STDOUT)
 endif newIteration

 !--------------------------------------------------------------------------------------------------
@ -123,10 +123,10 @@ subroutine grid_mech_spectral_polarisation_init
|
||||||
character(len=pStringLen) :: &
|
character(len=pStringLen) :: &
|
||||||
fileName
|
fileName
|
||||||
|
|
||||||
write(6,'(/,a)') ' <<<+- grid_mech_spectral_polarisation init -+>>>'; flush(6)
|
print'(/,a)', ' <<<+- grid_mech_spectral_polarisation init -+>>>'; flush(IO_STDOUT)
|
||||||
|
|
||||||
write(6,'(/,a)') ' Shanthraj et al., International Journal of Plasticity 66:31–45, 2015'
|
print*, 'Shanthraj et al., International Journal of Plasticity 66:31–45, 2015'
|
||||||
write(6,'(a)') ' https://doi.org/10.1016/j.ijplas.2014.02.006'
|
print*, 'https://doi.org/10.1016/j.ijplas.2014.02.006'
|
||||||
|
|
||||||
!------------------------------------------------------------------------------------------------
|
!------------------------------------------------------------------------------------------------
|
||||||
! debugging options
|
! debugging options
|
||||||
|
@@ -134,9 +134,8 @@ subroutine grid_mech_spectral_polarisation_init
debugRotation = debug_grid%contains('rotation')

!-------------------------------------------------------------------------------------------------
-! read numerical parameters
+! read numerical parameters and do sanity checks
num_grid => config_numerics%get('grid',defaultVal=emptyDict)

num%update_gamma = num_grid%get_asBool ('update_gamma', defaultVal=.false.)
num%eps_div_atol = num_grid%get_asFloat ('eps_div_atol', defaultVal=1.0e-4_pReal)
num%eps_div_rtol = num_grid%get_asFloat ('eps_div_rtol', defaultVal=5.0e-4_pReal)

@@ -207,7 +206,7 @@ subroutine grid_mech_spectral_polarisation_init
F_tau => FandF_tau(9:17,:,:,:)

restartRead: if (interface_restartInc > 0) then
-write(6,'(/,a,i0,a)') ' reading restart data of increment ', interface_restartInc, ' from file'
+print'(/,a,i0,a)', ' reading restart data of increment ', interface_restartInc, ' from file'

write(fileName,'(a,a,i0,a)') trim(getSolverJobName()),'_',worldrank,'.hdf5'
fileHandle = HDF5_openFile(fileName)

@@ -236,7 +235,7 @@ subroutine grid_mech_spectral_polarisation_init
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) ! deassociate pointer

restartRead2: if (interface_restartInc > 0) then
-write(6,'(/,a,i0,a)') ' reading more restart data of increment ', interface_restartInc, ' from file'
+print'(a,i0,a)', ' reading more restart data of increment ', interface_restartInc, ' from file'
call HDF5_read(groupHandle,C_volAvg, 'C_volAvg')
call HDF5_read(groupHandle,C_volAvgLastInc,'C_volAvgLastInc')

@@ -434,7 +433,7 @@ subroutine grid_mech_spectral_polarisation_restartWrite
F => FandF_tau(0: 8,:,:,:)
F_tau => FandF_tau(9:17,:,:,:)

-write(6,'(a)') ' writing solver data required for restart to file'; flush(6)
+print*, 'writing solver data required for restart to file'; flush(IO_STDOUT)

write(fileName,'(a,a,i0,a)') trim(getSolverJobName()),'_',worldrank,'.hdf5'
fileHandle = HDF5_openFile(fileName,'w')

@@ -498,15 +497,15 @@ subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dumm

!--------------------------------------------------------------------------------------------------
! report
-write(6,'(1/,a)') ' ... reporting .............................................................'
+print'(1/,a)', ' ... reporting .............................................................'
-write(6,'(1/,a,f12.2,a,es8.2,a,es9.2,a)') ' error divergence = ', &
+print'(1/,a,f12.2,a,es8.2,a,es9.2,a)', ' error divergence = ', &
err_div/divTol, ' (',err_div, ' / m, tol = ',divTol,')'
-write(6, '(a,f12.2,a,es8.2,a,es9.2,a)') ' error curl = ', &
+print '(a,f12.2,a,es8.2,a,es9.2,a)', ' error curl = ', &
err_curl/curlTol,' (',err_curl,' -, tol = ',curlTol,')'
-write(6, '(a,f12.2,a,es8.2,a,es9.2,a)') ' error BC = ', &
+print '(a,f12.2,a,es8.2,a,es9.2,a)', ' error stress BC = ', &
err_BC/BCTol, ' (',err_BC, ' Pa, tol = ',BCTol,')'
-write(6,'(/,a)') ' ==========================================================================='
+print'(/,a)', ' ==========================================================================='
-flush(6)
+flush(IO_STDOUT)

end subroutine converged

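The converged() report above prints every residual both as a ratio to its tolerance and as an absolute value. A small stand-alone sketch of that reporting pattern, assuming the combined tolerance is built as max(rtol*reference, atol) from the eps_div_atol/eps_div_rtol parameters read earlier; the actual reference quantity used by the solver is not part of this hunk, so the values below are made up:

program tolerance_report
  implicit none
  integer, parameter :: pReal = selected_real_kind(15)
  real(pReal) :: err_div, err_div_ref, divTol
  real(pReal), parameter :: eps_div_atol = 1.0e-4_pReal, &   ! defaults as in the hunk above
                            eps_div_rtol = 5.0e-4_pReal

  err_div     = 2.0e-5_pReal                                 ! hypothetical current error
  err_div_ref = 1.0e-1_pReal                                 ! hypothetical reference value
  divTol      = max(eps_div_rtol*err_div_ref, eps_div_atol)  ! assumed combination rule

  print'(a,f12.2,a,es8.2,a,es9.2,a)', ' error divergence = ', &
        err_div/divTol, ' (',err_div, ' / m, tol = ',divTol,')'

  if (err_div/divTol <= 1.0_pReal) print'(a)', ' ... converged'
end program tolerance_report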
@@ -558,13 +557,13 @@ subroutine formResidual(in, FandF_tau, &
! begin of new iteration
newIteration: if (totalIter <= PETScIter) then
totalIter = totalIter + 1
-write(6,'(1x,a,3(a,i0))') trim(incInfo), ' @ Iteration ', num%itmin, '≤',totalIter, '≤', num%itmax
+print'(1x,a,3(a,i0))', trim(incInfo), ' @ Iteration ', num%itmin, '≤',totalIter, '≤', num%itmax
if(debugRotation) &
-write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
+write(IO_STDOUT,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
' deformation gradient aim (lab) =', transpose(params%rotation_BC%rotate(F_aim,active=.true.))
-write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
+write(IO_STDOUT,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
' deformation gradient aim =', transpose(F_aim)
-flush(6)
+flush(IO_STDOUT)
endif newIteration

!--------------------------------------------------------------------------------------------------

@@ -23,20 +23,17 @@ module grid_thermal_spectral
implicit none
private

-!--------------------------------------------------------------------------------------------------
-! derived types
-type(tSolutionParams) :: params

type :: tNumerics
integer :: &
itmax !< maximum number of iterations
real(pReal) :: &
eps_thermal_atol, & !< absolute tolerance for thermal equilibrium
eps_thermal_rtol !< relative tolerance for thermal equilibrium
end type tNumerics

type(tNumerics) :: num

+type(tSolutionParams) :: params
!--------------------------------------------------------------------------------------------------
! PETSc data
SNES :: thermal_snes

@@ -74,13 +71,13 @@ subroutine grid_thermal_spectral_init
class(tNode), pointer :: &
num_grid

-write(6,'(/,a)') ' <<<+- grid_thermal_spectral init -+>>>'
+print'(/,a)', ' <<<+- grid_thermal_spectral init -+>>>'

-write(6,'(/,a)') ' Shanthraj et al., Handbook of Mechanics of Materials, 2019'
+print*, 'Shanthraj et al., Handbook of Mechanics of Materials, 2019'
-write(6,'(a)') ' https://doi.org/10.1007/978-981-10-6855-3_80'
+print*, 'https://doi.org/10.1007/978-981-10-6855-3_80'

!-------------------------------------------------------------------------------------------------
-! read numerical parameter and do sanity checks
+! read numerical parameters and do sanity checks
num_grid => config_numerics%get('grid',defaultVal=emptyDict)
num%itmax = num_grid%get_asInt ('itmax', defaultVal=250)
num%eps_thermal_atol = num_grid%get_asFloat ('eps_thermal_atol',defaultVal=1.0e-2_pReal)

@@ -94,8 +91,7 @@ subroutine grid_thermal_spectral_init
! set default and user defined options for PETSc
call PETScOptionsInsertString(PETSC_NULL_OPTIONS,'-thermal_snes_type ngmres',ierr)
CHKERRQ(ierr)
-call PETScOptionsInsertString(PETSC_NULL_OPTIONS,&
-num_grid%get_asString('petsc_options',defaultVal=''),ierr)
+call PETScOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
CHKERRQ(ierr)

!--------------------------------------------------------------------------------------------------

@@ -110,7 +106,7 @@ subroutine grid_thermal_spectral_init
DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point
grid(1),grid(2),grid(3), & ! global grid
1, 1, worldsize, &
-1, 0, & ! #dof (thermal phase field), ghost boundary width (domain overlap)
+1, 0, & ! #dof (T field), ghost boundary width (domain overlap)
[grid(1)],[grid(2)],localK, & ! local grid
thermal_grid,ierr) ! handle, error
CHKERRQ(ierr)

@@ -159,8 +155,6 @@ function grid_thermal_spectral_solution(timeinc,timeinc_old) result(solution)
timeinc_old !< increment in time of last increment
integer :: i, j, k, cell
type(tSolutionState) :: solution
-class(tNode), pointer :: &
-num_grid
PetscInt :: devNull
PetscReal :: T_min, T_max, stagNorm, solnNorm

@@ -204,11 +198,11 @@ function grid_thermal_spectral_solution(timeinc,timeinc_old) result(solution)
call VecMin(solution_vec,devNull,T_min,ierr); CHKERRQ(ierr)
call VecMax(solution_vec,devNull,T_max,ierr); CHKERRQ(ierr)
if (solution%converged) &
-write(6,'(/,a)') ' ... thermal conduction converged ..................................'
+print'(/,a)', ' ... thermal conduction converged ..................................'
-write(6,'(/,a,f8.4,2x,f8.4,2x,f8.4,/)',advance='no') ' Minimum|Maximum|Delta Temperature / K = ',&
+write(IO_STDOUT,'(/,a,f8.4,2x,f8.4,2x,f8.4,/)',advance='no') ' Minimum|Maximum|Delta Temperature / K = ',&
T_min, T_max, stagNorm
-write(6,'(/,a)') ' ==========================================================================='
+print'(/,a)', ' ==========================================================================='
-flush(6)
+flush(IO_STDOUT)

end function grid_thermal_spectral_solution

@@ -208,10 +208,10 @@ subroutine spectral_utilities_init
debugPETSc = debug_grid%contains('petsc')


-if(debugPETSc) write(6,'(3(/,a),/)') &
+if(debugPETSc) print'(3(/,a),/)', &
' Initializing PETSc with debug options: ', &
trim(PETScDebug), &
-' add more using the PETSc_Options keyword in numerics.yaml '; flush(6)
+' add more using the PETSc_Options keyword in numerics.yaml '; flush(IO_STDOUT)

num_grid => config_numerics%get('grid',defaultVal=emptyDict)

@@ -280,7 +280,7 @@ subroutine spectral_utilities_init
if (pReal /= C_DOUBLE .or. kind(1) /= C_INT) error stop 'C and Fortran datatypes do not match'
call fftw_set_timelimit(num_grid%get_asFloat('fftw_timelimit',defaultVal=-1.0_pReal))

-if (debugGeneral) write(6,'(/,a)') ' FFTW initialized'; flush(6)
+print*, 'FFTW initialized'; flush(IO_STDOUT)

!--------------------------------------------------------------------------------------------------
! MPI allocation

@@ -506,8 +506,8 @@ subroutine utilities_fourierGammaConvolution(fieldAim)
logical :: err


-write(6,'(/,a)') ' ... doing gamma convolution ...............................................'
+print'(/,a)', ' ... doing gamma convolution ...............................................'
-flush(6)
+flush(IO_STDOUT)

!--------------------------------------------------------------------------------------------------
! do the actual spectral method calculation (mechanical equilibrium)

@@ -576,8 +576,8 @@ real(pReal) function utilities_divergenceRMS()
integer :: i, j, k, ierr
complex(pReal), dimension(3) :: rescaledGeom

-write(6,'(/,a)') ' ... calculating divergence ................................................'
+print'(/,a)', ' ... calculating divergence ................................................'
-flush(6)
+flush(IO_STDOUT)

rescaledGeom = cmplx(geomSize/scaledGeomSize,0.0_pReal)

@@ -620,8 +620,8 @@ real(pReal) function utilities_curlRMS()
complex(pReal), dimension(3,3) :: curl_fourier
complex(pReal), dimension(3) :: rescaledGeom

-write(6,'(/,a)') ' ... calculating curl ......................................................'
+print'(/,a)', ' ... calculating curl ......................................................'
-flush(6)
+flush(IO_STDOUT)

rescaledGeom = cmplx(geomSize/scaledGeomSize,0.0_pReal)

@@ -700,10 +700,10 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C)
temp99_real = math_3333to99(rot_BC%rotate(C))

if(debugGeneral) then
-write(6,'(/,a)') ' ... updating masked compliance ............................................'
+print'(/,a)', ' ... updating masked compliance ............................................'
-write(6,'(/,a,/,9(9(2x,f12.7,1x)/))',advance='no') ' Stiffness C (load) / GPa =',&
+write(IO_STDOUT,'(/,a,/,9(9(2x,f12.7,1x)/))',advance='no') ' Stiffness C (load) / GPa =',&
transpose(temp99_Real)*1.0e-9_pReal
-flush(6)
+flush(IO_STDOUT)
endif

do i = 1,9; do j = 1,9

@@ -723,9 +723,9 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C)
if (debugGeneral .or. errmatinv) then
write(formatString, '(i2)') size_reduced
formatString = '(/,a,/,'//trim(formatString)//'('//trim(formatString)//'(2x,es9.2,1x)/))'
-write(6,trim(formatString),advance='no') ' C * S (load) ', &
+write(IO_STDOUT,trim(formatString),advance='no') ' C * S (load) ', &
transpose(matmul(c_reduced,s_reduced))
-write(6,trim(formatString),advance='no') ' S (load) ', transpose(s_reduced)
+write(IO_STDOUT,trim(formatString),advance='no') ' S (load) ', transpose(s_reduced)
if(errmatinv) call IO_error(error_ID=400,ext_msg='utilities_maskedCompliance')
endif
temp99_real = reshape(unpack(reshape(s_reduced,[size_reduced**2]),reshape(mask,[81]),0.0_pReal),[9,9])
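utilities_maskedCompliance above builds its output format at run time so that a size_reduced by size_reduced matrix can be written with a single statement. A self-contained sketch of the same trick, with a made-up 3 by 3 matrix:

program runtime_format
  implicit none
  integer, parameter :: n = 3
  character(len=64) :: formatString
  real :: A(n,n)
  integer :: i

  A = reshape([(real(10*i), i = 1, n*n)], [n,n])  ! arbitrary demo values

  ! '(i2)' writes the dimension into the string, which is then spliced
  ! into a nested repeat count: n rows of n numbers, one record per row
  write(formatString,'(i2)') n
  formatString = '(/,a,/,'//trim(formatString)//'('//trim(formatString)//'(2x,es9.2,1x)/))'

  write(*,trim(formatString),advance='no') ' demo matrix', transpose(A)
end program runtime_format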
@@ -736,9 +736,9 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C)
utilities_maskedCompliance = math_99to3333(temp99_Real)

if(debugGeneral) then
-write(6,'(/,a,/,9(9(2x,f10.5,1x)/),/)',advance='no') &
+write(IO_STDOUT,'(/,a,/,9(9(2x,f10.5,1x)/),/)',advance='no') &
' Masked Compliance (load) * GPa =', transpose(temp99_Real)*1.0e9_pReal
-flush(6)
+flush(IO_STDOUT)
endif

end function utilities_maskedCompliance

@@ -822,8 +822,8 @@ subroutine utilities_constitutiveResponse(P,P_av,C_volAvg,C_minmaxAvg,&
real(pReal) :: dPdF_norm_max, dPdF_norm_min
real(pReal), dimension(2) :: valueAndRank !< pair of min/max norm of dPdF to synchronize min/max of dPdF

-write(6,'(/,a)') ' ... evaluating constitutive response ......................................'
+print'(/,a)', ' ... evaluating constitutive response ......................................'
-flush(6)
+flush(IO_STDOUT)

materialpoint_F = reshape(F,[3,3,1,product(grid(1:2))*grid3]) ! set materialpoint target F to estimated field

@@ -833,13 +833,13 @@ subroutine utilities_constitutiveResponse(P,P_av,C_volAvg,C_minmaxAvg,&
P_av = sum(sum(sum(P,dim=5),dim=4),dim=3) * wgt ! average of P
call MPI_Allreduce(MPI_IN_PLACE,P_av,9,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr)
if (debugRotation) &
-write(6,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress (lab) / MPa =',&
+write(IO_STDOUT,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress (lab) / MPa =',&
transpose(P_av)*1.e-6_pReal
if(present(rotation_BC)) &
P_av = rotation_BC%rotate(P_av)
-write(6,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress / MPa =',&
+write(IO_STDOUT,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress / MPa =',&
transpose(P_av)*1.e-6_pReal
-flush(6)
+flush(IO_STDOUT)

dPdF_max = 0.0_pReal
dPdF_norm_max = 0.0_pReal

@@ -1095,7 +1095,7 @@ subroutine utilities_saveReferenceStiffness
fileUnit,ierr

if (worldrank == 0) then
-write(6,'(a)') ' writing reference stiffness data required for restart to file'; flush(6)
+print'(a)', 'writing reference stiffness data required for restart to file'; flush(IO_STDOUT)
open(newunit=fileUnit, file=getSolverJobName()//'.C_ref',&
status='replace',access='stream',action='write',iostat=ierr)
if(ierr /=0) call IO_error(100,ext_msg='could not open file '//getSolverJobName()//'.C_ref')
@@ -186,7 +186,7 @@ subroutine homogenization_init
materialpoint_F = materialpoint_F0 ! initialize to identity
allocate(materialpoint_P(3,3,discretization_nIP,discretization_nElem), source=0.0_pReal)

-print'(/,a)', ' <<<+- homogenization init -+>>>'; flush(6)
+print'(/,a)', ' <<<+- homogenization init -+>>>'; flush(IO_STDOUT)

num%nMPstate = num_homogGeneric%get_asInt ('nMPstate', defaultVal=10)
num%subStepMinHomog = num_homogGeneric%get_asFloat('subStepMin', defaultVal=1.0e-3_pReal)
@@ -95,7 +95,7 @@ module subroutine mech_RGC_init(num_homogMech)
print'(/,a)', ' <<<+- homogenization_mech_rgc init -+>>>'

Ninstance = count(homogenization_type == HOMOGENIZATION_RGC_ID)
-print'(a,i2)', ' # instances: ',Ninstance; flush(6)
+print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT)

print*, 'Tjahjanto et al., International Journal of Material Forming 2(1):939–942, 2009'
print*, 'https://doi.org/10.1007/s12289-009-0619-1'//IO_EOL

@@ -247,7 +247,7 @@ module subroutine mech_RGC_partitionDeformation(F,avgF,instance,of)
print'(1x,3(e15.8,1x))',(F(i,j,iGrain), j = 1,3)
enddo
print*,' '
-flush(6)
+flush(IO_STDOUT)
endif
#endif
enddo

@@ -376,7 +376,7 @@ module procedure mech_RGC_updateState
'@ grain ',stresLoc(3),' in component ',stresLoc(1),stresLoc(2)
print'(a,e15.8,a,i3,a,i2)',' Max residual: ',residMax, &
' @ iface ',residLoc(1),' in direction ',residLoc(2)
-flush(6)
+flush(IO_STDOUT)
endif
#endif

@@ -388,7 +388,7 @@ module procedure mech_RGC_updateState
mech_RGC_updateState = .true.
#ifdef DEBUG
if (debugHomog%extensive .and. prm%of_debug == of) &
-print*, '... done and happy'; flush(6)
+print*, '... done and happy'; flush(IO_STDOUT)
#endif

!--------------------------------------------------------------------------------------------------

@@ -416,7 +416,7 @@ module procedure mech_RGC_updateState
print'(a,e15.8,/)', ' Volume discrepancy: ', dst%volumeDiscrepancy(of)
print'(a,e15.8)', ' Maximum relaxation rate: ', dst%relaxationRate_max(of)
print'(a,e15.8,/)', ' Average relaxation rate: ', dst%relaxationRate_avg(of)
-flush(6)
+flush(IO_STDOUT)
endif
#endif

@@ -429,7 +429,7 @@ module procedure mech_RGC_updateState

#ifdef DEBUG
if (debugHomog%extensive .and. prm%of_debug == of) &
-print'(a,/)', ' ... broken'; flush(6)
+print'(a,/)', ' ... broken'; flush(IO_STDOUT)
#endif

return

@@ -437,7 +437,7 @@ module procedure mech_RGC_updateState
else ! proceed with computing the Jacobian and state update
#ifdef DEBUG
if (debugHomog%extensive .and. prm%of_debug == of) &
-print'(a,/)', ' ... not yet done'; flush(6)
+print'(a,/)', ' ... not yet done'; flush(IO_STDOUT)
#endif

endif

@@ -499,7 +499,7 @@ module procedure mech_RGC_updateState
print'(1x,100(e11.4,1x))',(smatrix(i,j), j = 1,3*nIntFaceTot)
enddo
print*,' '
-flush(6)
+flush(IO_STDOUT)
endif
#endif

@@ -559,7 +559,7 @@ module procedure mech_RGC_updateState
print'(1x,100(e11.4,1x))',(pmatrix(i,j), j = 1,3*nIntFaceTot)
enddo
print*,' '
-flush(6)
+flush(IO_STDOUT)
endif
#endif

@@ -578,7 +578,7 @@ module procedure mech_RGC_updateState
print'(1x,100(e11.4,1x))',(rmatrix(i,j), j = 1,3*nIntFaceTot)
enddo
print*,' '
-flush(6)
+flush(IO_STDOUT)
endif
#endif

@@ -593,7 +593,7 @@ module procedure mech_RGC_updateState
print'(1x,100(e11.4,1x))',(jmatrix(i,j), j = 1,3*nIntFaceTot)
enddo
print*,' '
-flush(6)
+flush(IO_STDOUT)
endif
#endif

@@ -609,7 +609,7 @@ module procedure mech_RGC_updateState
print'(1x,100(e11.4,1x))',(jnverse(i,j), j = 1,3*nIntFaceTot)
enddo
print*,' '
-flush(6)
+flush(IO_STDOUT)
endif
#endif

@@ -625,7 +625,7 @@ module procedure mech_RGC_updateState
!$OMP CRITICAL (write2out)
print'(a,i3,a,i3,a)',' RGC_updateState: ip ',ip,' | el ',el,' enforces cutback'
print'(a,e15.8)',' due to large relaxation change = ',maxval(abs(drelax))
-flush(6)
+flush(IO_STDOUT)
!$OMP END CRITICAL (write2out)
endif

@@ -636,7 +636,7 @@ module procedure mech_RGC_updateState
print'(1x,2(e15.8,1x))', stt%relaxationVector(i,of)
enddo
print*,' '
-flush(6)
+flush(IO_STDOUT)
endif
#endif

@@ -40,7 +40,7 @@ module subroutine mech_isostrain_init
print'(/,a)', ' <<<+- homogenization_mech_isostrain init -+>>>'

Ninstance = count(homogenization_type == HOMOGENIZATION_ISOSTRAIN_ID)
-print'(a,i2)', ' # instances: ',Ninstance; flush(6)
+print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT)

allocate(param(Ninstance)) ! one container of parameters per instance

@@ -21,7 +21,7 @@ module subroutine mech_none_init
print'(/,a)', ' <<<+- homogenization_mech_none init -+>>>'

Ninstance = count(homogenization_type == HOMOGENIZATION_NONE_ID)
-print'(a,i2)', ' # instances: ',Ninstance; flush(6)
+print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT)

do h = 1, size(homogenization_type)
if (homogenization_type(h) /= HOMOGENIZATION_NONE_ID) cycle

@@ -49,7 +49,7 @@ module function kinematics_cleavage_opening_init(kinematics_length) result(myKin

myKinematics = kinematics_active('cleavage_opening',kinematics_length)
Ninstance = count(myKinematics)
-print'(a,i2)', ' # instances: ',Ninstance; flush(6)
+print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT)
if(Ninstance == 0) return

phases => config_material%get('phase')

@@ -52,7 +52,7 @@ module function kinematics_slipplane_opening_init(kinematics_length) result(myKi

myKinematics = kinematics_active('slipplane_opening',kinematics_length)
Ninstance = count(myKinematics)
-print'(a,i2)', ' # instances: ',Ninstance; flush(6)
+print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT)
if(Ninstance == 0) return

phases => config_material%get('phase')

@@ -42,7 +42,7 @@ module function kinematics_thermal_expansion_init(kinematics_length) result(myKi

myKinematics = kinematics_active('thermal_expansion',kinematics_length)
Ninstance = count(myKinematics)
-print'(a,i2)', ' # instances: ',Ninstance; flush(6)
+print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT)
if(Ninstance == 0) return

phases => config_material%get('phase')
@@ -457,7 +457,7 @@ subroutine lattice_init
phase, &
elasticity

-print'(/,a)', ' <<<+- lattice init -+>>>'; flush(6)
+print'(/,a)', ' <<<+- lattice init -+>>>'; flush(IO_STDOUT)

phases => config_material%get('phase')
Nphases = phases%length
@@ -52,7 +52,7 @@ subroutine discretization_marc_init
type(tElement) :: elem

integer, dimension(:), allocatable :: &
-microstructureAt
+materialAt
integer:: &
Nnodes, & !< total number of nodes in the mesh
Nelems, & !< total number of elements in the mesh

@@ -70,7 +70,7 @@ subroutine discretization_marc_init
class(tNode), pointer :: &
num_commercialFEM

-write(6,'(/,a)') ' <<<+- discretization_marc init -+>>>'; flush(6)
+print'(/,a)', ' <<<+- discretization_marc init -+>>>'; flush(6)

!---------------------------------------------------------------------------------
! read debug parameters

@@ -83,7 +83,7 @@ subroutine discretization_marc_init
mesh_unitlength = num_commercialFEM%get_asFloat('unitlength',defaultVal=1.0_pReal) ! set physical extent of a length unit in mesh
if (mesh_unitlength <= 0.0_pReal) call IO_error(301,ext_msg='unitlength')

-call inputRead(elem,node0_elem,connectivity_elem,microstructureAt)
+call inputRead(elem,node0_elem,connectivity_elem,materialAt)
nElems = size(connectivity_elem,2)

if (debug_e < 1 .or. debug_e > nElems) call IO_error(602,ext_msg='element')

@@ -103,7 +103,7 @@ subroutine discretization_marc_init
call buildIPcoordinates(IP_reshaped,reshape(connectivity_cell,[elem%NcellNodesPerCell,&
elem%nIPs*nElems]),node0_cell)

-call discretization_init(microstructureAt,&
+call discretization_init(materialAt,&
IP_reshaped,&
node0_cell)

@@ -172,7 +172,7 @@ end subroutine writeGeometry
!--------------------------------------------------------------------------------------------------
!> @brief Read mesh from marc input file
!--------------------------------------------------------------------------------------------------
-subroutine inputRead(elem,node0_elem,connectivity_elem,microstructureAt)
+subroutine inputRead(elem,node0_elem,connectivity_elem,materialAt)

type(tElement), intent(out) :: elem
real(pReal), dimension(:,:), allocatable, intent(out) :: &

@@ -180,7 +180,7 @@ subroutine inputRead(elem,node0_elem,connectivity_elem,microstructureAt)
integer, dimension(:,:), allocatable, intent(out) :: &
connectivity_elem
integer, dimension(:), allocatable, intent(out) :: &
-microstructureAt
+materialAt

integer :: &
fileFormatVersion, &

@@ -226,9 +226,9 @@ subroutine inputRead(elem,node0_elem,connectivity_elem,microstructureAt)

connectivity_elem = inputRead_connectivityElem(nElems,elem%nNodes,inputFile)

-call inputRead_microstructure(microstructureAt, &
+call inputRead_material(materialAt, &
nElems,elem%nNodes,nameElemSet,mapElemSet,&
initialcondTableStyle,inputFile)
end subroutine inputRead

@@ -675,13 +675,13 @@ end function inputRead_connectivityElem

!--------------------------------------------------------------------------------------------------
-!> @brief Store microstructure ID
+!> @brief Store material ID
!--------------------------------------------------------------------------------------------------
-subroutine inputRead_microstructure(microstructureAt,&
+subroutine inputRead_material(materialAt,&
nElem,nNodes,nameElemSet,mapElemSet,initialcondTableStyle,fileContent)

integer, dimension(:), allocatable, intent(out) :: &
-microstructureAt
+materialAt
integer, intent(in) :: &
nElem, &
nNodes, & !< number of nodes per element

@@ -696,7 +696,7 @@ subroutine inputRead_microstructure(microstructureAt,&
integer :: i,j,t,sv,myVal,e,nNodesAlreadyRead,l,k,m


-allocate(microstructureAt(nElem),source=0)
+allocate(materialAt(nElem),source=0)

do l = 1, size(fileContent)
chunkPos = IO_stringPos(fileContent(l))

@@ -715,7 +715,7 @@ subroutine inputRead_microstructure(microstructureAt,&
contInts = continuousIntValues(fileContent(l+k+m+1:),nElem,nameElemSet,mapElemSet,size(nameElemSet)) ! get affected elements
do i = 1,contInts(1)
e = mesh_FEM2DAMASK_elem(contInts(1+i))
-microstructureAt(e) = myVal
+materialAt(e) = myVal
enddo
if (initialcondTableStyle == 0) m = m + 1
enddo

@@ -723,9 +723,9 @@ subroutine inputRead_microstructure(microstructureAt,&
endif
enddo

-if(any(microstructureAt < 1)) call IO_error(180)
+if(any(materialAt < 1)) call IO_error(180)

-end subroutine inputRead_microstructure
+end subroutine inputRead_material


!--------------------------------------------------------------------------------------------------
@@ -1030,10 +1030,9 @@ pure function IPareaNormal(elem,nElem,connectivity,node)
IPareaNormal(1:3,f,i,e) = math_cross(nodePos(1:3,2) - nodePos(1:3,1), &
nodePos(1:3,3) - nodePos(1:3,1))
case (4) ! 3D 8node
-! for this cell type we get the normal of the quadrilateral face as an average of
-! four normals of triangular subfaces; since the face consists only of two triangles,
-! the sum has to be divided by two; this whole prcedure tries to compensate for
-! probable non-planar cell surfaces
+! Get the normal of the quadrilateral face as the average of four normals of triangular
+! subfaces. Since the face consists only of two triangles, the sum has to be divided
+! by two. This procedure tries to compensate for probable non-planar cell surfaces
IPareaNormal(1:3,f,i,e) = 0.0_pReal
do n = 1, m
IPareaNormal(1:3,f,i,e) = IPareaNormal(1:3,f,i,e) &
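The reworded comment describes averaging the cross products of triangular subfaces to obtain a face normal that tolerates non-planar quadrilaterals. A stand-alone sketch of that idea with made-up corner positions; the node ordering and scaling of the real IPareaNormal are not reproduced here, only the averaged direction is kept:

program quad_face_normal
  implicit none
  integer, parameter :: pReal = selected_real_kind(15)
  real(pReal), dimension(3,4) :: nodePos
  real(pReal), dimension(3)   :: normal
  integer :: n, m

  ! slightly non-planar quadrilateral (corner 4 lifted out of the plane)
  nodePos(:,1) = [0.0_pReal, 0.0_pReal, 0.0_pReal]
  nodePos(:,2) = [1.0_pReal, 0.0_pReal, 0.0_pReal]
  nodePos(:,3) = [1.0_pReal, 1.0_pReal, 0.0_pReal]
  nodePos(:,4) = [0.0_pReal, 1.0_pReal, 0.1_pReal]

  m = 4
  normal = 0.0_pReal
  do n = 1, m
    ! cross product of the two edges meeting at node n spans the triangular subface (n, n+1, n+2)
    normal = normal + cross(nodePos(:,1+mod(n  ,m)) - nodePos(:,n), &
                            nodePos(:,1+mod(n+1,m)) - nodePos(:,n))
  enddo
  normal = normal/norm2(normal)   ! keep only the averaged direction

  print'(a,3(f8.4,1x))', ' unit normal of the (non-planar) face: ', normal

contains

  pure function cross(a,b)
    real(pReal), dimension(3), intent(in) :: a, b
    real(pReal), dimension(3) :: cross
    cross = [a(2)*b(3) - a(3)*b(2), &
             a(3)*b(1) - a(1)*b(3), &
             a(1)*b(2) - a(2)*b(1)]
  end function cross

end program quad_face_normal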
src/material.f90
@@ -164,7 +164,7 @@ subroutine material_init(restart)
material_homogenization
character(len=pStringLen) :: sectionName

-print'(/,a)', ' <<<+- material init -+>>>'; flush(6)
+print'(/,a)', ' <<<+- material init -+>>>'; flush(IO_STDOUT)

phases => config_material%get('phase')
allocate(material_name_phase(phases%length))

@@ -181,10 +181,10 @@ subroutine material_init(restart)
enddo

call material_parseMicrostructure
-print*, ' Microstructure parsed'
+print*, 'Microstructure parsed'

call material_parseHomogenization
-print*, ' Homogenization parsed'
+print*, 'Homogenization parsed'


if(homogenization_maxNgrains > size(material_phaseAt,1)) call IO_error(148)

@@ -227,6 +227,7 @@ end subroutine material_init

!--------------------------------------------------------------------------------------------------
!> @brief parses the homogenization part from the material configuration
+! ToDo: This should be done in homogenization
!--------------------------------------------------------------------------------------------------
subroutine material_parseHomogenization

@@ -320,100 +321,78 @@ end subroutine material_parseHomogenization
!--------------------------------------------------------------------------------------------------
subroutine material_parseMicrostructure

-class(tNode), pointer :: microstructure, & !> pointer to microstructure list
+class(tNode), pointer :: microstructures, & !> list of microstructures
-constituentsInMicrostructure, & !> pointer to a microstructure list item
+microstructure, & !> microstructure definition
-constituents, & !> pointer to constituents list
+constituents, & !> list of constituents
-constituent, & !> pointer to each constituent
+constituent, & !> constituent definition
phases, &
homogenization

integer, dimension(:), allocatable :: &
-CounterPhase, &
+counterPhase, &
-CounterHomogenization
+counterHomogenization


-real(pReal), dimension(:,:), allocatable :: &
-microstructure_fraction !< vol fraction of each constituent in microstrcuture

+real(pReal) :: &
+frac
integer :: &
e, &
i, &
m, &
c, &
-microstructure_maxNconstituents
+maxNconstituents

-real(pReal), dimension(4) :: phase_orientation
+microstructures => config_material%get('microstructure')
+if(any(discretization_microstructureAt > microstructures%length)) &
+call IO_error(155,ext_msg='More microstructures requested than found in material.yaml')

-homogenization => config_material%get('homogenization')
+allocate(microstructure_Nconstituents(microstructures%length),source=0)
-phases => config_material%get('phase')
+do m = 1, microstructures%length
-microstructure => config_material%get('microstructure')
+microstructure => microstructures%get(m)
-allocate(microstructure_Nconstituents(microstructure%length), source = 0)
+constituents => microstructure%get('constituents')

-if(any(discretization_microstructureAt > microstructure%length)) &
-call IO_error(155,ext_msg='More microstructures in geometry than sections in material.yaml')

-do m = 1, microstructure%length
-constituentsInMicrostructure => microstructure%get(m)
-constituents => constituentsInMicrostructure%get('constituents')
microstructure_Nconstituents(m) = constituents%length
enddo
+maxNconstituents = maxval(microstructure_Nconstituents)
-microstructure_maxNconstituents = maxval(microstructure_Nconstituents)
-allocate(microstructure_fraction(microstructure_maxNconstituents,microstructure%length), source =0.0_pReal)
+allocate(material_homogenizationAt(discretization_nElem),source=0)
-allocate(material_phaseAt(microstructure_maxNconstituents,discretization_nElem), source =0)
-allocate(material_orientation0(microstructure_maxNconstituents,discretization_nIP,discretization_nElem))
-allocate(material_homogenizationAt(discretization_nElem))
allocate(material_homogenizationMemberAt(discretization_nIP,discretization_nElem),source=0)
-allocate(material_phaseMemberAt(microstructure_maxNconstituents,discretization_nIP,discretization_nElem),source=0)
+allocate(material_phaseAt(maxNconstituents,discretization_nElem),source=0)
+allocate(material_phaseMemberAt(maxNconstituents,discretization_nIP,discretization_nElem),source=0)

-allocate(CounterPhase(phases%length),source=0)
+allocate(material_orientation0(maxNconstituents,discretization_nIP,discretization_nElem))
-allocate(CounterHomogenization(homogenization%length),source=0)
+phases => config_material%get('phase')
+allocate(counterPhase(phases%length),source=0)
+homogenization => config_material%get('homogenization')
+allocate(counterHomogenization(homogenization%length),source=0)

-do m = 1, microstructure%length
+do e = 1, discretization_nElem
-constituentsInMicrostructure => microstructure%get(m)
+microstructure => microstructures%get(discretization_microstructureAt(e))
-constituents => constituentsInMicrostructure%get('constituents')
+constituents => microstructure%get('constituents')

+material_homogenizationAt(e) = homogenization%getIndex(microstructure%get_asString('homogenization'))
+do i = 1, discretization_nIP
+counterHomogenization(material_homogenizationAt(e)) = counterHomogenization(material_homogenizationAt(e)) + 1
+material_homogenizationMemberAt(i,e) = counterHomogenization(material_homogenizationAt(e))
+enddo

+frac = 0.0_pReal
do c = 1, constituents%length
constituent => constituents%get(c)
-microstructure_fraction(c,m) = constituent%get_asFloat('fraction')
+frac = frac + constituent%get_asFloat('fraction')
-enddo
-if (dNeq(sum(microstructure_fraction(:,m)),1.0_pReal)) call IO_error(153,ext_msg='constituent')
+material_phaseAt(c,e) = phases%getIndex(constituent%get_asString('phase'))
-enddo
+do i = 1, discretization_nIP
+counterPhase(material_phaseAt(c,e)) = counterPhase(material_phaseAt(c,e)) + 1
-do e = 1, discretization_nElem
+material_phaseMemberAt(c,i,e) = counterPhase(material_phaseAt(c,e))
-do i = 1, discretization_nIP
-constituentsInMicrostructure => microstructure%get(discretization_microstructureAt(e))
+call material_orientation0(c,i,e)%fromQuaternion(constituent%get_asFloats('orientation',requiredSize=4))
-constituents => constituentsInMicrostructure%get('constituents')
-do c = 1, constituents%length
-constituent => constituents%get(c)
-material_phaseAt(c,e) = phases%getIndex(constituent%get_asString('phase'))
-phase_orientation = constituent%get_asFloats('orientation')
-call material_orientation0(c,i,e)%fromQuaternion(phase_orientation)
enddo

enddo
+if (dNeq(frac,1.0_pReal)) call IO_error(153,ext_msg='constituent')

enddo

-do e = 1, discretization_nElem
-do i = 1, discretization_nIP
-constituentsInMicrostructure => microstructure%get(discretization_microstructureAt(e))
-material_homogenizationAt(e) = homogenization%getIndex(constituentsInMicrostructure%get_asString('homogenization'))
-CounterHomogenization(material_homogenizationAt(e)) = CounterHomogenization(material_homogenizationAt(e)) + 1
-material_homogenizationMemberAt(i,e) = CounterHomogenization(material_homogenizationAt(e))
-enddo
-enddo

-do e = 1, discretization_nElem
-do i = 1, discretization_nIP
-constituentsInMicrostructure => microstructure%get(discretization_microstructureAt(e))
-constituents => constituentsInMicrostructure%get('constituents')
-do c = 1, constituents%length
-CounterPhase(material_phaseAt(c,e)) = &
-CounterPhase(material_phaseAt(c,e)) + 1
-material_phaseMemberAt(c,i,e) = CounterPhase(material_phaseAt(c,e))
-enddo
-enddo
-enddo


end subroutine material_parseMicrostructure

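The reworked material_parseMicrostructure accumulates the constituent fractions of each element and raises error 153 when they do not sum to one. A small stand-alone sketch of that check with made-up fractions, using a plain relative-tolerance comparison as a stand-in for DAMASK's dNeq helper:

program fraction_check
  implicit none
  integer, parameter :: pReal = selected_real_kind(15)
  real(pReal), dimension(3) :: fractions = [0.5_pReal, 0.3_pReal, 0.2_pReal]  ! hypothetical constituents
  real(pReal) :: frac
  integer :: c

  frac = 0.0_pReal
  do c = 1, size(fractions)
    frac = frac + fractions(c)                       ! same accumulation pattern as in the hunk above
  enddo

  if (dNeq(frac, 1.0_pReal)) then
    print'(a,f6.3)', ' error 153: constituent fractions do not sum to 1, got ', frac
  else
    print'(a)', ' constituent fractions are consistent'
  endif

contains

  logical pure function dNeq(a, b)
    ! stand-in for DAMASK's dNeq: unequal beyond a small relative tolerance
    real(pReal), intent(in) :: a, b
    dNeq = abs(a-b) > 1.0e-12_pReal*max(abs(a), abs(b), 1.0_pReal)
  end function dNeq

end program fraction_check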
@@ -91,7 +91,7 @@ subroutine math_init
class(tNode), pointer :: &
num_generic

-print'(/,a)', ' <<<+- math init -+>>>'; flush(6)
+print'(/,a)', ' <<<+- math init -+>>>'; flush(IO_STDOUT)

num_generic => config_numerics%get('generic',defaultVal=emptyDict)
randomSeed = num_generic%get_asInt('random_seed', defaultVal = 0)
Some files were not shown because too many files have changed in this diff