Merge branch 'development' into geom-fully-out-of-place

commit 683161d479
@@ -25,6 +25,7 @@ before_script:
     fi
   - while [ $(awk "/$CI_PIPELINE_ID/{print NR}" $TESTROOT/GitLabCI.queue) != 1 ];
       do sleep 5m;
+      echo -e "Currently queued pipelines:\n$(cat $TESTROOT/GitLabCI.queue)\n";
       done
   - source $DAMASKROOT/env/DAMASK.sh
   - cd $DAMASKROOT/PRIVATE/testing
@@ -87,6 +88,7 @@ checkout:
   - echo $CI_PIPELINE_ID >> $TESTROOT/GitLabCI.queue
   - while [ $(awk "/$CI_PIPELINE_ID/{print NR}" $TESTROOT/GitLabCI.queue) != 1 ];
       do sleep 5m;
+      echo -e "Currently queued pipelines:\n$(cat $TESTROOT/GitLabCI.queue)\n";
       done
 script:
   - mkdir -p $DAMASKROOT

PRIVATE
@@ -1 +1 @@
-Subproject commit 555f3e01f2b5cf43ade1bd48423b890adca21771
+Subproject commit fa2885da8604c3ac46f64670393f04678dd1f45b
@@ -42,11 +42,10 @@ rot_to_TSL = damask.Rotation.from_axis_angle([-1,0,0,.75*np.pi])
 for name in filenames:
     damask.util.report(scriptName,name)
 
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
 
     coord = - table.get(options.frame)
     coord[:,2] += table.get(options.depth)[:,0]
 
-    table.add('coord',rot_to_TSL.broadcast_to(coord.shape[0]) @ coord,scriptID+' '+' '.join(sys.argv[1:]))
-
-    table.to_file(sys.stdout if name is None else name)
+    table.add('coord',rot_to_TSL.broadcast_to(coord.shape[0]) @ coord,scriptID+' '+' '.join(sys.argv[1:]))\
+         .save((sys.stdout if name is None else name),legacy=True)
@@ -39,10 +39,10 @@ if options.labels is None:
 for name in filenames:
     damask.util.report(scriptName,name)
 
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
     for label in options.labels:
-        table.add('cum_{}({})'.format('prod' if options.product else 'sum',label),
+        table = table.add('cum_{}({})'.format('prod' if options.product else 'sum',label),
                   np.cumprod(table.get(label),0) if options.product else np.cumsum(table.get(label),0),
                   scriptID+' '+' '.join(sys.argv[1:]))
 
-    table.to_file(sys.stdout if name is None else name)
+    table.save((sys.stdout if name is None else name),legacy=True)
@@ -38,8 +38,8 @@ for filename in options.filenames:
     N_digits = int(np.floor(np.log10(int(results.increments[-1][3:]))))+1
     N_digits = 5 # hack to keep test intact
     for inc in damask.util.show_progress(results.iterate('increments'),len(results.increments)):
-        table = damask.Table(np.ones(np.product(results.grid),dtype=int)*int(inc[3:]),{'inc':(1,)})
-        table = table.add('pos',coords.reshape(-1,3))
+        table = damask.Table(np.ones(np.product(results.grid),dtype=int)*int(inc[3:]),{'inc':(1,)})\
+                      .add('pos',coords.reshape(-1,3))
 
         results.pick('materialpoints',False)
         results.pick('constituents', True)
@@ -60,4 +60,4 @@ for filename in options.filenames:
             os.mkdir(dirname,0o755)
         file_out = '{}_inc{}.txt'.format(os.path.splitext(os.path.split(filename)[-1])[0],
                                          inc[3:].zfill(N_digits))
-        table.to_file(os.path.join(dirname,file_out))
+        table.save(os.path.join(dirname,file_out),legacy=True)
@@ -172,7 +172,7 @@ if filenames == []: filenames = [None]
 for name in filenames:
     damask.util.report(scriptName,name)
 
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
     grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
 
     F = table.get(options.defgrad).reshape(tuple(grid)+(-1,),order='F').reshape(tuple(grid)+(3,3))
@@ -191,4 +191,4 @@ for name in filenames:
                       volumeMismatch.reshape(-1,1,order='F'),
                       scriptID+' '+' '.join(sys.argv[1:]))
 
-    table.to_file(sys.stdout if name is None else name)
+    table.save((sys.stdout if name is None else name), legacy=True)
@@ -43,7 +43,7 @@ if options.labels is None: parser.error('no data column specified.')
 for name in filenames:
     damask.util.report(scriptName,name)
 
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
     grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
 
     for label in options.labels:
@@ -55,4 +55,4 @@ for name in filenames:
                       curl.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape),order='F'),
                       scriptID+' '+' '.join(sys.argv[1:]))
 
-    table.to_file(sys.stdout if name is None else name)
+    table.save((sys.stdout if name is None else name), legacy=True)
@@ -14,9 +14,9 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID = ' '.join([scriptName,damask.version])
 
 def derivative(coordinates,what):
 
     result = np.empty_like(what)
 
     # use differentiation by interpolation
     # as described in http://www2.math.umd.edu/~dlevy/classes/amsc466/lecture-notes/differentiation-chap.pdf
 
@@ -31,7 +31,7 @@ def derivative(coordinates,what):
                    (coordinates[0] - coordinates[1])
     result[-1,:] = (what[-1,:] - what[-2,:]) / \
                    (coordinates[-1] - coordinates[-2])
 
     return result
 
 
@@ -65,10 +65,10 @@ if options.labels is None:
 for name in filenames:
     damask.util.report(scriptName,name)
 
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
     for label in options.labels:
         table = table.add('d({})/d({})'.format(label,options.coordinates),
                           derivative(table.get(options.coordinates),table.get(label)),
                           scriptID+' '+' '.join(sys.argv[1:]))
 
-    table.to_file(sys.stdout if name is None else name)
+    table.save((sys.stdout if name is None else name), legacy=True)
@@ -47,25 +47,25 @@ parser.set_defaults(f = 'f',
 for name in filenames:
     damask.util.report(scriptName,name)
 
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
     grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
 
     F = table.get(options.f).reshape(tuple(grid)+(-1,),order='F').reshape(tuple(grid)+(3,3))
     if options.nodal:
-        table = damask.Table(damask.grid_filters.node_coord0(grid,size).reshape(-1,3,order='F'),
+        damask.Table(damask.grid_filters.node_coord0(grid,size).reshape(-1,3,order='F'),
                      {'pos':(3,)})\
              .add('avg({}).{}'.format(options.f,options.pos),
                   damask.grid_filters.node_displacement_avg(size,F).reshape(-1,3,order='F'),
                   scriptID+' '+' '.join(sys.argv[1:]))\
             .add('fluct({}).{}'.format(options.f,options.pos),
                  damask.grid_filters.node_displacement_fluct(size,F).reshape(-1,3,order='F'),
-                 scriptID+' '+' '.join(sys.argv[1:]))
-        table.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt')
+                 scriptID+' '+' '.join(sys.argv[1:]))\
+            .save((sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt'), legacy=True)
     else:
-        table = table.add('avg({}).{}'.format(options.f,options.pos),
+        table.add('avg({}).{}'.format(options.f,options.pos),
                  damask.grid_filters.cell_displacement_avg(size,F).reshape(-1,3,order='F'),
                  scriptID+' '+' '.join(sys.argv[1:]))\
             .add('fluct({}).{}'.format(options.f,options.pos),
                  damask.grid_filters.cell_displacement_fluct(size,F).reshape(-1,3,order='F'),
-                 scriptID+' '+' '.join(sys.argv[1:]))
-        table.to_file(sys.stdout if name is None else name)
+                 scriptID+' '+' '.join(sys.argv[1:]))\
+            .save((sys.stdout if name is None else name), legacy=True)
@@ -43,7 +43,7 @@ if options.labels is None: parser.error('no data column specified.')
 for name in filenames:
     damask.util.report(scriptName,name)
 
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
     grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
 
     for label in options.labels:
@@ -55,4 +55,4 @@ for name in filenames:
                       div.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)//3,order='F'),
                       scriptID+' '+' '.join(sys.argv[1:]))
 
-    table.to_file(sys.stdout if name is None else name)
+    table.save((sys.stdout if name is None else name), legacy=True)
@@ -142,7 +142,7 @@ for i,feature in enumerate(features):
 for name in filenames:
     damask.util.report(scriptName,name)
 
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
     grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
 
     neighborhood = neighborhoods[options.neighborhood]
@@ -158,7 +158,7 @@ for name in filenames:
         diffToNeighbor[:,:,:,i] = ndimage.convolve(microstructure,stencil)        # compare ID at each point...
                                                                                   # ...to every one in the specified neighborhood
                                                                                   # for same IDs at both locations ==> 0
 
     diffToNeighbor = np.sort(diffToNeighbor)                                      # sort diff such that number of changes in diff (steps)...
                                                                                   # ...reflects number of unique neighbors
     uniques = np.where(diffToNeighbor[1:-1,1:-1,1:-1,0] != 0, 1,0)                # initialize unique value counter (exclude myself [= 0])
@@ -184,4 +184,4 @@ for name in filenames:
                           distance[i,:],
                           scriptID+' '+' '.join(sys.argv[1:]))
 
-    table.to_file(sys.stdout if name is None else name)
+    table.save((sys.stdout if name is None else name), legacy=True)
@@ -63,7 +63,7 @@ if options.labels is None: parser.error('no data column specified.')
 for name in filenames:
     damask.util.report(scriptName,name)
 
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
     damask.grid_filters.coord0_check(table.get(options.pos))
 
     for label in options.labels:
@@ -73,4 +73,4 @@ for name in filenames:
                                        mode = 'wrap' if options.periodic else 'nearest'),
                       scriptID+' '+' '.join(sys.argv[1:]))
 
-    table.to_file(sys.stdout if name is None else name)
+    table.save((sys.stdout if name is None else name), legacy=True)
@@ -43,7 +43,7 @@ if options.labels is None: parser.error('no data column specified.')
 for name in filenames:
     damask.util.report(scriptName,name)
 
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
     grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
 
     for label in options.labels:
@@ -55,4 +55,4 @@ for name in filenames:
                       grad.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)*3,order='F'),
                       scriptID+' '+' '.join(sys.argv[1:]))
 
-    table.to_file(sys.stdout if name is None else name)
+    table.save((sys.stdout if name is None else name), legacy=True)
@@ -110,7 +110,7 @@ R = damask.Rotation.from_axis_angle(np.array(options.labrotation),options.degree
 for name in filenames:
     damask.util.report(scriptName,name)
 
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
 
     if options.eulers is not None:
         label = options.eulers
@@ -147,4 +147,4 @@ for name in filenames:
     if 'axisangle' in options.output:
         table = table.add('om({})'.format(label),o.as_axisangle(options.degrees), scriptID+' '+' '.join(sys.argv[1:]))
 
-    table.to_file(sys.stdout if name is None else name)
+    table.save((sys.stdout if name is None else name), legacy=True)
@@ -175,7 +175,7 @@ labels = ['S[{direction[0]:.1g}_{direction[1]:.1g}_{direction[2]:.1g}]'
 for name in filenames:
     damask.util.report(scriptName,name)
 
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
 
     o = damask.Rotation.from_quaternion(table.get(options.quaternion))
 
@@ -189,4 +189,4 @@ for name in filenames:
     for i,label in enumerate(labels):
         table = table.add(label,S[:,i],scriptID+' '+' '.join(sys.argv[1:]))
 
-    table.to_file(sys.stdout if name is None else name)
+    table.save((sys.stdout if name is None else name), legacy=True)
@@ -27,7 +27,7 @@ def sortingList(labels,whitelistitems):
         else:
             indices.append(0)
             names.append(label)
 
     return [indices,names,whitelistitems]
 
 
@@ -72,11 +72,11 @@ for name in filenames:
         continue
     damask.util.report(scriptName,name)
 
# ------------------------------------------ assemble info ---------------------------------------
 
     table.head_read()
 
# ------------------------------------------ process data ---------------------------------------
 
     specials = { \
                 '_row_': 0,
@@ -103,12 +103,12 @@ for name in filenames:
             else np.lexsort(sortingList(labels,whitelistitem))                    # reorder if unique, i.e. no "-1" in whitelistitem
     else:
         order = range(len(labels))                                                # maintain original order of labels
 
# --------------------------------------- evaluate condition ---------------------------------------
     if options.condition is not None:
         condition = options.condition                                             # copy per file, since might be altered inline
         breaker = False
 
         for position,(all,marker,column) in enumerate(set(re.findall(r'#(([s]#)?(.+?))#',condition))):   # find three groups
             idx = table.label_index(column)
             dim = table.label_dimension(column)
@@ -123,11 +123,11 @@ for name in filenames:
                                's#':'str'}[marker],idx)                           # take float or string value of data column
             elif dim > 1:                                                         # multidimensional input (vector, tensor, etc.)
                 replacement = 'np.array(table.data[{}:{}],dtype=float)'.format(idx,idx+dim)   # use (flat) array representation
 
             condition = condition.replace('#'+all+'#',replacement)
 
         if breaker: continue                                                      # found mistake in condition evaluation --> next file
 
# ------------------------------------------ assemble header ---------------------------------------
 
     table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
|
||||||
# ------------------------------------------ process and output data ------------------------------------------
|
# ------------------------------------------ process and output data ------------------------------------------
|
||||||
|
|
||||||
positions = np.array(positions)[order]
|
positions = np.array(positions)[order]
|
||||||
|
|
||||||
atOnce = options.condition is None
|
atOnce = options.condition is None
|
||||||
if atOnce: # read full array and filter columns
|
if atOnce: # read full array and filter columns
|
||||||
try:
|
try:
|
||||||
|
|
|
@@ -47,7 +47,7 @@ if filenames == []: filenames = [None]
 for name in filenames:
     damask.util.report(scriptName,name)
 
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
 
     randomSeed = int(os.urandom(4).hex(), 16) if options.randomSeed is None else options.randomSeed     # random seed per file
     rng = np.random.default_rng(randomSeed)
@@ -58,4 +58,4 @@ for name in filenames:
         rng.shuffle(uniques)
         table = table.set(label,uniques[inverse], scriptID+' '+' '.join(sys.argv[1:]))
 
-    table.to_file(sys.stdout if name is None else name)
+    table.save((sys.stdout if name is None else name), legacy=True)
@@ -154,4 +154,4 @@ for name in filenames:
                        homogenization=options.homogenization,comments=header)
     damask.util.croak(geom)
 
-    geom.to_file(os.path.splitext(name)[0]+'.geom',format='ASCII',pack=False)
+    geom.save_ASCII(os.path.splitext(name)[0]+'.geom',compress=False)
@@ -89,4 +89,4 @@ geom=damask.Geom(microstructure,options.size,
                  comments=[scriptID + ' ' + ' '.join(sys.argv[1:])])
 damask.util.croak(geom)
 
-geom.to_file(sys.stdout if name is None else name,format='ASCII',pack=False)
+geom.save_ASCII(sys.stdout if name is None else name,compress=False)
@@ -142,4 +142,4 @@ geom = damask.Geom(microstructure.reshape(grid),
                    homogenization=options.homogenization,comments=header)
 damask.util.croak(geom)
 
-geom.to_file(sys.stdout if name is None else name,format='ASCII',pack=False)
+geom.save_ASCII(sys.stdout if name is None else name,compress=False)
@@ -68,7 +68,7 @@ if options.axes is not None and not set(options.axes).issubset(set(['x','+x','-x
 for name in filenames:
     damask.util.report(scriptName,name)
 
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
     table.sort_by(['{}_{}'.format(i,options.pos) for i in range(3,0,-1)])         # x fast, y slow
     grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
 
@@ -105,5 +105,4 @@ for name in filenames:
                        homogenization=options.homogenization,comments=header)
     damask.util.croak(geom)
 
-    geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',
-                 format='ASCII',pack=False)
+    geom.save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',compress=False)
@@ -171,7 +171,7 @@ if filenames == []: filenames = [None]
 for name in filenames:
     damask.util.report(scriptName,name)
 
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
 
     size = np.ones(3)
     origin = np.zeros(3)
@@ -228,5 +228,4 @@ for name in filenames:
                        homogenization=options.homogenization,comments=header)
     damask.util.croak(geom)
 
-    geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',
-                 format='ASCII',pack=False)
+    geom.save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',compress=False)
@@ -62,7 +62,7 @@ if filenames == []: filenames = [None]
 for name in filenames:
     damask.util.report(scriptName,name)
 
-    geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    geom = damask.Geom.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
 
     grid_original = geom.grid
     damask.util.croak(geom)
@@ -169,7 +169,7 @@ for name in filenames:
     # undo any changes involving immutable microstructures
     microstructure = np.where(immutable, microstructure_original,microstructure)
 
-    geom=geom.duplicate(microstructure[0:grid_original[0],0:grid_original[1],0:grid_original[2]])
+    geom = geom.duplicate(microstructure[0:grid_original[0],0:grid_original[1],0:grid_original[2]])
     geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
 
-    geom.to_file(sys.stdout if name is None else name,format='ASCII',pack=False)
+    geom.save_ASCII(sys.stdout if name is None else name,compress=False)
@@ -31,7 +31,7 @@ def binAsBins(bin,intervals):
     bins[1] = (bin//intervals[2]) % intervals[1]
     bins[2] = bin % intervals[2]
     return bins
 
 def binsAsBin(bins,intervals):
     """Implode 3D bins into compound bin."""
     return (bins[0]*intervals[1] + bins[1])*intervals[2] + bins[2]
@@ -95,7 +95,7 @@ def directInversion (ODF,nSamples):
                         float(nInvSamples)/nOptSamples-1.0,
                         scale,nSamples))
     repetition = [None]*ODF['nBins']                                              # preallocate and clear
 
     for bin in range(ODF['nBins']):                                               # loop over bins
         repetition[bin] = int(round(ODF['dV_V'][bin]*scale))                      # calc repetition
 
@@ -105,7 +105,7 @@ def directInversion (ODF,nSamples):
     for bin in range(ODF['nBins']):
         set[i:i+repetition[bin]] = [bin]*repetition[bin]                          # fill set with bin, i.e. orientation
         i += repetition[bin]                                                      # advance set counter
 
     orientations = np.zeros((nSamples,3),'f')
     reconstructedODF = np.zeros(ODF['nBins'],'f')
     unitInc = 1.0/nSamples
@@ -117,7 +117,7 @@ def directInversion (ODF,nSamples):
             orientations[j] = np.degrees(Eulers)
             reconstructedODF[bin] += unitInc
         set[ex] = set[j]                                                          # exchange orientations
 
     return orientations, reconstructedODF
 
 
|
@ -130,7 +130,7 @@ def MonteCarloEulers (ODF,nSamples):
|
||||||
orientations = np.zeros((nSamples,3),'f')
|
orientations = np.zeros((nSamples,3),'f')
|
||||||
reconstructedODF = np.zeros(ODF['nBins'],'f')
|
reconstructedODF = np.zeros(ODF['nBins'],'f')
|
||||||
unitInc = 1.0/nSamples
|
unitInc = 1.0/nSamples
|
||||||
|
|
||||||
for j in range(nSamples):
|
for j in range(nSamples):
|
||||||
MC = maxdV_V*2.0
|
MC = maxdV_V*2.0
|
||||||
bin = 0
|
bin = 0
|
||||||
|
@@ -153,7 +153,7 @@ def MonteCarloBins (ODF,nSamples):
     orientations = np.zeros((nSamples,3),'f')
     reconstructedODF = np.zeros(ODF['nBins'],'f')
     unitInc = 1.0/nSamples
 
     for j in range(nSamples):
         MC = maxdV_V*2.0
         bin = 0
@@ -173,14 +173,14 @@ def TothVanHoutteSTAT (ODF,nSamples):
     orientations = np.zeros((nSamples,3),'f')
     reconstructedODF = np.zeros(ODF['nBins'],'f')
     unitInc = 1.0/nSamples
 
     selectors = [random.random() for i in range(nSamples)]
     selectors.sort()
     indexSelector = 0
 
     cumdV_V = 0.0
     countSamples = 0
 
     for bin in range(ODF['nBins']) :
         cumdV_V += ODF['dV_V'][bin]
         while indexSelector < nSamples and selectors[indexSelector] < cumdV_V:
@@ -191,7 +191,7 @@ def TothVanHoutteSTAT (ODF,nSamples):
             indexSelector += 1
 
     damask.util.croak('created set of %i when asked to deliver %i'%(countSamples,nSamples))
 
     return orientations, reconstructedODF
 
 
@@ -233,8 +233,8 @@ if filenames == []: filenames = [None]
 
 for name in filenames:
     damask.util.report(scriptName,name)
 
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
 
     randomSeed = int(os.urandom(4).hex(),16) if options.randomSeed is None else options.randomSeed     # random seed per file
     random.seed(randomSeed)
@@ -253,7 +253,7 @@ for name in filenames:
     if eulers.shape[0] != ODF['nBins']:
         damask.util.croak('expecting %i values but got %i'%(ODF['nBins'],eulers.shape[0]))
         continue
 
     # ----- build binnedODF array and normalize ------------------------------------------------------
     sumdV_V = 0.0
     ODF['dV_V'] = [None]*ODF['nBins']
@@ -267,7 +267,7 @@ for name in filenames:
         if ODF['dV_V'][b] > 0.0:
             sumdV_V += ODF['dV_V'][b]
             ODF['nNonZero'] += 1
 
     for b in range(ODF['nBins']):
         ODF['dV_V'][b] /= sumdV_V                                                 # normalize dV/V
 
@@ -277,19 +277,19 @@ for name in filenames:
                        'Volume integral of ODF: %12.11f\n'%sumdV_V,
                        'Reference Integral: %12.11f\n'%(ODF['limit'][0]*ODF['limit'][2]*(1-math.cos(ODF['limit'][1]))),
                       ])
 
     Functions = {'IA': 'directInversion', 'STAT': 'TothVanHoutteSTAT', 'MC': 'MonteCarloBins'}
     method = Functions[options.algorithm]
 
     Orientations, ReconstructedODF = (globals()[method])(ODF,options.number)
 
     # calculate accuracy of sample
     squaredDiff = {'orig':0.0,method:0.0}
     squaredRelDiff = {'orig':0.0,method:0.0}
     mutualProd = {'orig':0.0,method:0.0}
     indivSum = {'orig':0.0,method:0.0}
     indivSquaredSum = {'orig':0.0,method:0.0}
 
     for bin in range(ODF['nBins']):
         squaredDiff[method] += (ODF['dV_V'][bin] - ReconstructedODF[bin])**2
         if ODF['dV_V'][bin] > 0.0:
@@ -299,7 +299,7 @@ for name in filenames:
             indivSquaredSum[method] += ReconstructedODF[bin]**2
         indivSum['orig'] += ODF['dV_V'][bin]
         indivSquaredSum['orig'] += ODF['dV_V'][bin]**2
 
     damask.util.croak(['sqrt(N*)RMSD of ODFs:\t %12.11f'% math.sqrt(options.number*squaredDiff[method]),
                        'RMSrD of ODFs:\t %12.11f'%math.sqrt(squaredRelDiff[method]),
                        'rMSD of ODFs:\t %12.11f'%(squaredDiff[method]/indivSquaredSum['orig']),
@@ -311,10 +311,10 @@ for name in filenames:
                        (ODF['nNonZero']*math.sqrt((indivSquaredSum['orig']/ODF['nNonZero']-(indivSum['orig']/ODF['nNonZero'])**2)*\
                        (indivSquaredSum[method]/ODF['nNonZero']-(indivSum[method]/ODF['nNonZero'])**2)))),
                       ])
 
     if method == 'IA' and options.number < ODF['nNonZero']:
         strOpt = '(%i)'%ODF['nNonZero']
 
     formatwidth = 1+int(math.log10(options.number))
 
     materialConfig = [
@@ -324,12 +324,12 @@ for name in filenames:
                       '<microstructure>',
                       '#-------------------#',
                      ]
 
     for i,ID in enumerate(range(options.number)):
         materialConfig += ['[Grain%s]'%(str(ID+1).zfill(formatwidth)),
                            '(constituent) phase %i texture %s fraction 1.0'%(options.phase,str(ID+1).rjust(formatwidth)),
                           ]
 
     materialConfig += [
                        '#-------------------#',
                        '<texture>',
@@ -338,12 +338,12 @@ for name in filenames:
 
     for ID in range(options.number):
         eulers = Orientations[ID]
 
         materialConfig += ['[Grain%s]'%(str(ID+1).zfill(formatwidth)),
                            '(gauss) phi1 {} Phi {} phi2 {} scatter 0.0 fraction 1.0'.format(*eulers),
                           ]
 
     #--- output finalization --------------------------------------------------------------------------
 
     with (open(os.path.splitext(name)[0]+'_'+method+'_'+str(options.number)+'_material.config','w')) as outfile:
         outfile.write('\n'.join(materialConfig)+'\n')
@@ -130,10 +130,10 @@ def geometry():
 
 
 #-------------------------------------------------------------------------------------------------
-def initial_conditions(microstructures):
+def initial_conditions(materials):
     elements = []
     element = 0
-    for id in microstructures:
+    for id in materials:
         element += 1
         if len(elements) < id:
             for i in range(id-len(elements)):
@@ -153,7 +153,7 @@ def initial_conditions(microstructures):
     for grain,elementList in enumerate(elements):
         cmds.append([\
                      "*new_icond",
-                     "*icond_name microstructure_%i"%(grain+1),
+                     "*icond_name material_%i"%(grain+1),
                      "*icond_type state_variable",
                      "*icond_param_value state_var_id 2",
                      "*icond_dof_value var %i"%(grain+1),
@@ -196,15 +196,15 @@ if filenames == []: filenames = [None]
 for name in filenames:
     damask.util.report(scriptName,name)
 
-    geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
-    microstructure = geom.microstructure.flatten(order='F')
+    geom = damask.Geom.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    materials = geom.materials.flatten(order='F')
 
     cmds = [\
             init(),
             mesh(geom.grid,geom.size),
             material(),
             geometry(),
-            initial_conditions(microstructure),
+            initial_conditions(materials),
             '*identify_sets',
             '*show_model',
             '*redraw',
@@ -78,7 +78,7 @@ class myThread (threading.Thread):
             perturbedSeedsVFile = StringIO()
             myBestSeedsVFile.seek(0)
 
-            perturbedSeedsTable = damask.Table.from_ASCII(myBestSeedsVFile)
+            perturbedSeedsTable = damask.Table.load(myBestSeedsVFile)
             coords = perturbedSeedsTable.get('pos')
             i = 0
             for ms,coord in enumerate(coords):
@@ -89,8 +89,7 @@ class myThread (threading.Thread):
                     coords[i]=newCoords
                     direction[i]*=2.
                     i+= 1
-            perturbedSeedsTable.set('pos',coords)
-            perturbedSeedsTable.to_file(perturbedSeedsVFile)
+            perturbedSeedsTable.set('pos',coords).save(perturbedSeedsVFile,legacy=True)
 
             #--- do tesselation with perturbed seed file ------------------------------------------------------
             perturbedGeomVFile.close()
@@ -101,7 +100,7 @@ class myThread (threading.Thread):
             perturbedGeomVFile.seek(0)
 
             #--- evaluate current seeds file ------------------------------------------------------------------
-            perturbedGeom = damask.Geom.from_file(perturbedGeomVFile)
+            perturbedGeom = damask.Geom.load_ASCII(perturbedGeomVFile)
             myNmicrostructures = len(np.unique(perturbedGeom.microstructure))
             currentData=np.bincount(perturbedGeom.microstructure.ravel())[1:]/points
             currentError=[]
@@ -213,14 +212,14 @@ if options.randomSeed is None:
     options.randomSeed = int(os.urandom(4).hex(),16)
 damask.util.croak(options.randomSeed)
 delta = options.scale/np.array(options.grid)
-baseFile=os.path.splitext(os.path.basename(options.seedFile))[0]
+baseFile = os.path.splitext(os.path.basename(options.seedFile))[0]
 points = np.array(options.grid).prod().astype('float')
 
 # ----------- calculate target distribution and bin edges
-targetGeom = damask.Geom.from_file(os.path.splitext(os.path.basename(options.target))[0]+'.geom')
+targetGeom = damask.Geom.load_ASCII(os.path.splitext(os.path.basename(options.target))[0]+'.geom')
 nMicrostructures = len(np.unique(targetGeom.microstructure))
 targetVolFrac = np.bincount(targetGeom.microstructure.flatten())/targetGeom.grid.prod().astype(np.float)
-target=[]
+target = []
 for i in range(1,nMicrostructures+1):
     targetHist,targetBins = np.histogram(targetVolFrac,bins=i) #bin boundaries
     target.append({'histogram':targetHist,'bins':targetBins})
@@ -243,7 +242,7 @@ initialGeomVFile = StringIO()
 initialGeomVFile.write(damask.util.execute('geom_fromVoronoiTessellation '+
                                            ' -g '+' '.join(list(map(str, options.grid))),bestSeedsVFile)[0])
 initialGeomVFile.seek(0)
-initialGeom = damask.Geom.from_file(initialGeomVFile)
+initialGeom = damask.Geom.load_ASCII(initialGeomVFile)
 
 if len(np.unique(targetGeom.microstructure)) != nMicrostructures:
     damask.util.croak('error. Microstructure count mismatch')
|
||||||
initialGeomVFile.close()
|
initialGeomVFile.close()
|
||||||
|
|
||||||
# start mulithreaded monte carlo simulation
|
# start mulithreaded monte carlo simulation
|
||||||
threads=[]
|
threads = []
|
||||||
s=threading.Semaphore(1)
|
s = threading.Semaphore(1)
|
||||||
|
|
||||||
for i in range(options.threads):
|
for i in range(options.threads):
|
||||||
threads.append(myThread(i))
|
threads.append(myThread(i))
|
||||||
|
|
|
@@ -17,7 +17,7 @@ scriptID = ' '.join([scriptName,damask.version])
 #--------------------------------------------------------------------------------------------------
 
 parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
-Create seed file taking microstructure indices from given geom file.
+Create seed file taking material indices from given geom file.
 Indices can be black-listed or white-listed.
 
 """, version = scriptID)
@@ -46,12 +46,12 @@ options.blacklist = [int(i) for i in options.blacklist]
 for name in filenames:
     damask.util.report(scriptName,name)
 
-    geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
-    microstructure = geom.microstructure.reshape((-1,1),order='F')
+    geom = damask.Geom.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    materials = geom.materials.reshape((-1,1),order='F')
 
-    mask = np.logical_and(np.in1d(microstructure,options.whitelist,invert=False) if options.whitelist else \
+    mask = np.logical_and(np.in1d(materials,options.whitelist,invert=False) if options.whitelist else \
                           np.full(geom.grid.prod(),True,dtype=bool),
-                          np.in1d(microstructure,options.blacklist,invert=True) if options.blacklist else \
+                          np.in1d(materials,options.blacklist,invert=True) if options.blacklist else \
                           np.full(geom.grid.prod(),True,dtype=bool))
 
     seeds = damask.grid_filters.cell_coord0(geom.grid,geom.size).reshape(-1,3,order='F')
@@ -63,6 +63,6 @@ for name in filenames:
                 'origin\tx {}\ty {}\tz {}'.format(*geom.origin),
                 'homogenization\t{}'.format(geom.homogenization)]
 
-    table = damask.Table(seeds[mask],{'pos':(3,)},comments)
-    table = table.add('microstructure',microstructure[mask])
-    table.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.seeds')
+    damask.Table(seeds[mask],{'pos':(3,)},comments)\
+          .add('material',materials[mask].astype(int))\
+          .save(sys.stdout if name is None else os.path.splitext(name)[0]+'.seeds',legacy=True)
@@ -52,7 +52,7 @@ options.box = np.array(options.box).reshape(3,2)
 
 for name in filenames:
     damask.util.report(scriptName,name)
-    geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    geom = damask.Geom.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
 
     offset =(np.amin(options.box, axis=1)*geom.grid/geom.size).astype(int)
     box = np.amax(options.box, axis=1) \
@@ -91,6 +91,6 @@ for name in filenames:
                 'homogenization\t{}'.format(geom.homogenization)]
 
     table = damask.Table(seeds,{'pos':(3,),'microstructure':(1,)},comments)
-    table.set('microstructure',table.get('microstructure').astype(np.int))
-    table.to_file(sys.stdout if name is None else \
-                  os.path.splitext(name)[0]+f'_poked_{options.N}.seeds')
+    table.set('microstructure',table.get('microstructure').astype(np.int))\
+         .save(sys.stdout if name is None else \
+               os.path.splitext(name)[0]+f'_poked_{options.N}.seeds',legacy=True)
@@ -154,12 +154,12 @@ for name in filenames:
                 'randomSeed\t{}'.format(options.randomSeed),
                ]
 
-    table = damask.Table(np.hstack((seeds,eulers)),{'pos':(3,),'euler':(3,)},comments)
-    table = table.add('microstructure',np.arange(options.microstructure,options.microstructure + options.N,dtype=int))
+    table = damask.Table(np.hstack((seeds,eulers)),{'pos':(3,),'euler':(3,)},comments)\
+                  .add('microstructure',np.arange(options.microstructure,options.microstructure + options.N,dtype=int))
 
     if options.weights:
         weights = np.random.uniform(low = 0, high = options.max, size = options.N) if options.max > 0.0 \
             else np.random.normal(loc = options.mean, scale = options.sigma, size = options.N)
         table = table.add('weight',weights)
 
-    table.to_file(sys.stdout if name is None else name)
+    table.save(sys.stdout if name is None else name,legacy=True)
@@ -18,6 +18,7 @@ from ._lattice import Symmetry, Lattice# noqa
 from ._orientation import Orientation # noqa
 from ._result import Result # noqa
 from ._geom import Geom # noqa
+from ._material import Material # noqa
 from . import solver # noqa
 
 # deprecated
@@ -235,100 +235,128 @@ class Colormap(mpl.colors.ListedColormap):
         return Colormap(np.array(rev.colors),rev.name[:-4] if rev.name.endswith('_r_r') else rev.name)
 
 
-    def to_file(self,fname=None,format='ParaView'):
+    def save_paraview(self,fname=None):
         """
-        Export colormap to file for use in external programs.
+        Write colormap to JSON file for Paraview.
 
         Parameters
         ----------
        fname : file, str, or pathlib.Path, optional.
            Filename to store results. If not given, the filename will
-           consist of the name of the colormap and an extension that
-           depends on the file format.
-        format : {'ParaView', 'ASCII', 'GOM', 'gmsh'}, optional
-           File format, defaults to 'ParaView'. Available formats are:
-           - ParaView: JSON file, extension '.json'.
-           - ASCII: Plain text file, extension '.txt'.
-           - GOM: Aramis GOM (DIC), extension '.legend'.
-           - Gmsh: Gmsh FEM mesh-generator, extension '.msh'.
+           consist of the name of the colormap and extension '.json'.
 
         """
         if fname is not None:
             try:
-                f = open(fname,'w')
+                fhandle = open(fname,'w')
             except TypeError:
-                f = fname
+                fhandle = fname
         else:
-            f = None
+            fhandle = None
 
-        if format.lower() == 'paraview':
-            Colormap._export_paraview(self,f)
-        elif format.lower() == 'ascii':
-            Colormap._export_ASCII(self,f)
-        elif format.lower() == 'gom':
-            Colormap._export_GOM(self,f)
-        elif format.lower() == 'gmsh':
-            Colormap._export_gmsh(self,f)
-        else:
-            raise ValueError('Unknown output format: {format}.')
-
-    @staticmethod
-    def _export_paraview(colormap,fhandle=None):
-        """Write colormap to JSON file for Paraview."""
         colors = []
-        for i,c in enumerate(np.round(colormap.colors,6).tolist()):
+        for i,c in enumerate(np.round(self.colors,6).tolist()):
             colors+=[i]+c
 
         out = [{
                 'Creator':util.execution_stamp('Colormap'),
                 'ColorSpace':'RGB',
-                'Name':colormap.name,
+                'Name':self.name,
                 'DefaultMap':True,
                 'RGBPoints':colors
               }]
         if fhandle is None:
-            with open(colormap.name.replace(' ','_')+'.json', 'w') as f:
+            with open(self.name.replace(' ','_')+'.json', 'w') as f:
                 json.dump(out, f,indent=4)
         else:
             json.dump(out,fhandle,indent=4)
 
-    @staticmethod
-    def _export_ASCII(colormap,fhandle=None):
-        """Write colormap to ASCII table."""
-        labels = {'RGBA':4} if colormap.colors.shape[1] == 4 else {'RGB': 3}
-        t = Table(colormap.colors,labels,f'Creator: {util.execution_stamp("Colormap")}')
+    def save_ASCII(self,fname=None):
+        """
+        Write colormap to ASCII table.
+
+        Parameters
+        ----------
+        fname : file, str, or pathlib.Path, optional.
+            Filename to store results. If not given, the filename will
+            consist of the name of the colormap and extension '.txt'.
+
+        """
+        if fname is not None:
+            try:
+                fhandle = open(fname,'w')
+            except TypeError:
+                fhandle = fname
+        else:
+            fhandle = None
+
+        labels = {'RGBA':4} if self.colors.shape[1] == 4 else {'RGB': 3}
+        t = Table(self.colors,labels,f'Creator: {util.execution_stamp("Colormap")}')
 
         if fhandle is None:
-            with open(colormap.name.replace(' ','_')+'.txt', 'w') as f:
-                t.to_file(f,new_style=True)
+            with open(self.name.replace(' ','_')+'.txt', 'w') as f:
+                t.save(f)
         else:
-            t.to_file(fhandle,new_style=True)
+            t.save(fhandle)
 
-    @staticmethod
-    def _export_GOM(colormap,fhandle=None):
-        """Write colormap to GOM Aramis compatible format."""
+    def save_GOM(self,fname=None):
+        """
+        Write colormap to GOM Aramis compatible format.
+
+        Parameters
+        ----------
+        fname : file, str, or pathlib.Path, optional.
+            Filename to store results. If not given, the filename will
+            consist of the name of the colormap and extension '.legend'.
+
+        """
+        if fname is not None:
+            try:
+                fhandle = open(fname,'w')
+            except TypeError:
+                fhandle = fname
+        else:
+            fhandle = None
         # ToDo: test in GOM
-        GOM_str = f'1 1 {colormap.name.replace(" ","_")} 9 {colormap.name.replace(" ","_")} ' \
|
GOM_str = '1 1 {name} 9 {name} '.format(name=self.name.replace(" ","_")) \
|
||||||
+ '0 1 0 3 0 0 -1 9 \\ 0 0 0 255 255 255 0 0 255 ' \
|
+ '0 1 0 3 0 0 -1 9 \\ 0 0 0 255 255 255 0 0 255 ' \
|
||||||
+ f'30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 {len(colormap.colors)}' \
|
+ f'30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 {len(self.colors)}' \
|
||||||
+ ' '.join([f' 0 {c[0]} {c[1]} {c[2]} 255 1' for c in reversed((colormap.colors*255).astype(int))]) \
|
+ ' '.join([f' 0 {c[0]} {c[1]} {c[2]} 255 1' for c in reversed((self.colors*255).astype(int))]) \
|
||||||
+ '\n'
|
+ '\n'
|
||||||
if fhandle is None:
|
if fhandle is None:
|
||||||
with open(colormap.name.replace(' ','_')+'.legend', 'w') as f:
|
with open(self.name.replace(' ','_')+'.legend', 'w') as f:
|
||||||
f.write(GOM_str)
|
f.write(GOM_str)
|
||||||
else:
|
else:
|
||||||
fhandle.write(GOM_str)
|
fhandle.write(GOM_str)
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
def save_gmsh(self,fname=None):
|
||||||
def _export_gmsh(colormap,fhandle=None):
|
"""
|
||||||
"""Write colormap to Gmsh compatible format."""
|
Write colormap to Gmsh compatible format.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
fname : file, str, or pathlib.Path, optional.
|
||||||
|
Filename to store results. If not given, the filename will
|
||||||
|
consist of the name of the colormap and extension '.msh'.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if fname is not None:
|
||||||
|
try:
|
||||||
|
fhandle = open(fname,'w')
|
||||||
|
except TypeError:
|
||||||
|
fhandle = fname
|
||||||
|
else:
|
||||||
|
fhandle = None
|
||||||
# ToDo: test in gmsh
|
# ToDo: test in gmsh
|
||||||
gmsh_str = 'View.ColorTable = {\n' \
|
gmsh_str = 'View.ColorTable = {\n' \
|
||||||
+'\n'.join([f'{c[0]},{c[1]},{c[2]},' for c in colormap.colors[:,:3]*255]) \
|
+'\n'.join([f'{c[0]},{c[1]},{c[2]},' for c in self.colors[:,:3]*255]) \
|
||||||
+'\n}\n'
|
+'\n}\n'
|
||||||
if fhandle is None:
|
if fhandle is None:
|
||||||
with open(colormap.name.replace(' ','_')+'.msh', 'w') as f:
|
with open(self.name.replace(' ','_')+'.msh', 'w') as f:
|
||||||
f.write(gmsh_str)
|
f.write(gmsh_str)
|
||||||
else:
|
else:
|
||||||
fhandle.write(gmsh_str)
|
fhandle.write(gmsh_str)
|
||||||
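Taken together, these hunks replace the format-dispatching to_file(format=...) with one save_* method per target program. A sketch of the resulting call sites (from_predefined and the colormap name are assumptions, not part of this diff):

    import damask

    cmap = damask.Colormap.from_predefined('viridis')  # assumed unchanged constructor
    cmap.save_paraview('stress.json')  # JSON colormap for ParaView
    cmap.save_ASCII('stress.txt')      # plain-text table
    cmap.save_GOM('stress.legend')     # GOM Aramis legend
    cmap.save_gmsh('stress.msh')       # Gmsh color table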
|
|
|
@ -1,7 +1,5 @@
|
||||||
import sys
|
|
||||||
import copy
|
import copy
|
||||||
import multiprocessing
|
import multiprocessing as mp
|
||||||
from io import StringIO
|
|
||||||
from functools import partial
|
from functools import partial
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
|
@ -60,11 +58,11 @@ class Geom:
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
"""Basic information on geometry definition."""
|
"""Basic information on geometry definition."""
|
||||||
return util.srepr([
|
return util.srepr([
|
||||||
f'grid a b c: {util.srepr(self.grid, " x ")}',
|
f'grid a b c: {util.srepr(self.grid, " x ")}',
|
||||||
f'size x y z: {util.srepr(self.size, " x ")}',
|
f'size x y z: {util.srepr(self.size, " x ")}',
|
||||||
f'origin x y z: {util.srepr(self.origin," ")}',
|
f'origin x y z: {util.srepr(self.origin," ")}',
|
||||||
f'# materialpoints: {self.N_materials}',
|
f'# materials: {self.N_materials}',
|
||||||
f'max materialpoint: {np.nanmax(self.materials)}',
|
f'max material: {np.nanmax(self.materials)}',
|
||||||
])
|
])
|
||||||
|
|
||||||
|
|
||||||
|
@ -102,12 +100,12 @@ class Geom:
|
||||||
message.append(util.emph( f'origin x y z: {util.srepr( self.origin," ")}'))
|
message.append(util.emph( f'origin x y z: {util.srepr( self.origin," ")}'))
|
||||||
|
|
||||||
if other.N_materials != self.N_materials:
|
if other.N_materials != self.N_materials:
|
||||||
message.append(util.delete(f'# materialpoints: {other.N_materials}'))
|
message.append(util.delete(f'# materials: {other.N_materials}'))
|
||||||
message.append(util.emph( f'# materialpoints: { self.N_materials}'))
|
message.append(util.emph( f'# materials: { self.N_materials}'))
|
||||||
|
|
||||||
if np.nanmax(other.materials) != np.nanmax(self.materials):
|
if np.nanmax(other.materials) != np.nanmax(self.materials):
|
||||||
message.append(util.delete(f'max materialpoint: {np.nanmax(other.materials)}'))
|
message.append(util.delete(f'max material: {np.nanmax(other.materials)}'))
|
||||||
message.append(util.emph( f'max materialpoint: {np.nanmax( self.materials)}'))
|
message.append(util.emph( f'max material: {np.nanmax( self.materials)}'))
|
||||||
|
|
||||||
return util.return_message(message)
|
return util.return_message(message)
|
||||||
|
|
||||||
|
@ -123,7 +121,7 @@ class Geom:
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def from_file(fname):
|
def load_ASCII(fname):
|
||||||
"""
|
"""
|
||||||
Read a geom file.
|
Read a geom file.
|
||||||
|
|
||||||
|
@ -187,7 +185,7 @@ class Geom:
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def from_vtr(fname):
|
def load(fname):
|
||||||
"""
|
"""
|
||||||
Read a VTK rectilinear grid.
|
Read a VTK rectilinear grid.
|
||||||
|
|
||||||
|
@ -198,13 +196,13 @@ class Geom:
|
||||||
Valid extension is .vtr, it will be appended if not given.
|
Valid extension is .vtr, it will be appended if not given.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
v = VTK.from_file(fname if str(fname).endswith('.vtr') else str(fname)+'.vtr')
|
v = VTK.load(fname if str(fname).endswith('.vtr') else str(fname)+'.vtr')
|
||||||
comments = v.get_comments()
|
comments = v.get_comments()
|
||||||
grid = np.array(v.vtk_data.GetDimensions())-1
|
grid = np.array(v.vtk_data.GetDimensions())-1
|
||||||
bbox = np.array(v.vtk_data.GetBounds()).reshape(3,2).T
|
bbox = np.array(v.vtk_data.GetBounds()).reshape(3,2).T
|
||||||
size = bbox[1] - bbox[0]
|
size = bbox[1] - bbox[0]
|
||||||
|
|
||||||
return Geom(v.get('materialpoint').reshape(grid,order='F'),size,bbox[0],comments=comments)
|
return Geom(v.get('material').reshape(grid,order='F'),size,bbox[0],comments=comments)
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
|
@ -241,7 +239,7 @@ class Geom:
|
||||||
seeds_p = seeds
|
seeds_p = seeds
|
||||||
coords = grid_filters.cell_coord0(grid,size).reshape(-1,3)
|
coords = grid_filters.cell_coord0(grid,size).reshape(-1,3)
|
||||||
|
|
||||||
pool = multiprocessing.Pool(processes = int(environment.options['DAMASK_NUM_THREADS']))
|
pool = mp.Pool(processes = int(environment.options['DAMASK_NUM_THREADS']))
|
||||||
result = pool.map_async(partial(Geom._find_closest_seed,seeds_p,weights_p), [coord for coord in coords])
|
result = pool.map_async(partial(Geom._find_closest_seed,seeds_p,weights_p), [coord for coord in coords])
|
||||||
pool.close()
|
pool.close()
|
||||||
pool.join()
|
pool.join()
|
||||||
|
@ -286,131 +284,102 @@ class Geom:
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def to_file(self,fname,format='vtr',pack=None):
|
def save_ASCII(self,fname,compress=None):
|
||||||
"""
|
"""
|
||||||
Writes a geom file.
|
Writes a geom file.
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
fname : str or file handle
|
fname : str or file handle
|
||||||
Geometry file to write.
|
Geometry file to write with extension '.geom'.
|
||||||
format : {'vtr', 'ASCII'}, optional
|
compress : bool, optional
|
||||||
File format, defaults to 'vtr'. Available formats are:
|
Compress geometry with 'x of y' and 'a to b'.
|
||||||
- vtr: VTK rectilinear grid file, extension '.vtr'.
|
|
||||||
- ASCII: Plain text file, extension '.geom'.
|
|
||||||
pack : bool, optional
|
|
||||||
Compress ASCII geometry with 'x of y' and 'a to b'.
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
def _to_ASCII(geom,fname,pack=None):
|
header = [f'{len(self.comments)+4} header'] + self.comments \
|
||||||
"""
|
+ ['grid a {} b {} c {}'.format(*self.grid),
|
||||||
Writes a geom file.
|
'size x {} y {} z {}'.format(*self.size),
|
||||||
|
'origin x {} y {} z {}'.format(*self.origin),
|
||||||
|
'homogenization 1',
|
||||||
|
]
|
||||||
|
|
||||||
Parameters
|
grid = self.grid
|
||||||
----------
|
|
||||||
geom : Geom object
|
|
||||||
Geometry to write.
|
|
||||||
fname : str or file handle
|
|
||||||
Geometry file to write.
|
|
||||||
pack : bool, optional
|
|
||||||
Compress geometry with 'x of y' and 'a to b'.
|
|
||||||
|
|
||||||
"""
|
if compress is None:
|
||||||
header = [f'{len(geom.comments)+4} header'] + geom.comments \
|
plain = grid.prod()/self.N_materials < 250
|
||||||
+[ 'grid a {} b {} c {}'.format(*geom.grid),
|
|
||||||
'size x {} y {} z {}'.format(*geom.size),
|
|
||||||
'origin x {} y {} z {}'.format(*geom.origin),
|
|
||||||
'homogenization 1',
|
|
||||||
]
|
|
||||||
|
|
||||||
grid = geom.grid
|
|
||||||
|
|
||||||
if pack is None:
|
|
||||||
plain = grid.prod()/geom.N_materials < 250
|
|
||||||
else:
|
|
||||||
plain = not pack
|
|
||||||
|
|
||||||
if plain:
|
|
||||||
format_string = '%g' if geom.materials.dtype in np.sctypes['float'] else \
|
|
||||||
'%{}i'.format(1+int(np.floor(np.log10(np.nanmax(geom.materials)))))
|
|
||||||
np.savetxt(fname,
|
|
||||||
geom.materials.reshape([grid[0],np.prod(grid[1:])],order='F').T,
|
|
||||||
header='\n'.join(header), fmt=format_string, comments='')
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
f = open(fname,'w')
|
|
||||||
except TypeError:
|
|
||||||
f = fname
|
|
||||||
|
|
||||||
compressType = None
|
|
||||||
former = start = -1
|
|
||||||
reps = 0
|
|
||||||
for current in geom.materials.flatten('F'):
|
|
||||||
if abs(current - former) == 1 and (start - current) == reps*(former - current):
|
|
||||||
compressType = 'to'
|
|
||||||
reps += 1
|
|
||||||
elif current == former and start == former:
|
|
||||||
compressType = 'of'
|
|
||||||
reps += 1
|
|
||||||
else:
|
|
||||||
if compressType is None:
|
|
||||||
f.write('\n'.join(header)+'\n')
|
|
||||||
elif compressType == '.':
|
|
||||||
f.write(f'{former}\n')
|
|
||||||
elif compressType == 'to':
|
|
||||||
f.write(f'{start} to {former}\n')
|
|
||||||
elif compressType == 'of':
|
|
||||||
f.write(f'{reps} of {former}\n')
|
|
||||||
|
|
||||||
compressType = '.'
|
|
||||||
start = current
|
|
||||||
reps = 1
|
|
||||||
|
|
||||||
former = current
|
|
||||||
|
|
||||||
if compressType == '.':
|
|
||||||
f.write(f'{former}\n')
|
|
||||||
elif compressType == 'to':
|
|
||||||
f.write(f'{start} to {former}\n')
|
|
||||||
elif compressType == 'of':
|
|
||||||
f.write(f'{reps} of {former}\n')
|
|
||||||
|
|
||||||
|
|
||||||
def _to_vtr(geom,fname=None):
|
|
||||||
"""
|
|
||||||
Generates vtk rectilinear grid.
|
|
||||||
|
|
||||||
Parameters
|
|
||||||
----------
|
|
||||||
geom : Geom object
|
|
||||||
Geometry to write.
|
|
||||||
fname : str, optional
|
|
||||||
Filename to write. If no file is given, a string is returned.
|
|
||||||
Valid extension is .vtr, it will be appended if not given.
|
|
||||||
|
|
||||||
"""
|
|
||||||
v = VTK.from_rectilinearGrid(geom.grid,geom.size,geom.origin)
|
|
||||||
v.add(geom.materials.flatten(order='F'),'materialpoint')
|
|
||||||
v.add_comments(geom.comments)
|
|
||||||
|
|
||||||
if fname:
|
|
||||||
v.to_file(fname if str(fname).endswith('.vtr') else str(fname)+'.vtr')
|
|
||||||
else:
|
|
||||||
sys.stdout.write(v.__repr__())
|
|
||||||
|
|
||||||
if format.lower() == 'ascii':
|
|
||||||
return _to_ASCII(self,fname,pack)
|
|
||||||
elif format.lower() == 'vtr':
|
|
||||||
return _to_vtr(self,fname)
|
|
||||||
else:
|
else:
|
||||||
raise TypeError(f'Unknown format {format}.')
|
plain = not compress
|
||||||
|
|
||||||
def as_ASCII(self,pack=False):
|
if plain:
|
||||||
"""Format geometry as human-readable ASCII."""
|
format_string = '%g' if self.materials.dtype in np.sctypes['float'] else \
|
||||||
f = StringIO()
|
'%{}i'.format(1+int(np.floor(np.log10(np.nanmax(self.materials)))))
|
||||||
self.to_file(f,'ASCII',pack)
|
np.savetxt(fname,
|
||||||
f.seek(0)
|
self.materials.reshape([grid[0],np.prod(grid[1:])],order='F').T,
|
||||||
return ''.join(f.readlines())
|
header='\n'.join(header), fmt=format_string, comments='')
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
f = open(fname,'w')
|
||||||
|
except TypeError:
|
||||||
|
f = fname
|
||||||
|
|
||||||
|
compressType = None
|
||||||
|
former = start = -1
|
||||||
|
reps = 0
|
||||||
|
for current in self.materials.flatten('F'):
|
||||||
|
if abs(current - former) == 1 and (start - current) == reps*(former - current):
|
||||||
|
compressType = 'to'
|
||||||
|
reps += 1
|
||||||
|
elif current == former and start == former:
|
||||||
|
compressType = 'of'
|
||||||
|
reps += 1
|
||||||
|
else:
|
||||||
|
if compressType is None:
|
||||||
|
f.write('\n'.join(header)+'\n')
|
||||||
|
elif compressType == '.':
|
||||||
|
f.write(f'{former}\n')
|
||||||
|
elif compressType == 'to':
|
||||||
|
f.write(f'{start} to {former}\n')
|
||||||
|
elif compressType == 'of':
|
||||||
|
f.write(f'{reps} of {former}\n')
|
||||||
|
|
||||||
|
compressType = '.'
|
||||||
|
start = current
|
||||||
|
reps = 1
|
||||||
|
|
||||||
|
former = current
|
||||||
|
|
||||||
|
if compressType == '.':
|
||||||
|
f.write(f'{former}\n')
|
||||||
|
elif compressType == 'to':
|
||||||
|
f.write(f'{start} to {former}\n')
|
||||||
|
elif compressType == 'of':
|
||||||
|
f.write(f'{reps} of {former}\n')
|
||||||
|
|
||||||
|
|
||||||
|
def save(self,fname,compress=True):
|
||||||
|
"""
|
||||||
|
Save as VTK rectilinear grid.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
fname : str
|
||||||
|
Filename to write.
|
||||||
|
Valid extension is .vtr, it will be appended if not given.
|
||||||
|
compress : bool, optional
|
||||||
|
Compress with zlib algorithm. Defaults to True.
|
||||||
|
|
||||||
|
"""
|
||||||
|
v = VTK.from_rectilinearGrid(self.grid,self.size,self.origin)
|
||||||
|
v.add(self.materials.flatten(order='F'),'material')
|
||||||
|
v.add_comments(self.comments)
|
||||||
|
|
||||||
|
v.save(fname if str(fname).endswith('.vtr') else str(fname)+'.vtr',parallel=False,compress=compress)
|
||||||
|
|
||||||
|
|
||||||
|
def show(self):
|
||||||
|
"""Show on screen."""
|
||||||
|
v = VTK.from_rectilinearGrid(self.grid,self.size,self.origin)
|
||||||
|
v.show()
|
||||||
|
|
||||||
|
|
||||||
def add_primitive(self,dimension,center,exponent,
|
def add_primitive(self,dimension,center,exponent,
|
||||||
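With from_file/from_vtr/to_file gone, Geom I/O reduces to load/load_ASCII for reading and save/save_ASCII for writing. A round-trip sketch under that assumption (file names are placeholders):

    import damask

    geom = damask.Geom.load('polycrystal')             # reads polycrystal.vtr
    geom.save('polycrystal_copy')                      # writes compressed polycrystal_copy.vtr
    geom.save_ASCII('polycrystal.geom',compress=True)  # legacy format, 'x of y'/'a to b' packed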
|
|
|
@ -0,0 +1,193 @@
|
||||||
|
from io import StringIO
|
||||||
|
import copy
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
|
from . import Lattice
|
||||||
|
from . import Rotation
|
||||||
|
|
||||||
|
class NiceDumper(yaml.SafeDumper):
|
||||||
|
"""Make YAML readable for humans."""
|
||||||
|
|
||||||
|
def write_line_break(self, data=None):
|
||||||
|
super().write_line_break(data)
|
||||||
|
|
||||||
|
if len(self.indents) == 1:
|
||||||
|
super().write_line_break()
|
||||||
|
|
||||||
|
def increase_indent(self, flow=False, indentless=False):
|
||||||
|
return super().increase_indent(flow, False)
|
||||||
|
|
||||||
|
|
||||||
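The two overrides only affect layout: a blank line after each top-level block and indented (non-indentless) sequences. A sketch of the effect, reusing the NiceDumper defined above (the dict content is made up):

    import yaml

    data = {'phase': {'Al': {'lattice': 'fcc'}}, 'homogenization': {'SX': {}}}
    print(yaml.dump(data,default_flow_style=None,Dumper=NiceDumper))
    # top-level blocks are separated by blank lines; nested sequences are indented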
|
class Material(dict):
|
||||||
|
"""Material configuration."""
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
"""Show as in file."""
|
||||||
|
output = StringIO()
|
||||||
|
self.save(output)
|
||||||
|
output.seek(0)
|
||||||
|
return ''.join(output.readlines())
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def load(fname):
|
||||||
|
"""Load from yaml file."""
|
||||||
|
try:
|
||||||
|
fhandle = open(fname)
|
||||||
|
except TypeError:
|
||||||
|
fhandle = fname
|
||||||
|
return Material(yaml.safe_load(fhandle))
|
||||||
|
|
||||||
|
def save(self,fname='material.yaml'):
|
||||||
|
"""
|
||||||
|
Save to yaml file.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
fname : file, str, or pathlib.Path
|
||||||
|
Filename or file for writing.
|
||||||
|
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
fhandle = open(fname,'w')
|
||||||
|
except TypeError:
|
||||||
|
fhandle = fname
|
||||||
|
fhandle.write(yaml.dump(dict(self),width=256,default_flow_style=None,Dumper=NiceDumper))
|
||||||
|
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_complete(self):
|
||||||
|
"""Check for completeness."""
|
||||||
|
ok = True
|
||||||
|
for top_level in ['homogenization','phase','microstructure']:
|
||||||
|
# ToDo: With python 3.8 as prerequisite we can shorten with :=
|
||||||
|
ok &= top_level in self
|
||||||
|
if top_level not in self: print(f'{top_level} entry missing')
|
||||||
|
|
||||||
|
if ok:
|
||||||
|
ok &= len(self['microstructure']) > 0
|
||||||
|
if len(self['microstructure']) < 1: print('Incomplete microstructure definition')
|
||||||
|
|
||||||
|
if ok:
|
||||||
|
homogenization = set()
|
||||||
|
phase = set()
|
||||||
|
for i,v in enumerate(self['microstructure']):
|
||||||
|
if 'homogenization' in v:
|
||||||
|
homogenization.add(v['homogenization'])
|
||||||
|
else:
|
||||||
|
print(f'No homogenization specified in microstructure {i}')
|
||||||
|
ok = False
|
||||||
|
|
||||||
|
if 'constituents' in v:
|
||||||
|
for ii,vv in enumerate(v['constituents']):
|
||||||
|
if 'orientation' not in vv:
|
||||||
|
print(f'No orientation specified in constituent {ii} of microstructure {i}')
|
||||||
|
ok = False
|
||||||
|
if 'phase' in vv:
|
||||||
|
phase.add(vv['phase'])
|
||||||
|
else:
|
||||||
|
print(f'No phase specified in constituent {ii} of microstructure {i}')
|
||||||
|
ok = False
|
||||||
|
|
||||||
|
for k,v in self['phase'].items():
|
||||||
|
if 'lattice' not in v:
|
||||||
|
print(f'No lattice specified in phase {k}')
|
||||||
|
ok = False
|
||||||
|
|
||||||
|
#for k,v in self['homogenization'].items():
|
||||||
|
# if 'N_constituents' not in v:
|
||||||
|
# print(f'No. of constituents not specified in homogenization {k}')
|
||||||
|
# ok = False
|
||||||
|
|
||||||
|
if phase - set(self['phase']):
|
||||||
|
print(f'Phase(s) {phase-set(self["phase"])} missing')
|
||||||
|
ok = False
|
||||||
|
if homogenization - set(self['homogenization']):
|
||||||
|
print(f'Homogenization(s) {homogenization-set(self["homogenization"])} missing')
|
||||||
|
ok = False
|
||||||
|
|
||||||
|
return ok
|
||||||
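For reference, a minimal configuration that would pass these checks, written as the dict that the Material class above wraps (all names are placeholders):

    mat = Material({'homogenization': {'SX': {}},
                    'phase':          {'Al': {'lattice': 'fcc'}},
                    'microstructure': [{'homogenization': 'SX',
                                        'constituents': [{'phase':       'Al',
                                                          'fraction':    1.0,
                                                          'orientation': [1.0,0.0,0.0,0.0]}]}]})
    assert mat.is_complete  # every cross-reference above resolves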
|
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_valid(self):
|
||||||
|
"""Check for valid file layout."""
|
||||||
|
ok = True
|
||||||
|
|
||||||
|
if 'phase' in self:
|
||||||
|
for k,v in self['phase'].items():
|
||||||
|
if 'lattice' in v:
|
||||||
|
try:
|
||||||
|
Lattice(v['lattice'])
|
||||||
|
except KeyError:
|
||||||
|
s = v['lattice']
|
||||||
|
print(f"Invalid lattice: '{s}' in phase '{k}'")
|
||||||
|
ok = False
|
||||||
|
|
||||||
|
if 'microstructure' in self:
|
||||||
|
for i,v in enumerate(self['microstructure']):
|
||||||
|
if 'constituents' in v:
|
||||||
|
f = 0.0
|
||||||
|
for c in v['constituents']:
|
||||||
|
f += float(c['fraction'])
|
||||||
|
if 'orientation' in c:
|
||||||
|
try:
|
||||||
|
Rotation.from_quaternion(c['orientation'])
|
||||||
|
except ValueError:
|
||||||
|
o = c['orientation']
|
||||||
|
print(f"Invalid orientation: '{o}' in microstructure '{i}'")
|
||||||
|
ok = False
|
||||||
|
if not np.isclose(f,1.0):
|
||||||
|
print(f"Invalid total fraction '{f}' in microstructure '{i}'")
|
||||||
|
ok = False
|
||||||
|
|
||||||
|
return ok
|
||||||
|
|
||||||
|
|
||||||
|
def microstructure_rename_phase(self,mapping,ID=None,constituent=None):
|
||||||
|
"""
|
||||||
|
Change phase name in microstructure.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
mapping : dict
|
||||||
|
Mapping from old name to new name.
|
||||||
|
ID : list of int, optional
|
||||||
|
Limit renaming to selected microstructure IDs.
|
||||||
|
constituent : list of int, optional
|
||||||
|
Limit renaming to selected constituents.
|
||||||
|
|
||||||
|
"""
|
||||||
|
dup = copy.deepcopy(self)
|
||||||
|
for i,m in enumerate(dup['microstructure']):
|
||||||
|
if ID and i not in ID: continue
|
||||||
|
for c in m['constituents']:
|
||||||
|
if constituent is not None and c not in constituent: continue
|
||||||
|
try:
|
||||||
|
c['phase'] = mapping[c['phase']]
|
||||||
|
except KeyError:
|
||||||
|
continue
|
||||||
|
return dup
|
||||||
|
|
||||||
|
|
||||||
|
def microstructure_rename_homogenization(self,mapping,ID=None):
|
||||||
|
"""
|
||||||
|
Change homogenization name in microstructure.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
mapping : dict
|
||||||
|
Mapping from old name to new name.
|
||||||
|
ID : list of int, optional
|
||||||
|
Limit renaming to selected homogenization IDs.
|
||||||
|
|
||||||
|
"""
|
||||||
|
dup = copy.deepcopy(self)
|
||||||
|
for i,m in enumerate(dup['microstructure']):
|
||||||
|
if ID and i not in ID: continue
|
||||||
|
try:
|
||||||
|
m['homogenization'] = mapping[m['homogenization']]
|
||||||
|
except KeyError:
|
||||||
|
continue
|
||||||
|
return dup
|
|
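End to end, the new class reads, checks, transforms, and writes a configuration. A usage sketch (file names and the phase mapping are illustrative):

    import damask

    mat = damask.Material.load('material.yaml')
    if mat.is_complete and mat.is_valid:
        mat.microstructure_rename_phase({'Aluminum':'Al'}).save('material_new.yaml')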
@ -1100,7 +1100,7 @@ class Result:
|
||||||
pool.join()
|
pool.join()
|
||||||
|
|
||||||
|
|
||||||
def write_XDMF(self):
|
def save_XDMF(self):
|
||||||
"""
|
"""
|
||||||
Write XDMF file to directly visualize data in DADF5 file.
|
Write XDMF file to directly visualize data in DADF5 file.
|
||||||
|
|
||||||
|
@ -1196,7 +1196,7 @@ class Result:
|
||||||
f.write(xml.dom.minidom.parseString(ET.tostring(xdmf).decode()).toprettyxml())
|
f.write(xml.dom.minidom.parseString(ET.tostring(xdmf).decode()).toprettyxml())
|
||||||
|
|
||||||
|
|
||||||
def to_vtk(self,labels=[],mode='cell'):
|
def save_vtk(self,labels=[],mode='cell'):
|
||||||
"""
|
"""
|
||||||
Export to vtk cell/point data.
|
Export to vtk cell/point data.
|
||||||
|
|
||||||
|
@ -1268,4 +1268,4 @@ class Result:
|
||||||
u = self.read_dataset(self.get_dataset_location('u_n' if mode.lower() == 'cell' else 'u_p'))
|
u = self.read_dataset(self.get_dataset_location('u_n' if mode.lower() == 'cell' else 'u_p'))
|
||||||
v.add(u,'u')
|
v.add(u,'u')
|
||||||
|
|
||||||
v.to_file(f'{self.fname.stem}_inc{inc[3:].zfill(N_digits)}')
|
v.save(f'{self.fname.stem}_inc{inc[3:].zfill(N_digits)}')
|
||||||
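The Result renames are mechanical: write_XDMF becomes save_XDMF and to_vtk becomes save_vtk. Sketch (the HDF5 file name is a placeholder):

    import damask

    result = damask.Result('job.hdf5')
    result.save_XDMF()                 # was write_XDMF(); XDMF wrapper for the DADF5 data
    result.save_vtk(labels=['F','P'])  # was to_vtk(); one VTK file per increment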
|
|
|
@ -27,8 +27,11 @@ class Table:
|
||||||
self.comments = [] if comments_ is None else [c for c in comments_]
|
self.comments = [] if comments_ is None else [c for c in comments_]
|
||||||
self.data = pd.DataFrame(data=data)
|
self.data = pd.DataFrame(data=data)
|
||||||
self.shapes = { k:(v,) if isinstance(v,(np.int,int)) else v for k,v in shapes.items() }
|
self.shapes = { k:(v,) if isinstance(v,(np.int,int)) else v for k,v in shapes.items() }
|
||||||
self._label_condensed()
|
self._label_uniform()
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
"""Brief overview."""
|
||||||
|
return util.srepr(self.comments)+'\n'+self.data.__repr__()
|
||||||
|
|
||||||
def __copy__(self):
|
def __copy__(self):
|
||||||
"""Copy Table."""
|
"""Copy Table."""
|
||||||
|
@ -39,7 +42,7 @@ class Table:
|
||||||
return self.__copy__()
|
return self.__copy__()
|
||||||
|
|
||||||
|
|
||||||
def _label_flat(self):
|
def _label_discrete(self):
|
||||||
"""Label data individually, e.g. v v v ==> 1_v 2_v 3_v."""
|
"""Label data individually, e.g. v v v ==> 1_v 2_v 3_v."""
|
||||||
labels = []
|
labels = []
|
||||||
for label,shape in self.shapes.items():
|
for label,shape in self.shapes.items():
|
||||||
|
@ -48,8 +51,8 @@ class Table:
|
||||||
self.data.columns = labels
|
self.data.columns = labels
|
||||||
|
|
||||||
|
|
||||||
def _label_condensed(self):
|
def _label_uniform(self):
|
||||||
"""Label data condensed, e.g. 1_v 2_v 3_v ==> v v v."""
|
"""Label data uniformly, e.g. 1_v 2_v 3_v ==> v v v."""
|
||||||
labels = []
|
labels = []
|
||||||
for label,shape in self.shapes.items():
|
for label,shape in self.shapes.items():
|
||||||
labels += [label] * int(np.prod(shape))
|
labels += [label] * int(np.prod(shape))
|
||||||
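The new names describe the two labelling schemes rather than an action: discrete labels number the components, uniform labels repeat the name. Illustration, assuming the Table constructor of this branch (both helpers are internal, hence the underscore):

    import numpy as np
    import damask

    t = damask.Table(np.random.rand(2,3),{'v':(3,)})
    t._label_discrete()  # columns: 1_v 2_v 3_v
    t._label_uniform()   # columns: v v v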
|
@ -64,12 +67,15 @@ class Table:
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def from_ASCII(fname):
|
def load(fname):
|
||||||
"""
|
"""
|
||||||
Create table from ASCII file.
|
Load ASCII table file.
|
||||||
|
|
||||||
The first line can indicate the number of subsequent header lines as 'n header',
|
In legacy style, the first line indicates the number of
|
||||||
alternatively first line is the header and comments are marked by '#' ('new style').
|
subsequent header lines as "N header", with the last header line being
|
||||||
|
interpreted as column labels.
|
||||||
|
Alternatively, initial comments are marked by '#', with the first non-comment line
|
||||||
|
containing the column labels.
|
||||||
Vector data column labels are indicated by '1_v, 2_v, ..., n_v'.
|
Vector data column labels are indicated by '1_v, 2_v, ..., n_v'.
|
||||||
Tensor data column labels are indicated by '3x3:1_T, 3x3:2_T, ..., 3x3:9_T'.
|
Tensor data column labels are indicated by '3x3:1_T, 3x3:2_T, ..., 3x3:9_T'.
|
||||||
|
|
||||||
|
@ -119,9 +125,9 @@ class Table:
|
||||||
return Table(data,shapes,comments)
|
return Table(data,shapes,comments)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def from_ang(fname):
|
def load_ang(fname):
|
||||||
"""
|
"""
|
||||||
Create table from TSL ang file.
|
Load ang file.
|
||||||
|
|
||||||
A valid TSL ang file needs to contain the following columns:
|
A valid TSL ang file needs to contain the following columns:
|
||||||
* Euler angles (Bunge notation) in radians, 3 floats, label 'eu'.
|
* Euler angles (Bunge notation) in radians, 3 floats, label 'eu'.
|
||||||
|
@ -289,9 +295,9 @@ class Table:
|
||||||
|
|
||||||
"""
|
"""
|
||||||
dup = self.copy()
|
dup = self.copy()
|
||||||
dup._label_flat()
|
dup._label_discrete()
|
||||||
dup.data.sort_values(labels,axis=0,inplace=True,ascending=ascending)
|
dup.data.sort_values(labels,axis=0,inplace=True,ascending=ascending)
|
||||||
dup._label_condensed()
|
dup._label_uniform()
|
||||||
dup.comments.append(f'sorted {"ascending" if ascending else "descending"} by {labels}')
|
dup.comments.append(f'sorted {"ascending" if ascending else "descending"} by {labels}')
|
||||||
return dup
|
return dup
|
||||||
|
|
||||||
|
@ -338,59 +344,38 @@ class Table:
|
||||||
return dup
|
return dup
|
||||||
|
|
||||||
|
|
||||||
def to_file(self,fname,format='ASCII',new_style=False):
|
def save(self,fname,legacy=False):
|
||||||
"""
|
"""
|
||||||
Store as plain text file.
|
Save as plain text file.
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
fname : file, str, or pathlib.Path
|
fname : file, str, or pathlib.Path
|
||||||
Filename or file for writing.
|
Filename or file for writing.
|
||||||
format : {ASCII'}, optional
|
legacy : Boolean, optional
|
||||||
File format, defaults to 'ASCII'. Available formats are:
|
Write table in legacy style, indicating header lines by "N header"
|
||||||
- ASCII: Plain text file, extension '.txt'.
|
in contrast to using comment sign ('#') at beginning of lines.
|
||||||
new_style : Boolean, optional
|
|
||||||
Write table in new style, indicating header lines by comment sign ('#') only.
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
def _to_ASCII(table,fname,new_style=False):
|
seen = set()
|
||||||
"""
|
labels = []
|
||||||
Store as plain text file.
|
for l in [x for x in self.data.columns if not (x in seen or seen.add(x))]:
|
||||||
|
if self.shapes[l] == (1,):
|
||||||
|
labels.append(f'{l}')
|
||||||
|
elif len(self.shapes[l]) == 1:
|
||||||
|
labels += [f'{i+1}_{l}' \
|
||||||
|
for i in range(self.shapes[l][0])]
|
||||||
|
else:
|
||||||
|
labels += [f'{util.srepr(self.shapes[l],"x")}:{i+1}_{l}' \
|
||||||
|
for i in range(np.prod(self.shapes[l]))]
|
||||||
|
|
||||||
Parameters
|
header = ([f'{len(self.comments)+1} header'] + self.comments) if legacy else \
|
||||||
----------
|
[f'# {comment}' for comment in self.comments]
|
||||||
table : Table object
|
|
||||||
Table to write.
|
|
||||||
fname : file, str, or pathlib.Path
|
|
||||||
Filename or file for writing.
|
|
||||||
new_style : Boolean, optional
|
|
||||||
Write table in new style, indicating header lines by comment sign ('#') only.
|
|
||||||
|
|
||||||
"""
|
try:
|
||||||
seen = set()
|
fhandle = open(fname,'w')
|
||||||
labels = []
|
except TypeError:
|
||||||
for l in [x for x in table.data.columns if not (x in seen or seen.add(x))]:
|
fhandle = fname
|
||||||
if table.shapes[l] == (1,):
|
|
||||||
labels.append(f'{l}')
|
|
||||||
elif len(table.shapes[l]) == 1:
|
|
||||||
labels += [f'{i+1}_{l}' \
|
|
||||||
for i in range(table.shapes[l][0])]
|
|
||||||
else:
|
|
||||||
labels += [f'{util.srepr(table.shapes[l],"x")}:{i+1}_{l}' \
|
|
||||||
for i in range(np.prod(table.shapes[l]))]
|
|
||||||
|
|
||||||
header = [f'# {comment}' for comment in table.comments] if new_style else \
|
for line in header + [' '.join(labels)]: fhandle.write(line+'\n')
|
||||||
[f'{len(table.comments)+1} header'] + table.comments
|
self.data.to_csv(fhandle,sep=' ',na_rep='nan',index=False,header=False)
|
||||||
|
|
||||||
try:
|
|
||||||
f = open(fname,'w')
|
|
||||||
except TypeError:
|
|
||||||
f = fname
|
|
||||||
|
|
||||||
for line in header + [' '.join(labels)]: f.write(line+'\n')
|
|
||||||
table.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False)
|
|
||||||
|
|
||||||
if format.lower() == 'ascii':
|
|
||||||
return _to_ASCII(self,fname,new_style)
|
|
||||||
else:
|
|
||||||
raise TypeError(f'Unknown format {format}.')
|
|
||||||
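Table I/O thus mirrors the other classes: from_ASCII/to_file become load/save, the '#'-commented header is the default, and the 'N header' form stays available behind legacy=True. Sketch (file names are placeholders):

    import damask

    t = damask.Table.load('data.txt')      # reads either header style
    t.save('data_new.txt')                 # '#' comment header (default)
    t.save('data_legacy.txt',legacy=True)  # 'N header' style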
|
|
|
@ -228,7 +228,7 @@ class Test:
|
||||||
|
|
||||||
def copy_Base2Current(self,sourceDir,sourcefiles=[],targetfiles=[]):
|
def copy_Base2Current(self,sourceDir,sourcefiles=[],targetfiles=[]):
|
||||||
|
|
||||||
source=os.path.normpath(os.path.join(self.dirBase,'../../..',sourceDir))
|
source = os.path.normpath(os.path.join(self.dirBase,'../../..',sourceDir))
|
||||||
if len(targetfiles) == 0: targetfiles = sourcefiles
|
if len(targetfiles) == 0: targetfiles = sourcefiles
|
||||||
for i,f in enumerate(sourcefiles):
|
for i,f in enumerate(sourcefiles):
|
||||||
try:
|
try:
|
||||||
|
@ -287,30 +287,30 @@ class Test:
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
logging.info('\n '.join(['comparing',File1,File2]))
|
logging.info('\n '.join(['comparing',File1,File2]))
|
||||||
table = damask.Table.from_ASCII(File1)
|
table = damask.Table.load(File1)
|
||||||
len1=len(table.comments)+2
|
len1 = len(table.comments)+2
|
||||||
table = damask.Table.from_ASCII(File2)
|
table = damask.Table.load(File2)
|
||||||
len2=len(table.comments)+2
|
len2 = len(table.comments)+2
|
||||||
|
|
||||||
refArray = np.nan_to_num(np.genfromtxt(File1,missing_values='n/a',skip_header = len1,autostrip=True))
|
refArray = np.nan_to_num(np.genfromtxt(File1,missing_values='n/a',skip_header = len1,autostrip=True))
|
||||||
curArray = np.nan_to_num(np.genfromtxt(File2,missing_values='n/a',skip_header = len2,autostrip=True))
|
curArray = np.nan_to_num(np.genfromtxt(File2,missing_values='n/a',skip_header = len2,autostrip=True))
|
||||||
|
|
||||||
if len(curArray) == len(refArray):
|
if len(curArray) == len(refArray):
|
||||||
refArrayNonZero = refArray[refArray.nonzero()]
|
refArrayNonZero = refArray[refArray.nonzero()]
|
||||||
curArray = curArray[refArray.nonzero()]
|
curArray = curArray[refArray.nonzero()]
|
||||||
max_err=np.max(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.))
|
max_err = np. max(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.))
|
||||||
max_loc=np.argmax(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.))
|
max_loc = np.argmax(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.))
|
||||||
refArrayNonZero = refArrayNonZero[curArray.nonzero()]
|
refArrayNonZero = refArrayNonZero[curArray.nonzero()]
|
||||||
curArray = curArray[curArray.nonzero()]
|
curArray = curArray[curArray.nonzero()]
|
||||||
print(f' ********\n * maximum relative error {max_err} between {refArrayNonZero[max_loc]} and {curArray[max_loc]}\n ********')
|
print(f' ********\n * maximum relative error {max_err} between {refArrayNonZero[max_loc]} and {curArray[max_loc]}\n ********')
|
||||||
return max_err
|
return max_err
|
||||||
else:
|
else:
|
||||||
raise Exception('mismatch in array size to compare')
|
raise Exception(f'mismatch in array sizes ({len(refArray)} and {len(curArray)}) to compare')
|
||||||
|
|
||||||
|
|
||||||
def compare_ArrayRefCur(self,ref,cur=''):
|
def compare_ArrayRefCur(self,ref,cur=''):
|
||||||
|
|
||||||
if cur =='': cur = ref
|
if cur == '': cur = ref
|
||||||
refName = self.fileInReference(ref)
|
refName = self.fileInReference(ref)
|
||||||
curName = self.fileInCurrent(cur)
|
curName = self.fileInCurrent(cur)
|
||||||
return self.compare_Array(refName,curName)
|
return self.compare_Array(refName,curName)
|
||||||
|
@ -331,7 +331,7 @@ class Test:
|
||||||
logging.info('\n '.join(['comparing ASCII Tables',file0,file1]))
|
logging.info('\n '.join(['comparing ASCII Tables',file0,file1]))
|
||||||
if normHeadings == '': normHeadings = headings0
|
if normHeadings == '': normHeadings = headings0
|
||||||
|
|
||||||
# check if comparison is possible and determine lenght of columns
|
# check if comparison is possible and determine length of columns
|
||||||
if len(headings0) == len(headings1) == len(normHeadings):
|
if len(headings0) == len(headings1) == len(normHeadings):
|
||||||
dataLength = len(headings0)
|
dataLength = len(headings0)
|
||||||
length = [1 for i in range(dataLength)]
|
length = [1 for i in range(dataLength)]
|
||||||
|
@ -399,10 +399,8 @@ class Test:
|
||||||
if any(norm[i]) == 0.0 or absTol[i]:
|
if any(norm[i]) == 0.0 or absTol[i]:
|
||||||
norm[i] = [1.0 for j in range(line0-len(skipLines))]
|
norm[i] = [1.0 for j in range(line0-len(skipLines))]
|
||||||
absTol[i] = True
|
absTol[i] = True
|
||||||
if perLine:
|
logging.warning(f'''{"At least one" if perLine else "Maximum"} norm of
|
||||||
logging.warning(f"At least one norm of \"{headings0[i]['label']}\" in first table is 0.0, using absolute tolerance")
|
"{headings0[i]['label']}" in first table is 0.0, using absolute tolerance''')
|
||||||
else:
|
|
||||||
logging.warning(f"Maximum norm of \"{headings0[i]['label']}\" in first table is 0.0, using absolute tolerance")
|
|
||||||
|
|
||||||
line1 = 0
|
line1 = 0
|
||||||
while table1.data_read(): # read next data line of ASCII table
|
while table1.data_read(): # read next data line of ASCII table
|
||||||
|
@ -418,20 +416,18 @@ class Test:
|
||||||
|
|
||||||
logging.info(' ********')
|
logging.info(' ********')
|
||||||
for i in range(dataLength):
|
for i in range(dataLength):
|
||||||
if absTol[i]:
|
logging.info(f' * maximum {"absolute" if absTol[i] else "relative"} error {maxError[i]} '
|
||||||
logging.info(f" * maximum absolute error {maxError[i]} between {headings0[i]['label']} and {headings1[i]['label']}")
|
f'between {headings0[i]["label"]} and {headings1[i]["label"]}')
|
||||||
else:
|
|
||||||
logging.info(f" * maximum relative error {maxError[i]} between {headings0[i]['label']} and {headings1[i]['label']}")
|
|
||||||
logging.info(' ********')
|
logging.info(' ********')
|
||||||
return maxError
|
return maxError
|
||||||
|
|
||||||
|
|
||||||
def compare_TablesStatistically(self,
|
def compare_TablesStatistically(self,
|
||||||
files = [None,None], # list of file names
|
files = [None,None], # list of file names
|
||||||
columns = [None], # list of list of column labels (per file)
|
columns = [None], # list of list of column labels (per file)
|
||||||
meanTol = 1.0e-4,
|
meanTol = 1.0e-4,
|
||||||
stdTol = 1.0e-6,
|
stdTol = 1.0e-6,
|
||||||
preFilter = 1.0e-9):
|
preFilter = 1.0e-9):
|
||||||
"""
|
"""
|
||||||
Calculate statistics of tables.
|
Calculate statistics of tables.
|
||||||
|
|
||||||
|
@ -440,9 +436,9 @@ class Test:
|
||||||
if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested
|
if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested
|
||||||
files = [str(files)]
|
files = [str(files)]
|
||||||
|
|
||||||
tables = [damask.Table.from_ASCII(filename) for filename in files]
|
tables = [damask.Table.load(filename) for filename in files]
|
||||||
for table in tables:
|
for table in tables:
|
||||||
table._label_flat()
|
table._label_discrete()
|
||||||
|
|
||||||
columns += [columns[0]]*(len(files)-len(columns)) # extend to same length as files
|
columns += [columns[0]]*(len(files)-len(columns)) # extend to same length as files
|
||||||
columns = columns[:len(files)] # truncate to same length as files
|
columns = columns[:len(files)] # truncate to same length as files
|
||||||
|
@ -462,7 +458,7 @@ class Test:
|
||||||
|
|
||||||
data = []
|
data = []
|
||||||
for table,labels in zip(tables,columns):
|
for table,labels in zip(tables,columns):
|
||||||
table._label_condensed()
|
table._label_uniform()
|
||||||
data.append(np.hstack(list(table.get(label) for label in labels)))
|
data.append(np.hstack(list(table.get(label) for label in labels)))
|
||||||
|
|
||||||
|
|
||||||
|
@ -471,12 +467,11 @@ class Test:
|
||||||
normBy = (np.abs(data[i]) + np.abs(data[i-1]))*0.5
|
normBy = (np.abs(data[i]) + np.abs(data[i-1]))*0.5
|
||||||
normedDelta = np.where(normBy>preFilter,delta/normBy,0.0)
|
normedDelta = np.where(normBy>preFilter,delta/normBy,0.0)
|
||||||
mean = np.amax(np.abs(np.mean(normedDelta,0)))
|
mean = np.amax(np.abs(np.mean(normedDelta,0)))
|
||||||
std = np.amax(np.std(normedDelta,0))
|
std = np.amax(np.std(normedDelta,0))
|
||||||
logging.info(f'mean: {mean:f}')
|
logging.info(f'mean: {mean:f}')
|
||||||
logging.info(f'std: {std:f}')
|
logging.info(f'std: {std:f}')
|
||||||
|
|
||||||
return (mean<meanTol) & (std < stdTol)
|
return (mean < meanTol) & (std < stdTol)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def compare_Tables(self,
|
def compare_Tables(self,
|
||||||
|
@ -491,7 +486,7 @@ class Test:
|
||||||
|
|
||||||
if len(files) < 2: return True # single table is always close to itself...
|
if len(files) < 2: return True # single table is always close to itself...
|
||||||
|
|
||||||
tables = [damask.Table.from_ASCII(filename) for filename in files]
|
tables = [damask.Table.load(filename) for filename in files]
|
||||||
|
|
||||||
columns += [columns[0]]*(len(files)-len(columns)) # extend to same length as files
|
columns += [columns[0]]*(len(files)-len(columns)) # extend to same length as files
|
||||||
columns = columns[:len(files)] # truncate to same length as files
|
columns = columns[:len(files)] # truncate to same length as files
|
||||||
|
@ -580,7 +575,7 @@ class Test:
|
||||||
|
|
||||||
if culprit == 0:
|
if culprit == 0:
|
||||||
count = len(self.variants) if self.options.select is None else len(self.options.select)
|
count = len(self.variants) if self.options.select is None else len(self.options.select)
|
||||||
msg = 'Test passed.' if count == 1 else f'All {count} tests passed.'
|
msg = ('Test passed.' if count == 1 else f'All {count} tests passed.') + '\a\a\a'
|
||||||
elif culprit == -1:
|
elif culprit == -1:
|
||||||
msg = 'Warning: could not start test...'
|
msg = 'Warning: could not start test...'
|
||||||
ret = 0
|
ret = 0
|
||||||
|
|
|
@ -118,7 +118,7 @@ class VTK:
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def from_file(fname,dataset_type=None):
|
def load(fname,dataset_type=None):
|
||||||
"""
|
"""
|
||||||
Create VTK from file.
|
Create VTK from file.
|
||||||
|
|
||||||
|
@ -168,7 +168,7 @@ class VTK:
|
||||||
def _write(writer):
|
def _write(writer):
|
||||||
"""Wrapper for parallel writing."""
|
"""Wrapper for parallel writing."""
|
||||||
writer.Write()
|
writer.Write()
|
||||||
def to_file(self,fname,parallel=True,compress=True):
|
def save(self,fname,parallel=True,compress=True):
|
||||||
"""
|
"""
|
||||||
Write to file.
|
Write to file.
|
||||||
|
|
||||||
|
@ -178,6 +178,8 @@ class VTK:
|
||||||
Filename for writing.
|
Filename for writing.
|
||||||
parallel : boolean, optional
|
parallel : boolean, optional
|
||||||
Write data in parallel background process. Defaults to True.
|
Write data in parallel background process. Defaults to True.
|
||||||
|
compress : bool, optional
|
||||||
|
Compress with zlib algorithm. Defaults to True.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
if isinstance(self.vtk_data,vtk.vtkRectilinearGrid):
|
if isinstance(self.vtk_data,vtk.vtkRectilinearGrid):
|
||||||
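VTK gets the same load/save rename plus an explicit compress switch. Sketch (file name is a placeholder):

    import damask

    v = damask.VTK.load('polycrystal.vtr')
    v.save('copy.vtr',parallel=False,compress=True)  # synchronous, zlib-compressed write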
|
|
|
@ -172,8 +172,9 @@ def scale_to_coprime(v):
|
||||||
m = (np.array(v) * reduce(lcm, map(lambda x: int(get_square_denominator(x)),v)) ** 0.5).astype(np.int)
|
m = (np.array(v) * reduce(lcm, map(lambda x: int(get_square_denominator(x)),v)) ** 0.5).astype(np.int)
|
||||||
m = m//reduce(np.gcd,m)
|
m = m//reduce(np.gcd,m)
|
||||||
|
|
||||||
if not np.allclose(v[v.nonzero()]/m[v.nonzero()],v[v.nonzero()][0]/m[m.nonzero()][0]):
|
with np.errstate(divide='ignore'):
|
||||||
raise ValueError(f'Invalid result {m} for input {v}. Insufficient precision?')
|
if not np.allclose(np.ma.masked_invalid(v/m),v[np.argmax(abs(v))]/m[np.argmax(abs(v))]):
|
||||||
|
raise ValueError(f'Invalid result {m} for input {v}. Insufficient precision?')
|
||||||
|
|
||||||
return m
|
return m
|
||||||
|
|
||||||
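The rewritten check tolerates zero components: for v = (0.5, 0, 0.5) the element-wise ratio v/m contains a NaN from 0/0, which np.ma.masked_invalid masks instead of failing the comparison. A worked sketch (assuming scale_to_coprime lives in damask.util):

    import numpy as np
    from damask.util import scale_to_coprime  # assumed module location

    print(scale_to_coprime(np.array([0.5,0.0,0.5])))  # [1 0 1]; 0/0 is masked, not compared
    print(scale_to_coprime(np.array([1/3,2/3,1.0])))  # [1 2 3]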
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATgAAAA==eF4FwUEKgCAUBNCO4rIWX8ZJsbxA5/iUFqQVBJ2/9zZt+p52yXeza816mW+0sBCtz6HCGGSPE1wJjMX0BCGYhTQuJLrkKfDA0P0d3xK6
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||||
AQAAAACAAAAABQAAZwAAAA==eF7t0rcOgmAAhVEgNmyo2AuoWN//BR04EwsJcfzvcvabL47qxcFOJg177HPAIUdMOeaEU844Z8YFl1wx55obbrnjngceeeKZFxYseeWNd1Z88MkX3/zwy+Z/wf8YOqzX1uEPlgwHCA==
|
AQAAAACAAAAABQAAZwAAAA==eF7t0rcOgmAAhVEgNmyo2AuoWN//BR04EwsJcfzvcvabL47qxcFOJg177HPAIUdMOeaEU844Z8YFl1wx55obbrnjngceeeKZFxYseeWNd1Z88MkX3/zwy+Z/wf8YOqzX1uEPlgwHCA==
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMoM9Yz0DPQTcwpyEjUNTI31U03tzAwTDM1Mk9T0DAyMDLQNbDUNTJSMDS1MjK0MgFyTQwMNBkAHc8SuA==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||||
AQAAAACAAAAABQAAagAAAA==eF7t0rkOglAARFExLrgCKuKuqLj8/w9acCoSY7B+05x+cqNOvSj4l92GPfY54JAxRxxzwilnnDNhyowLLrlizjULbrjljnseeOSJZ15Y8sob76z44JMvvtn8L9jObz2GDuv96vADk5QHBg==
|
AQAAAACAAAAABQAAagAAAA==eF7t0rkOglAARFExLrgCKuKuqLj8/w9acCoSY7B+05x+cqNOvSj4l92GPfY54JAxRxxzwilnnDNhyowLLrlizjULbrjljnseeOSJZ15Y8sob76z44JMvvtn8L9jObz2GDuv96vADk5QHBg==
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF4FwdEJgDAMBUBH6ad+JLzElmoXcI6grYKtCoLze7dZs/fkJd+N15rtct/IYJDV5zDSGGiPE6QEjcX1CgVhJlUnIakkLwQPDN0PHdcSuQ==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||||
AQAAAACAAAAABQAAZAAAAA==eF7t0scRglAAQEEBAyZUMCuomPtv0ANbgMNw/O+yDbyo1xQFWxkzYZ8DDjliyjEnnHLGOTMuuOSKOQuuueGWO+554JEnnlmy4oVX3ljzzgeffPHND7+Mg50aPmz698MfmvQHCg==
|
AQAAAACAAAAABQAAZAAAAA==eF7t0scRglAAQEEBAyZUMCuomPtv0ANbgMNw/O+yDbyo1xQFWxkzYZ8DDjliyjEnnHLGOTMuuOSKOQuuueGWO+554JEnnlmy4oVX3ljzzgeffPHND7+Mg50aPmz698MfmvQHCg==
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF4FwVEKgCAQBcCO4md97PJcE9MLdI6ltCCtIOj8zuza9Lt4zU/jrWa9ze8YDNL6nkoSPB1hgS1eQjGjQECIJGKsT2KTi4QZmIYOHg4SwA==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||||
AQAAAACAAAAABQAAYwAAAA==eF7t0scBgkAAAEHBgBEwgDmBsf8GfTANCN/bzzSwUa8pCrYyZp8DDjliwjEnnHLGORdMmTHnkiuuuWHBklvuuOeBR5545oVX3nhnxZoPPvnimx9+GQc7GT5sqvjvhz+ZtAcJ
|
AQAAAACAAAAABQAAYwAAAA==eF7t0scBgkAAAEHBgBEwgDmBsf8GfTANCN/bzzSwUa8pCrYyZp8DDjliwjEnnHLGORdMmTHnkiuuuWHBklvuuOeBR5545oVX3nhnxZoPPvnimx9+GQc7GT5sqvjvhz+ZtAcJ
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF4FwdEJgDAMBUBH6ad+JLzElmoXcI6grYKtCoLze7dZs/fkJd+N15rtct/IYJDV5zDSGGiPE6QEjcX1CgVhJlUnIakkLwQPDN0PHdcSuQ==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="2">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="2">
|
||||||
AQAAAACAAAAABQAAGwAAAA==eF5jZIAAxlF6lB4AmmmUpogeDUfKaAD7jwDw
|
AQAAAACAAAAABQAAGwAAAA==eF5jZIAAxlF6lB4AmmmUpogeDUfKaAD7jwDw
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF4FwVEKgCAQBcCO4md97PJcE9MLdI6ltCCtIOj8zuza9Lt4zU/jrWa9ze8YDNL6nkoSPB1hgS1eQjGjQECIJGKsT2KTi4QZmIYOHg4SwA==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="2">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="2">
|
||||||
AQAAAACAAAAABQAAGQAAAA==eF5jZIAAxlF6lB4AmmmUHqUHkAYA/M8A8Q==
|
AQAAAACAAAAABQAAGQAAAA==eF5jZIAAxlF6lB4AmmmUHqUHkAYA/M8A8Q==
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATgAAAA==eF4FwUEKgCAUBNCO4rIWX8ZJsbxA5/iUFqQVBJ2/9zZt+p52yXeza816mW+0sBCtz6HCGGSPE1wJjMX0BCGYhTQuJLrkKfDA0P0d3xK6
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||||
AQAAAACAAAAABQAAZwAAAA==eF7t0rcOgmAAhVEgNmyo2AuoWN//BR04EwsJcfzvcvabL47qxcFOJg177HPAIUdMOeaEU844Z8YFl1wx55obbrnjngceeeKZFxYseeWNd1Z88MkX3/zwy+Z/wf8YOqzX1uEPlgwHCA==
|
AQAAAACAAAAABQAAZwAAAA==eF7t0rcOgmAAhVEgNmyo2AuoWN//BR04EwsJcfzvcvabL47qxcFOJg177HPAIUdMOeaEU844Z8YFl1wx55obbrnjngceeeKZFxYseeWNd1Z88MkX3/zwy+Z/wf8YOqzX1uEPlgwHCA==
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
<?xml version="1.0"?>
|
<?xml version="1.0"?>
|
||||||
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||||
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
<RectilinearGrid WholeExtent="0 8 0 5 0 4">
|
||||||
|
<FieldData>
|
||||||
|
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||||
|
AQAAAACAAABJAAAATwAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMoM9Yz0DPQTcwpyEjUNTI31U03tzAwTDM1Mk9T0DAyMDLQNbDUNTJSMDS1MjK0MgFyTQwMNBkAHc8SuA==
|
||||||
|
</Array>
|
||||||
|
</FieldData>
|
||||||
<Piece Extent="0 8 0 5 0 4">
|
<Piece Extent="0 8 0 5 0 4">
|
||||||
<PointData>
|
<PointData>
|
||||||
</PointData>
|
</PointData>
|
||||||
<CellData>
|
<CellData>
|
||||||
<DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
|
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||||
AQAAAACAAAAABQAAagAAAA==eF7t0rkOglAARFExLrgCKuKuqLj8/w9acCoSY7B+05x+cqNOvSj4l92GPfY54JAxRxxzwilnnDNhyowLLrlizjULbrjljnseeOSJZ15Y8sob76z44JMvvtn8L9jObz2GDuv96vADk5QHBg==
|
AQAAAACAAAAABQAAagAAAA==eF7t0rkOglAARFExLrgCKuKuqLj8/w9acCoSY7B+05x+cqNOvSj4l92GPfY54JAxRxxzwilnnDNhyowLLrlizjULbrjljnseeOSJZ15Y8sob76z44JMvvtn8L9jObz2GDuv96vADk5QHBg==
|
||||||
</DataArray>
|
</DataArray>
|
||||||
</CellData>
|
</CellData>
|
||||||
|
|
|
@ -1,11 +1,16 @@
 <?xml version="1.0"?>
 <VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
  <RectilinearGrid WholeExtent="0 8 0 5 0 4">
+  <FieldData>
+   <Array type="String" Name="comments" NumberOfTuples="1" format="binary">
+    AQAAAACAAABJAAAATwAAAA==eF4FwdEJgDAMBUBH6ad+JLzElmoXcI6grYKtCoLze7dZs/fkJd+N15rtct/IYJDV5zDSGGiPE6QEjcX1CgVhJlUnIakkLwQPDN0PHdcSuQ==
+   </Array>
+  </FieldData>
   <Piece Extent="0 8 0 5 0 4">
    <PointData>
    </PointData>
    <CellData>
-    <DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
+    <DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
     AQAAAACAAAAABQAAZAAAAA==eF7t0scRglAAQEEBAyZUMCuomPtv0ANbgMNw/O+yDbyo1xQFWxkzYZ8DDjliyjEnnHLGOTMuuOSKOQuuueGWO+554JEnnlmy4oVX3ljzzgeffPHND7+Mg50aPmz698MfmvQHCg==
    </DataArray>
   </CellData>
@ -1,11 +1,16 @@
 <?xml version="1.0"?>
 <VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
  <RectilinearGrid WholeExtent="0 8 0 5 0 4">
+  <FieldData>
+   <Array type="String" Name="comments" NumberOfTuples="1" format="binary">
+    AQAAAACAAABJAAAATwAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMoM9Yz0DPQTcwpyEjUNTI31U03tzAwTDM1Mk9T0DAyMDLQNbDUNTJSMDS1MjK0MgFyTQwMNBkAHc8SuA==
+   </Array>
+  </FieldData>
   <Piece Extent="0 8 0 5 0 4">
    <PointData>
    </PointData>
    <CellData>
-    <DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
+    <DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
     AQAAAACAAAAABQAAYwAAAA==eF7t0scBgkAAAEHBgBEwgDmBsf8GfTANCN/bzzSwUa8pCrYyZp8DDjliwjEnnHLGORdMmTHnkiuuuWHBklvuuOeBR5545oVX3nhnxZoPPvnimx9+GQc7GT5sqvjvhz+ZtAcJ
    </DataArray>
   </CellData>
@ -1,11 +1,16 @@
 <?xml version="1.0"?>
 <VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
  <RectilinearGrid WholeExtent="0 8 0 5 0 4">
+  <FieldData>
+   <Array type="String" Name="comments" NumberOfTuples="1" format="binary">
+    AQAAAACAAABJAAAATwAAAA==eF4FwdEJgDAMBUBH6ad+JLzElmoXcI6grYKtCoLze7dZs/fkJd+N15rtct/IYJDV5zDSGGiPE6QEjcX1CgVhJlUnIakkLwQPDN0PHdcSuQ==
+   </Array>
+  </FieldData>
   <Piece Extent="0 8 0 5 0 4">
    <PointData>
    </PointData>
    <CellData>
-    <DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="2">
+    <DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="2">
     AQAAAACAAAAABQAAIgAAAA==eF5jZIAAxlGaLJoJjSakntr6hzqN7v9RepSmJw0AC04A9Q==
    </DataArray>
   </CellData>
@ -1,11 +1,16 @@
 <?xml version="1.0"?>
 <VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
  <RectilinearGrid WholeExtent="0 8 0 5 0 4">
+  <FieldData>
+   <Array type="String" Name="comments" NumberOfTuples="1" format="binary">
+    AQAAAACAAABJAAAATwAAAA==eF4FwVEKgCAQBcCO4md97PJcE9MLdI6ltCCtIOj8zuza9Lt4zU/jrWa9ze8YDNL6nkoSPB1hgS1eQjGjQECIJGKsT2KTi4QZmIYOHg4SwA==
+   </Array>
+  </FieldData>
   <Piece Extent="0 8 0 5 0 4">
    <PointData>
    </PointData>
    <CellData>
-    <DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="2">
+    <DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="2">
     AQAAAACAAAAABQAALwAAAA==eF5jZIAAxlGaLJoJjSakHpc+cvUTUkdrmlL3j9KU0dROF5TqH2iaVPcDAALOANU=
    </DataArray>
   </CellData>
@ -1,11 +1,16 @@
 <?xml version="1.0"?>
 <VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
  <RectilinearGrid WholeExtent="0 8 0 5 0 4">
+  <FieldData>
+   <Array type="String" Name="comments" NumberOfTuples="1" format="binary">
+    AQAAAACAAABJAAAATgAAAA==eF4FwUEKgCAUBNCO4rIWX8ZJsbxA5/iUFqQVBJ2/9zZt+p52yXeza816mW+0sBCtz6HCGGSPE1wJjMX0BCGYhTQuJLrkKfDA0P0d3xK6
+   </Array>
+  </FieldData>
   <Piece Extent="0 8 0 5 0 4">
    <PointData>
    </PointData>
    <CellData>
-    <DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
+    <DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
     AQAAAACAAAAABQAAcQAAAA==eF7t0rkOglAUBFAxKu6igvsKrv//gxYcm9fQGEPBNKe6yc1kolaZqPEndthljzH7HHDIEceccMoZE8654JIpM6645oZb7rjngUeeeOaFV+YseOOdDz754pthf+3Aqr7rdv9vw3+/NjssU7XDD0/8BuQ=
    </DataArray>
   </CellData>
@ -1,11 +1,16 @@
 <?xml version="1.0"?>
 <VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
  <RectilinearGrid WholeExtent="0 8 0 5 0 4">
+  <FieldData>
+   <Array type="String" Name="comments" NumberOfTuples="1" format="binary">
+    AQAAAACAAABJAAAATwAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMoM9Yz0DPQTcwpyEjUNTI31U03tzAwTDM1Mk9T0DAyMDLQNbDUNTJSMDS1MjK0MgFyTQwMNBkAHc8SuA==
+   </Array>
+  </FieldData>
   <Piece Extent="0 8 0 5 0 4">
    <PointData>
    </PointData>
    <CellData>
-    <DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
+    <DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
     AQAAAACAAAAABQAAYQAAAA==eF7t0scVglAAAEHgqZgBA2ZExdR/gx6YCpDj38s0sEnUlgR7ccAhR0w55oRTzjjngktmzFlwxTU33LLkjnseeOSJZ15Y8cqaN975YMMnX3zzwy/j4F+GD9u6fvgD+gwHCA==
    </DataArray>
   </CellData>
@ -1,11 +1,16 @@
 <?xml version="1.0"?>
 <VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
  <RectilinearGrid WholeExtent="0 8 0 5 0 4">
+  <FieldData>
+   <Array type="String" Name="comments" NumberOfTuples="1" format="binary">
+    AQAAAACAAABJAAAATgAAAA==eF4FwUEKgCAUBNCO4rIWX8ZJsbxA5/iUFqQVBJ2/9zZt+p52yXeza816mW+0sBCtz6HCGGSPE1wJjMX0BCGYhTQuJLrkKfDA0P0d3xK6
+   </Array>
+  </FieldData>
   <Piece Extent="0 8 0 5 0 4">
    <PointData>
    </PointData>
    <CellData>
-    <DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="41">
+    <DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
     AQAAAACAAAAABQAAZAAAAA==eF7t0scRglAAQEEBAyZUMCuomPtv0ANbgMNw/O+yDbyo1xQFWxkzYZ8DDjliyjEnnHLGOTMuuOSKOQuuueGWO+554JEnnlmy4oVX3ljzzgeffPHND7+Mg50aPmz698MfmvQHCg==
    </DataArray>
   </CellData>
@ -1,11 +1,16 @@
 <?xml version="1.0"?>
 <VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
  <RectilinearGrid WholeExtent="0 8 0 5 0 4">
+  <FieldData>
+   <Array type="String" Name="comments" NumberOfTuples="1" format="binary">
+    AQAAAACAAABJAAAATwAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMoM9Yz0DPQTcwpyEjUNTI31U03tzAwTDM1Mk9T0DAyMDLQNbDUNTJSMDS1MjK0MgFyTQwMNBkAHc8SuA==
+   </Array>
+  </FieldData>
   <Piece Extent="0 8 0 5 0 4">
    <PointData>
    </PointData>
    <CellData>
-    <DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="2" RangeMax="41">
+    <DataArray type="Int64" Name="material" format="binary" RangeMin="2" RangeMax="41">
     AQAAAACAAAAABQAAZAAAAA==eF7t0rcSglAARFEHE0bAgBkE8///oAWnF8b2bXP6nRv1mkXBv+xzwCFHHDPmhFPOOOeCSyZMmXHFNTfcMueOex545IlnXliw5JUVa95454NPvvjmh79+DXYzdNisbYdfSqMHMg==
    </DataArray>
   </CellData>
@ -1,11 +1,16 @@
 <?xml version="1.0"?>
 <VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
  <RectilinearGrid WholeExtent="0 8 0 5 0 4">
+  <FieldData>
+   <Array type="String" Name="comments" NumberOfTuples="1" format="binary">
+    AQAAAACAAABJAAAATwAAAA==eF4FwdEJgDAMBUBH6ad+JLzElmoXcI6grYKtCoLze7dZs/fkJd+N15rtct/IYJDV5zDSGGiPE6QEjcX1CgVhJlUnIakkLwQPDN0PHdcSuQ==
+   </Array>
+  </FieldData>
   <Piece Extent="0 8 0 5 0 4">
    <PointData>
    </PointData>
    <CellData>
-    <DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="2">
+    <DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="2">
     AQAAAACAAAAABQAAIAAAAA==eF5jZIAAxlF6lB4AmokAPdj1DzRNyP2jNH4aAMufANU=
    </DataArray>
   </CellData>
@ -1,11 +1,16 @@
 <?xml version="1.0"?>
 <VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
  <RectilinearGrid WholeExtent="0 8 0 5 0 4">
+  <FieldData>
+   <Array type="String" Name="comments" NumberOfTuples="1" format="binary">
+    AQAAAACAAABJAAAATwAAAA==eF4FwVEKgCAQBcCO4md97PJcE9MLdI6ltCCtIOj8zuza9Lt4zU/jrWa9ze8YDNL6nkoSPB1hgS1eQjGjQECIJGKsT2KTi4QZmIYOHg4SwA==
+   </Array>
+  </FieldData>
   <Piece Extent="0 8 0 5 0 4">
    <PointData>
    </PointData>
    <CellData>
-    <DataArray type="Int64" Name="materialpoint" format="binary" RangeMin="1" RangeMax="2">
+    <DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="2">
     AQAAAACAAAAABQAAMAAAAA==eF5jYoAAJhw0IwEalz566aeUptT+oa6fUppS+4e6fkppSu0f6voppSm1HwBAngDh
    </DataArray>
   </CellData>
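
All twelve reference files above change in the same way: a FieldData block carrying a base64-encoded comments array is added, and the cell data array is renamed from materialpoint to material. For orientation, a minimal sketch of producing such a file with the high-level wrapper; method names are taken from the tests and the help() output further down, and the physical size is a placeholder:

    # Sketch only; assumes damask.VTK.from_rectilinearGrid / add / save as used elsewhere in this commit.
    import numpy as np
    import damask

    cells = np.array([8,5,4])                               # matches WholeExtent "0 8 0 5 0 4"
    v = damask.VTK.from_rectilinearGrid(cells,np.ones(3))   # unit-cube size, chosen for illustration
    v.add(np.random.randint(1,42,cells.prod()),'material')  # cell data is now labeled 'material'
    v.save('example.vtr')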
@ -0,0 +1,42 @@
homogenization:
  SX:
    mech: {type: none}
  Taylor:
    mech: {type: isostrain, N_constituents: 2}

microstructure:
  - constituents:
      - fraction: 1.0
        orientation: [1.0, 0.0, 0.0, 0.0]
        phase: Aluminum
    homogenization: SX
  - constituents:
      - fraction: 1.0
        orientation: [0.7936696712125002, -0.28765777461664166, -0.3436487135089419, 0.4113964260949434]
        phase: Aluminum
    homogenization: SX
  - constituents:
      - fraction: 1.0
        orientation: [0.3986143167493579, -0.7014883552495493, 0.2154871765709027, 0.5500781677772945]
        phase: Aluminum
    homogenization: SX
  - homogenization: Taylor
    constituents:
      - fraction: .5
        orientation: [0.28645844315788244, -0.022571491243423537, -0.467933059311115, -0.8357456192708106]
        phase: Aluminum
      - fraction: .5
        orientation: [0.3986143167493579, -0.7014883552495493, 0.2154871765709027, 0.5500781677772945]
        phase: Steel

phase:
  Aluminum:
    elasticity: {C_11: 106.75e9, C_12: 60.41e9, C_44: 28.34e9, type: hooke}
    generic:
      output: [F, P, Fe, Fp, Lp]
    lattice: fcc
  Steel:
    elasticity: {C_11: 233.3e9, C_12: 135.5e9, C_44: 118.0e9, type: hooke}
    generic:
      output: [F, P, Fe, Fp, Lp]
    lattice: bcc
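
This new reference configuration pairs three single-crystal points (SX) with one two-constituent Taylor point. A sketch of loading and validating it, assuming the damask.Material class exercised by the new TestMaterial file further down:

    import damask

    m = damask.Material.load('material.yaml')  # parse the YAML above
    assert m.is_valid                          # unit quaternions, known lattices, fractions summing to 1
    assert m.is_complete                       # every referenced phase/homogenization is defined
    m.save('material_copy.yaml')               # round-trip, as in test_load_save below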
@ -0,0 +1,276 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import damask\n",
    "\n",
    "from pathlib import Path"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "orientations,rODF = damask.Rotation.from_ODF('hybridIA_ODF.txt',\n",
    "                                             2**14,\n",
    "                                             degrees=True,\n",
    "                                             reconstruct=True,\n",
    "                                             fractions=True,\n",
    "                                             seed=0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "VTK = damask.VTK.from_rectilinearGrid([36,36,36],[90,90,90])\n",
    "VTK.add(damask.Table.from_ASCII('hybridIA_ODF.txt').get('intensity'),'intensity')\n",
    "VTK.add(rODF.flatten(order='F'),'rODF')\n",
    "VTK.to_file('hybridIA_ODF.vtr')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Help on class VTK in module damask._vtk:\n",
      "\n",
      "class VTK(builtins.object)\n",
      " |  VTK(geom)\n",
      " |  \n",
      " |  Spatial visualization (and potentially manipulation).\n",
      " |  \n",
      " |  High-level interface to VTK.\n",
      " |  \n",
      " |  Methods defined here:\n",
      " |  \n",
      " |  __init__(self, geom)\n",
      " |      Set geometry and topology.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      geom : subclass of vtk.vtkDataSet\n",
      " |          Description of geometry and topology. Valid types are vtk.vtkRectilinearGrid,\n",
      " |          vtk.vtkUnstructuredGrid, or vtk.vtkPolyData.\n",
      " |  \n",
      " |  __repr__(self)\n",
      " |      ASCII representation of the VTK data.\n",
      " |  \n",
      " |  add(self, data, label=None)\n",
      " |      Add data to either cells or points.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      data : numpy.ndarray\n",
      " |          Data to add. First dimension need to match either\n",
      " |          number of cells or number of points\n",
      " |      label : str\n",
      " |          Data label.\n",
      " |  \n",
      " |  add_comments(self, comments)\n",
      " |      Add Comments.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      comments : str or list of str\n",
      " |          Comments to add.\n",
      " |  \n",
      " |  get(self, label)\n",
      " |      Get either cell or point data.\n",
      " |      \n",
      " |      Cell data takes precedence over point data, i.e. this\n",
      " |      function assumes that labels are unique among cell and\n",
      " |      point data.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      label : str\n",
      " |          Data label.\n",
      " |  \n",
      " |  get_comments(self)\n",
      " |      Return the comments.\n",
      " |  \n",
      " |  set_comments(self, comments)\n",
      " |      Set Comments.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      comments : str or list of str\n",
      " |          Comments.\n",
      " |  \n",
      " |  show(self)\n",
      " |      Render.\n",
      " |      \n",
      " |      See http://compilatrix.com/article/vtk-1 for further ideas.\n",
      " |  \n",
      " |  write(self, fname, parallel=True)\n",
      " |      Write to file.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      fname : str or pathlib.Path\n",
      " |          Filename for writing.\n",
      " |      parallel : boolean, optional\n",
      " |          Write data in parallel background process. Defaults to True.\n",
      " |  \n",
      " |  ----------------------------------------------------------------------\n",
      " |  Static methods defined here:\n",
      " |  \n",
      " |  from_file(fname, dataset_type=None)\n",
      " |      Create VTK from file.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      fname : str or pathlib.Path\n",
      " |          Filename for reading. Valid extensions are .vtr, .vtu, .vtp, and .vtk.\n",
      " |      dataset_type : str, optional\n",
      " |          Name of the vtk.vtkDataSet subclass when opening an .vtk file. Valid types are vtkRectilinearGrid,\n",
      " |          vtkUnstructuredGrid, and vtkPolyData.\n",
      " |  \n",
      " |  from_polyData(points)\n",
      " |      Create VTK of type vtk.polyData.\n",
      " |      \n",
      " |      This is the common type for point-wise data.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      points : numpy.ndarray of shape (:,3)\n",
      " |          Spatial position of the points.\n",
      " |  \n",
      " |  from_rectilinearGrid(grid, size, origin=array([0., 0., 0.]))\n",
      " |      Create VTK of type vtk.vtkRectilinearGrid.\n",
      " |      \n",
      " |      This is the common type for results from the grid solver.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      grid : numpy.ndarray of shape (3) of np.dtype = int\n",
      " |          Number of cells.\n",
      " |      size : numpy.ndarray of shape (3)\n",
      " |          Physical length.\n",
      " |      origin : numpy.ndarray of shape (3), optional\n",
      " |          Spatial origin.\n",
      " |  \n",
      " |  from_unstructuredGrid(nodes, connectivity, cell_type)\n",
      " |      Create VTK of type vtk.vtkUnstructuredGrid.\n",
      " |      \n",
      " |      This is the common type for results from FEM solvers.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      nodes : numpy.ndarray of shape (:,3)\n",
      " |          Spatial position of the nodes.\n",
      " |      connectivity : numpy.ndarray of np.dtype = int\n",
      " |          Cell connectivity (0-based), first dimension determines #Cells, second dimension determines #Nodes/Cell.\n",
      " |      cell_type : str\n",
      " |          Name of the vtk.vtkCell subclass. Tested for TRIANGLE, QUAD, TETRA, and HEXAHEDRON.\n",
      " |  \n",
      " |  ----------------------------------------------------------------------\n",
      " |  Data descriptors defined here:\n",
      " |  \n",
      " |  __dict__\n",
      " |      dictionary for instance variables (if defined)\n",
      " |  \n",
      " |  __weakref__\n",
      " |      list of weak references to the object (if defined)\n",
      "\n"
     ]
    }
   ],
   "source": [
    "help(damask.VTK)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [],
   "source": [
    "a,b=np.radians(([90,90],[45,45]))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([1.57079633, 1.57079633])"
      ]
     },
     "execution_count": 19,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "a"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([0.78539816, 0.78539816])"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "b"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
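
The notebook samples 2**14 orientations from an ODF via hybrid integer approximation and writes the reconstructed intensity to a .vtr file. A condensed restatement of that workflow as a plain script (same calls as the notebook cells; 'hybridIA_ODF.txt' is the notebook's own example input):

    import damask

    # hybrid-IA sampling, as in cell 2 of the notebook above
    orientations,rODF = damask.Rotation.from_ODF('hybridIA_ODF.txt',
                                                 2**14,
                                                 degrees=True,
                                                 reconstruct=True,
                                                 fractions=True,
                                                 seed=0)
    v = damask.VTK.from_rectilinearGrid([36,36,36],[90,90,90])
    v.add(rODF.flatten(order='F'),'rODF')  # reconstructed ODF on the 36^3 Euler grid
    v.to_file('hybridIA_ODF.vtr')          # the notebook still uses to_file; renamed to save elsewhere in this commit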
@ -75,41 +75,36 @@ class TestColormap:
         assert np.allclose(Colormap._xyz2msh(xyz),msh,atol=1.e-6,rtol=0)


-    @pytest.mark.parametrize('format',['ASCII','paraview','GOM','Gmsh'])
+    @pytest.mark.parametrize('format',['ASCII','paraview','GOM','gmsh'])
     @pytest.mark.parametrize('model',['rgb','hsv','hsl','xyz','lab','msh'])
     def test_from_range(self,model,format,tmpdir):
         N = np.random.randint(2,256)
-        c = Colormap.from_range(np.random.rand(3),np.random.rand(3),model=model,N=N)
-        c.to_file(tmpdir/'color_out',format=format)
+        c = Colormap.from_range(np.random.rand(3),np.random.rand(3),model=model,N=N) # noqa
+        eval(f'c.save_{format}(tmpdir/"color_out")')

-    @pytest.mark.parametrize('format',['ASCII','paraview','GOM','Gmsh'])
+    @pytest.mark.parametrize('format',['ASCII','paraview','GOM','gmsh'])
     @pytest.mark.parametrize('name',['strain','gnuplot','Greys','PRGn','viridis'])
     def test_from_predefined(self,name,format,tmpdir):
         N = np.random.randint(2,256)
-        c = Colormap.from_predefined(name,N)
+        c = Colormap.from_predefined(name,N) # noqa
         os.chdir(tmpdir)
-        c.to_file(format=format)
+        eval(f'c.save_{format}()')

     @pytest.mark.parametrize('format,name',[('ASCII','test.txt'),
                                             ('paraview','test.json'),
                                             ('GOM','test.legend'),
-                                            ('Gmsh','test.msh')
+                                            ('gmsh','test.msh')
                                            ])
     def test_write_filehandle(self,format,name,tmpdir):
-        c = Colormap.from_predefined('Dark2')
+        c = Colormap.from_predefined('Dark2') # noqa
         fname = tmpdir/name
-        with open(fname,'w') as f:
-            c.to_file(f,format=format)
+        with open(fname,'w') as f: # noqa
+            eval(f'c.save_{format}(f)')
         for i in range(10):
             if fname.exists(): return
             time.sleep(.5)
         assert False

-    def test_write_invalid_format(self):
-        c = Colormap.from_predefined('Dark2')
-        with pytest.raises(ValueError):
-            c.to_file(format='invalid')

     @pytest.mark.parametrize('model',['rgb','hsv','hsl','lab','invalid'])
     def test_invalid_color(self,model):
         with pytest.raises(ValueError):

@ -119,13 +114,13 @@ class TestColormap:
         c_1 = Colormap.from_predefined('stress')
         c_2 = c_1.reversed()
         assert (not np.allclose(c_1.colors,c_2.colors)) and \
                np.allclose(c_1.colors,c_2.reversed().colors)

     def test_invert(self):
         c_1 = Colormap.from_predefined('strain')
         c_2 = ~c_1
-        assert (not np.allclose(c_1.colors,c_2.colors)) and \
+        assert (not np.allclose(c_1.colors, c_2.colors)) and \
                np.allclose(c_1.colors,(~c_2).colors)

     def test_add(self):
         c = Colormap.from_predefined('jet')

@ -149,16 +144,16 @@ class TestColormap:
     @pytest.mark.parametrize('format,ext',[('ASCII','.txt'),
                                            ('paraview','.json'),
                                            ('GOM','.legend'),
-                                           ('Gmsh','.msh')
+                                           ('gmsh','.msh')
                                           ])
     def test_compare_reference(self,format,ext,tmpdir,reference_dir,update):
         name = 'binary'
-        c = Colormap.from_predefined(name)
+        c = Colormap.from_predefined(name) # noqa
         if update:
             os.chdir(reference_dir)
-            c.to_file(format=format)
+            eval(f'c.save_{format}()')
         else:
             os.chdir(tmpdir)
-            c.to_file(format=format)
+            eval(f'c.save_{format}()')
         time.sleep(.5)
         assert filecmp.cmp(tmpdir/(name+ext),reference_dir/(name+ext))
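
The format= keyword of Colormap.to_file is replaced by one writer per format (the tests build the method name via eval(f'c.save_{format}')), which is also why test_write_invalid_format is dropped: an unknown format now fails at attribute lookup rather than raising ValueError. A usage sketch, with the writer names inferred from the test matrix above:

    import damask

    c = damask.Colormap.from_predefined('viridis',256)
    c.save_paraview('viridis.json')  # assumed per-format writers: save_ASCII, save_paraview, save_GOM, save_gmsh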
@ -43,34 +43,26 @@ class TestGeom:


     def test_write_read_str(self,default,tmpdir):
-        default.to_file(str(tmpdir/'default.geom'),format='ASCII')
-        new = Geom.from_file(str(tmpdir/'default.geom'))
+        default.save_ASCII(str(tmpdir/'default.geom'))
+        new = Geom.load_ASCII(str(tmpdir/'default.geom'))
         assert geom_equal(default,new)


     def test_write_read_file(self,default,tmpdir):
         with open(tmpdir/'default.geom','w') as f:
-            default.to_file(f,format='ASCII',pack=True)
+            default.save_ASCII(f,compress=True)
         with open(tmpdir/'default.geom') as f:
-            new = Geom.from_file(f)
-        assert geom_equal(default,new)
-
-
-    def test_write_as_ASCII(self,default,tmpdir):
-        with open(tmpdir/'str.geom','w') as f:
-            f.write(default.as_ASCII())
-        with open(tmpdir/'str.geom') as f:
-            new = Geom.from_file(f)
+            new = Geom.load_ASCII(f)
         assert geom_equal(default,new)


     def test_read_write_vtr(self,default,tmpdir):
-        default.to_file(tmpdir/'default',format='vtr')
+        default.save(tmpdir/'default')
         for _ in range(10):
             time.sleep(.2)
             if os.path.exists(tmpdir/'default.vtr'): break

-        new = Geom.from_vtr(tmpdir/'default.vtr')
+        new = Geom.load(tmpdir/'default.vtr')
         assert geom_equal(new,default)

@ -79,23 +71,23 @@ class TestGeom:
             f.write('this is not a valid header')
         with open('invalid_file','r') as f:
             with pytest.raises(TypeError):
-                Geom.from_file(f)
+                Geom.load_ASCII(f)


     def test_invalid_vtr(self,tmpdir):
         v = VTK.from_rectilinearGrid(np.random.randint(5,10,3)*2,np.random.random(3) + 1.0)
-        v.to_file(tmpdir/'no_materialpoint.vtr')
+        v.save(tmpdir/'no_materialpoint.vtr')
         for _ in range(10):
             time.sleep(.2)
             if os.path.exists(tmpdir/'no_materialpoint.vtr'): break
         with pytest.raises(ValueError):
-            Geom.from_vtr(tmpdir/'no_materialpoint.vtr')
+            Geom.load(tmpdir/'no_materialpoint.vtr')


-    @pytest.mark.parametrize('pack',[True,False])
-    def test_pack(self,default,tmpdir,pack):
-        default.to_file(tmpdir/'default.geom',format='ASCII',pack=pack)
-        new = Geom.from_file(tmpdir/'default.geom')
+    @pytest.mark.parametrize('compress',[True,False])
+    def test_compress(self,default,tmpdir,compress):
+        default.save_ASCII(tmpdir/'default.geom',compress=compress)
+        new = Geom.load_ASCII(tmpdir/'default.geom')
         assert geom_equal(new,default)

@ -125,11 +117,6 @@ class TestGeom:
             Geom(materials)


-    def test_invalid_write_format(self,default):
-        with pytest.raises(TypeError):
-            default.to_file(format='invalid')
-
-
     @pytest.mark.parametrize('directions,reflect',[
                                                    (['x'],        False),
                                                    (['x','y','z'],True),

@ -141,8 +128,8 @@ class TestGeom:
         modified = default.mirror(directions,reflect)
         tag = f'directions={"-".join(directions)}_reflect={reflect}'
         reference = reference_dir/f'mirror_{tag}.geom'
-        if update: modified.to_file(reference)
-        assert geom_equal(Geom.from_file(reference),
+        if update: modified.save_ASCII(reference)
+        assert geom_equal(Geom.load_ASCII(reference),
                           modified)

@ -163,8 +150,8 @@ class TestGeom:
         modified = default.flip(directions)
         tag = f'directions={"-".join(directions)}'
         reference = reference_dir/f'flip_{tag}.geom'
-        if update: modified.to_file(reference)
-        assert geom_equal(Geom.from_file(reference),
+        if update: modified.save_ASCII(reference)
+        assert geom_equal(Geom.load_ASCII(reference),
                           modified)

@ -190,11 +177,11 @@ class TestGeom:
         current = default.clean(stencil,selection,periodic)
         reference = reference_dir/f'clean_{stencil}_{"+".join(map(str,[None] if selection is None else selection))}_{periodic}'
         if update and stencil > 1:
-            current.to_file(reference,format='vtr')
+            current.save(reference)
             for _ in range(10):
                 time.sleep(.2)
                 if os.path.exists(reference.with_suffix('.vtr')): break
-        assert geom_equal(Geom.from_vtr(reference) if stencil > 1 else default,
+        assert geom_equal(Geom.load(reference) if stencil > 1 else default,
                           current
                          )

@ -212,8 +199,8 @@ class TestGeom:
         modified = default.scale(grid)
         tag = f'grid={util.srepr(grid,"-")}'
         reference = reference_dir/f'scale_{tag}.geom'
-        if update: modified.to_file(reference)
-        assert geom_equal(Geom.from_file(reference),
+        if update: modified.save_ASCII(reference)
+        assert geom_equal(Geom.load_ASCII(reference),
                           modified)

@ -255,8 +242,8 @@ class TestGeom:
         modified = default.rotate(Rotation.from_Eulers(Eulers,degrees=True))
         tag = f'Eulers={util.srepr(Eulers,"-")}'
         reference = reference_dir/f'rotate_{tag}.geom'
-        if update: modified.to_file(reference)
-        assert geom_equal(Geom.from_file(reference),
+        if update: modified.save_ASCII(reference)
+        assert geom_equal(Geom.load_ASCII(reference),
                           modified)
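
Geom I/O converges on the same naming: save/load for the .vtr form and save_ASCII/load_ASCII (with pack= renamed to compress=) for the legacy geom format. A round-trip sketch under those assumptions:

    from damask import Geom

    geom = Geom.load_ASCII('default.geom')        # legacy ASCII geometry
    geom.save_ASCII('packed.geom',compress=True)  # formerly to_file(...,format='ASCII',pack=True)
    geom.save('default')                          # writes default.vtr (a background process in the tests)
    same = Geom.load('default.vtr')               # expects a 'material' cell array, see test_invalid_vtr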
@ -0,0 +1,61 @@
import os

import pytest

from damask import Material

@pytest.fixture
def reference_dir(reference_dir_base):
    """Directory containing reference results."""
    return reference_dir_base/'Material'


class TestMaterial:

    @pytest.mark.parametrize('fname',[None,'test.yaml'])
    def test_load_save(self,reference_dir,tmp_path,fname):
        reference = Material.load(reference_dir/'material.yaml')
        os.chdir(tmp_path)
        if fname is None:
            reference.save()
            new = Material.load('material.yaml')
        else:
            reference.save(fname)
            new = Material.load(fname)
        assert reference == new

    def test_valid_complete(self,reference_dir):
        material_config = Material.load(reference_dir/'material.yaml')
        assert material_config.is_valid and material_config.is_complete

    def test_invalid_lattice(self,reference_dir):
        material_config = Material.load(reference_dir/'material.yaml')
        material_config['phase']['Aluminum']['lattice']='fxc'
        assert not material_config.is_valid

    def test_invalid_orientation(self,reference_dir):
        material_config = Material.load(reference_dir/'material.yaml')
        material_config['microstructure'][0]['constituents'][0]['orientation']=[0,0,0,0]
        assert not material_config.is_valid

    def test_invalid_fraction(self,reference_dir):
        material_config = Material.load(reference_dir/'material.yaml')
        material_config['microstructure'][0]['constituents'][0]['fraction']=.9
        assert not material_config.is_valid


    @pytest.mark.parametrize('item',['homogenization','phase','microstructure'])
    def test_incomplete_missing(self,reference_dir,item):
        material_config = Material.load(reference_dir/'material.yaml')
        del material_config[item]
        assert not material_config.is_complete

    def test_incomplete_wrong_phase(self,reference_dir):
        material_config = Material.load(reference_dir/'material.yaml')
        new = material_config.microstructure_rename_phase({'Steel':'FeNbC'})
        assert not new.is_complete

    def test_incomplete_wrong_homogenization(self,reference_dir):
        material_config = Material.load(reference_dir/'material.yaml')
        new = material_config.microstructure_rename_homogenization({'Taylor':'isostrain'})
        assert not new.is_complete
@ -106,8 +106,8 @@ class TestOrientation:
         coords = np.array([(1,i+1) for i,x in enumerate(eu)])
         table = Table(eu,{'Eulers':(3,)})
         table = table.add('pos',coords)
-        table.to_ASCII(reference)
-        assert np.allclose(eu,Table.from_ASCII(reference).get('Eulers'))
+        table.save(reference)
+        assert np.allclose(eu,Table.load(reference).get('Eulers'))

     @pytest.mark.parametrize('lattice',Lattice.lattices)
     def test_disorientation360(self,lattice):

@ -129,4 +129,3 @@ class TestOrientation:
         eqs = [r for r in R_1.equivalent]
         R_2 = Orientation.from_average(eqs)
         assert np.allclose(R_1.rotation.quaternion,R_2.rotation.quaternion)
-
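
The Table calls follow the out-of-place pattern introduced on this branch: add() returns a new Table rather than mutating in place, and to_ASCII/from_ASCII become save/load. A self-contained sketch (array shapes chosen only for illustration):

    import numpy as np
    from damask import Table

    eu = np.random.rand(5,3)
    table = Table(eu,{'Eulers':(3,)})             # data plus column-shape dictionary
    table = table.add('pos',np.random.rand(5,2))  # out of place: rebind the result
    table.save('orientations.txt')
    assert np.allclose(eu,Table.load('orientations.txt').get('Eulers'))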
@ -339,8 +339,8 @@ class TestResult:
     @pytest.mark.parametrize('output',['F',[],['F','P']])
     def test_vtk(self,tmp_path,default,output):
         os.chdir(tmp_path)
-        default.to_vtk(output)
+        default.save_vtk(output)

     def test_XDMF(self,tmp_path,single_phase):
         os.chdir(tmp_path)
-        single_phase.write_XDMF()
+        single_phase.save_XDMF()
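
Result exports get the same rename (to_vtk becomes save_vtk, write_XDMF becomes save_XDMF). A sketch; the constructor call and file name here are placeholders, not taken from this diff:

    import damask

    r = damask.Result('simulation.hdf5')  # HDF5 results file (placeholder name)
    r.save_vtk(['F','P'])                 # export the requested datasets as VTK
    r.save_XDMF()                         # XDMF descriptor referencing the HDF5 data in place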
@ -461,7 +461,7 @@ def mul(me, other):
         if other.shape == (3,):
             A = me.quaternion[0]**2.0 - np.dot(me.quaternion[1:],me.quaternion[1:])
             B = 2.0 * np.dot(me.quaternion[1:],other)
-            C = 2.0 * _P*me.quaternion[0]
+            C = 2.0 * _P * me.quaternion[0]

             return A*other + B*me.quaternion[1:] + C * np.cross(me.quaternion[1:],other)
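
The fixture's mul() applies the quaternion sandwich product expanded for a 3-vector: v' = (q0^2 - q.q) v + 2 (q.v) q + 2 P q0 (q x v). A minimal numeric check of that identity; the value of _P is assumed here to be -1 (DAMASK's P convention):

    import numpy as np

    _P = -1                                                 # assumed sign convention
    q  = np.array([np.cos(np.pi/8),0.,0.,np.sin(np.pi/8)])  # 45 degrees about z
    v  = np.array([1.,0.,0.])

    A = q[0]**2.0 - np.dot(q[1:],q[1:])
    B = 2.0 * np.dot(q[1:],v)
    C = 2.0 * _P * q[0]
    v_rot = A*v + B*q[1:] + C*np.cross(q[1:],v)
    print(v_rot)  # ~[0.707, -0.707, 0.] for P = -1; the norm of v is preserved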
@ -496,9 +496,8 @@ class TestRotation:
|
||||||
o = backward(forward(m))
|
o = backward(forward(m))
|
||||||
ok = np.allclose(m,o,atol=atol)
|
ok = np.allclose(m,o,atol=atol)
|
||||||
if np.isclose(rot.as_quaternion()[0],0.0,atol=atol):
|
if np.isclose(rot.as_quaternion()[0],0.0,atol=atol):
|
||||||
ok = ok or np.allclose(m*-1.,o,atol=atol)
|
ok |= np.allclose(m*-1.,o,atol=atol)
|
||||||
print(m,o,rot.as_quaternion())
|
assert ok and np.isclose(np.linalg.norm(o),1.0), f'{m},{o},{rot.as_quaternion()}'
|
||||||
assert ok and np.isclose(np.linalg.norm(o),1.0)
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('forward,backward',[(Rotation._om2qu,Rotation._qu2om),
|
@pytest.mark.parametrize('forward,backward',[(Rotation._om2qu,Rotation._qu2om),
|
||||||
(Rotation._om2eu,Rotation._eu2om),
|
(Rotation._om2eu,Rotation._eu2om),
|
||||||
|
@ -512,8 +511,7 @@ class TestRotation:
|
||||||
m = rot.as_matrix()
|
m = rot.as_matrix()
|
||||||
o = backward(forward(m))
|
o = backward(forward(m))
|
||||||
ok = np.allclose(m,o,atol=atol)
|
ok = np.allclose(m,o,atol=atol)
|
||||||
print(m,o,rot.as_quaternion())
|
assert ok and np.isclose(np.linalg.det(o),1.0), f'{m},{o},{rot.as_quaternion()}'
|
||||||
assert ok and np.isclose(np.linalg.det(o),1.0)
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('forward,backward',[(Rotation._eu2qu,Rotation._qu2eu),
|
@pytest.mark.parametrize('forward,backward',[(Rotation._eu2qu,Rotation._qu2eu),
|
||||||
(Rotation._eu2om,Rotation._om2eu),
|
(Rotation._eu2om,Rotation._om2eu),
|
||||||
|
@ -531,9 +529,9 @@ class TestRotation:
|
||||||
ok = ok or np.allclose(np.where(np.isclose(m,u),m-u,m),np.where(np.isclose(o,u),o-u,o),atol=atol)
|
ok = ok or np.allclose(np.where(np.isclose(m,u),m-u,m),np.where(np.isclose(o,u),o-u,o),atol=atol)
|
||||||
if np.isclose(m[1],0.0,atol=atol) or np.isclose(m[1],np.pi,atol=atol):
|
if np.isclose(m[1],0.0,atol=atol) or np.isclose(m[1],np.pi,atol=atol):
|
||||||
sum_phi = np.unwrap([m[0]+m[2],o[0]+o[2]])
|
sum_phi = np.unwrap([m[0]+m[2],o[0]+o[2]])
|
||||||
ok = ok or np.isclose(sum_phi[0],sum_phi[1],atol=atol)
|
ok |= np.isclose(sum_phi[0],sum_phi[1],atol=atol)
|
||||||
print(m,o,rot.as_quaternion())
|
assert ok and (np.zeros(3)-1.e-9 <= o).all() \
|
||||||
assert ok and (np.zeros(3)-1.e-9 <= o).all() and (o <= np.array([np.pi*2.,np.pi,np.pi*2.])+1.e-9).all()
|
and (o <= np.array([np.pi*2.,np.pi,np.pi*2.])+1.e-9).all(), f'{m},{o},{rot.as_quaternion()}'
|
||||||
|
|
||||||
@pytest.mark.parametrize('forward,backward',[(Rotation._ax2qu,Rotation._qu2ax),
|
@pytest.mark.parametrize('forward,backward',[(Rotation._ax2qu,Rotation._qu2ax),
|
||||||
(Rotation._ax2om,Rotation._om2ax),
|
(Rotation._ax2om,Rotation._om2ax),
|
||||||
|
@ -548,9 +546,8 @@ class TestRotation:
|
||||||
o = backward(forward(m))
|
o = backward(forward(m))
|
||||||
ok = np.allclose(m,o,atol=atol)
|
ok = np.allclose(m,o,atol=atol)
|
||||||
if np.isclose(m[3],np.pi,atol=atol):
|
if np.isclose(m[3],np.pi,atol=atol):
|
||||||
ok = ok or np.allclose(m*np.array([-1.,-1.,-1.,1.]),o,atol=atol)
|
ok |= np.allclose(m*np.array([-1.,-1.,-1.,1.]),o,atol=atol)
|
||||||
print(m,o,rot.as_quaternion())
|
assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) and o[3]<=np.pi+1.e-9, f'{m},{o},{rot.as_quaternion()}'
|
||||||
assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) and o[3]<=np.pi+1.e-9
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('forward,backward',[(Rotation._ro2qu,Rotation._qu2ro),
|
@pytest.mark.parametrize('forward,backward',[(Rotation._ro2qu,Rotation._qu2ro),
|
||||||
#(Rotation._ro2om,Rotation._om2ro),
|
#(Rotation._ro2om,Rotation._om2ro),
|
||||||
|
@ -566,8 +563,7 @@ class TestRotation:
|
||||||
o = backward(forward(m))
|
o = backward(forward(m))
|
||||||
ok = np.allclose(np.clip(m,None,cutoff),np.clip(o,None,cutoff),atol=atol)
|
ok = np.allclose(np.clip(m,None,cutoff),np.clip(o,None,cutoff),atol=atol)
|
||||||
ok = ok or np.isclose(m[3],0.0,atol=atol)
|
ok = ok or np.isclose(m[3],0.0,atol=atol)
|
||||||
print(m,o,rot.as_quaternion())
|
assert ok and np.isclose(np.linalg.norm(o[:3]),1.0), f'{m},{o},{rot.as_quaternion()}'
|
||||||
assert ok and np.isclose(np.linalg.norm(o[:3]),1.0)
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('forward,backward',[(Rotation._ho2qu,Rotation._qu2ho),
|
@pytest.mark.parametrize('forward,backward',[(Rotation._ho2qu,Rotation._qu2ho),
|
||||||
(Rotation._ho2om,Rotation._om2ho),
|
(Rotation._ho2om,Rotation._om2ho),
|
||||||
|
@ -581,8 +577,7 @@ class TestRotation:
|
||||||
m = rot.as_homochoric()
|
m = rot.as_homochoric()
|
||||||
o = backward(forward(m))
|
o = backward(forward(m))
|
||||||
ok = np.allclose(m,o,atol=atol)
|
ok = np.allclose(m,o,atol=atol)
|
||||||
print(m,o,rot.as_quaternion())
|
assert ok and np.linalg.norm(o) < _R1 + 1.e-9, f'{m},{o},{rot.as_quaternion()}'
|
||||||
assert ok and np.linalg.norm(o) < _R1 + 1.e-9
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('forward,backward',[(Rotation._cu2qu,Rotation._qu2cu),
|
@pytest.mark.parametrize('forward,backward',[(Rotation._cu2qu,Rotation._qu2cu),
|
||||||
(Rotation._cu2om,Rotation._om2cu),
|
(Rotation._cu2om,Rotation._om2cu),
|
||||||
|
@ -598,8 +593,7 @@ class TestRotation:
|
||||||
ok = np.allclose(m,o,atol=atol)
|
ok = np.allclose(m,o,atol=atol)
|
||||||
if np.count_nonzero(np.isclose(np.abs(o),np.pi**(2./3.)*.5)):
|
if np.count_nonzero(np.isclose(np.abs(o),np.pi**(2./3.)*.5)):
|
||||||
ok = ok or np.allclose(m*-1.,o,atol=atol)
|
ok = ok or np.allclose(m*-1.,o,atol=atol)
|
||||||
print(m,o,rot.as_quaternion())
|
assert ok and np.max(np.abs(o)) < np.pi**(2./3.) * 0.5 + 1.e-9, f'{m},{o},{rot.as_quaternion()}'
|
||||||
assert ok and np.max(np.abs(o)) < np.pi**(2./3.) * 0.5 + 1.e-9
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('vectorized, single',[(Rotation._qu2om,qu2om),
|
@pytest.mark.parametrize('vectorized, single',[(Rotation._qu2om,qu2om),
|
||||||
(Rotation._qu2eu,qu2eu),
|
(Rotation._qu2eu,qu2eu),
|
||||||
|
@ -612,8 +606,7 @@ class TestRotation:
|
||||||
vectorized(qu.reshape(qu.shape[0]//2,-1,4))
|
vectorized(qu.reshape(qu.shape[0]//2,-1,4))
|
||||||
co = vectorized(qu)
|
co = vectorized(qu)
|
||||||
for q,c in zip(qu,co):
|
for q,c in zip(qu,co):
|
||||||
print(q,c)
|
assert np.allclose(single(q),c) and np.allclose(single(q),vectorized(q)), f'{q},{c}'
|
||||||
assert np.allclose(single(q),c) and np.allclose(single(q),vectorized(q))
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('vectorized, single',[(Rotation._om2qu,om2qu),
|
@pytest.mark.parametrize('vectorized, single',[(Rotation._om2qu,om2qu),
|
||||||
|
@ -625,8 +618,7 @@ class TestRotation:
|
||||||
vectorized(om.reshape(om.shape[0]//2,-1,3,3))
|
vectorized(om.reshape(om.shape[0]//2,-1,3,3))
|
||||||
co = vectorized(om)
|
co = vectorized(om)
|
||||||
for o,c in zip(om,co):
|
for o,c in zip(om,co):
|
||||||
print(o,c)
|
assert np.allclose(single(o),c) and np.allclose(single(o),vectorized(o)), f'{o},{c}'
|
||||||
assert np.allclose(single(o),c) and np.allclose(single(o),vectorized(o))
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('vectorized, single',[(Rotation._eu2qu,eu2qu),
|
@pytest.mark.parametrize('vectorized, single',[(Rotation._eu2qu,eu2qu),
|
||||||
(Rotation._eu2om,eu2om),
|
(Rotation._eu2om,eu2om),
|
||||||
|
@ -638,8 +630,7 @@ class TestRotation:
|
||||||
vectorized(eu.reshape(eu.shape[0]//2,-1,3))
|
vectorized(eu.reshape(eu.shape[0]//2,-1,3))
|
||||||
co = vectorized(eu)
|
co = vectorized(eu)
|
||||||
for e,c in zip(eu,co):
|
for e,c in zip(eu,co):
|
||||||
print(e,c)
|
assert np.allclose(single(e),c) and np.allclose(single(e),vectorized(e)), f'{e},{c}'
|
||||||
assert np.allclose(single(e),c) and np.allclose(single(e),vectorized(e))
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('vectorized, single',[(Rotation._ax2qu,ax2qu),
|
@pytest.mark.parametrize('vectorized, single',[(Rotation._ax2qu,ax2qu),
|
||||||
(Rotation._ax2om,ax2om),
|
(Rotation._ax2om,ax2om),
|
||||||
|
@ -651,8 +642,7 @@ class TestRotation:
|
||||||
vectorized(ax.reshape(ax.shape[0]//2,-1,4))
|
vectorized(ax.reshape(ax.shape[0]//2,-1,4))
|
||||||
co = vectorized(ax)
|
co = vectorized(ax)
|
||||||
for a,c in zip(ax,co):
|
for a,c in zip(ax,co):
|
||||||
print(a,c)
|
assert np.allclose(single(a),c) and np.allclose(single(a),vectorized(a)), f'{a},{c}'
|
||||||
assert np.allclose(single(a),c) and np.allclose(single(a),vectorized(a))
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('vectorized, single',[(Rotation._ro2ax,ro2ax),
|
@pytest.mark.parametrize('vectorized, single',[(Rotation._ro2ax,ro2ax),
|
||||||
|
@ -663,8 +653,7 @@ class TestRotation:
|
||||||
vectorized(ro.reshape(ro.shape[0]//2,-1,4))
|
vectorized(ro.reshape(ro.shape[0]//2,-1,4))
|
||||||
co = vectorized(ro)
|
co = vectorized(ro)
|
||||||
for r,c in zip(ro,co):
|
for r,c in zip(ro,co):
|
||||||
print(r,c)
|
assert np.allclose(single(r),c) and np.allclose(single(r),vectorized(r)), f'{r},{c}'
|
||||||
assert np.allclose(single(r),c) and np.allclose(single(r),vectorized(r))
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('vectorized, single',[(Rotation._ho2ax,ho2ax),
|
@pytest.mark.parametrize('vectorized, single',[(Rotation._ho2ax,ho2ax),
|
||||||
(Rotation._ho2cu,ho2cu)])
|
(Rotation._ho2cu,ho2cu)])
|
||||||
|
@ -674,8 +663,7 @@ class TestRotation:
|
||||||
vectorized(ho.reshape(ho.shape[0]//2,-1,3))
|
vectorized(ho.reshape(ho.shape[0]//2,-1,3))
|
||||||
co = vectorized(ho)
|
co = vectorized(ho)
|
||||||
for h,c in zip(ho,co):
|
for h,c in zip(ho,co):
|
||||||
print(h,c)
|
assert np.allclose(single(h),c) and np.allclose(single(h),vectorized(h)), f'{h},{c}'
|
||||||
assert np.allclose(single(h),c) and np.allclose(single(h),vectorized(h))
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('vectorized, single',[(Rotation._cu2ho,cu2ho)])
|
@pytest.mark.parametrize('vectorized, single',[(Rotation._cu2ho,cu2ho)])
|
||||||
def test_cubochoric_vectorization(self,set_of_rotations,vectorized,single):
|
def test_cubochoric_vectorization(self,set_of_rotations,vectorized,single):
|
||||||
|
@ -684,8 +672,7 @@ class TestRotation:
|
||||||
vectorized(cu.reshape(cu.shape[0]//2,-1,3))
|
vectorized(cu.reshape(cu.shape[0]//2,-1,3))
|
||||||
co = vectorized(cu)
|
co = vectorized(cu)
|
||||||
for u,c in zip(cu,co):
|
for u,c in zip(cu,co):
|
||||||
print(u,c)
|
assert np.allclose(single(u),c) and np.allclose(single(u),vectorized(u)), f'{u},{c}'
|
||||||
assert np.allclose(single(u),c) and np.allclose(single(u),vectorized(u))
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('func',[Rotation.from_axis_angle])
|
@pytest.mark.parametrize('func',[Rotation.from_axis_angle])
|
||||||
def test_normalization_vectorization(self,func):
|
def test_normalization_vectorization(self,func):
|
||||||
|
@ -703,9 +690,8 @@ class TestRotation:
|
||||||
o = Rotation.from_Eulers(rot.as_Eulers(degrees),degrees).as_quaternion()
|
o = Rotation.from_Eulers(rot.as_Eulers(degrees),degrees).as_quaternion()
|
||||||
ok = np.allclose(m,o,atol=atol)
|
ok = np.allclose(m,o,atol=atol)
|
||||||
if np.isclose(rot.as_quaternion()[0],0.0,atol=atol):
|
if np.isclose(rot.as_quaternion()[0],0.0,atol=atol):
|
||||||
ok = ok or np.allclose(m*-1.,o,atol=atol)
|
ok |= np.allclose(m*-1.,o,atol=atol)
|
||||||
print(m,o,rot.as_quaternion())
|
assert ok and np.isclose(np.linalg.norm(o),1.0), f'{m},{o},{rot.as_quaternion()}'
|
||||||
assert ok and np.isclose(np.linalg.norm(o),1.0)
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('P',[1,-1])
|
@pytest.mark.parametrize('P',[1,-1])
|
||||||
@pytest.mark.parametrize('normalize',[True,False])
|
@pytest.mark.parametrize('normalize',[True,False])
|
||||||
|
@ -717,12 +703,12 @@ class TestRotation:
|
||||||
o = Rotation.from_axis_angle(rot.as_axis_angle(degrees)*c,degrees,normalize,P).as_Eulers()
|
o = Rotation.from_axis_angle(rot.as_axis_angle(degrees)*c,degrees,normalize,P).as_Eulers()
|
||||||
u = np.array([np.pi*2,np.pi,np.pi*2])
|
u = np.array([np.pi*2,np.pi,np.pi*2])
|
||||||
ok = np.allclose(m,o,atol=atol)
|
ok = np.allclose(m,o,atol=atol)
|
||||||
ok = ok or np.allclose(np.where(np.isclose(m,u),m-u,m),np.where(np.isclose(o,u),o-u,o),atol=atol)
|
ok |= np.allclose(np.where(np.isclose(m,u),m-u,m),np.where(np.isclose(o,u),o-u,o),atol=atol)
|
||||||
if np.isclose(m[1],0.0,atol=atol) or np.isclose(m[1],np.pi,atol=atol):
|
if np.isclose(m[1],0.0,atol=atol) or np.isclose(m[1],np.pi,atol=atol):
|
||||||
sum_phi = np.unwrap([m[0]+m[2],o[0]+o[2]])
|
sum_phi = np.unwrap([m[0]+m[2],o[0]+o[2]])
|
||||||
ok = ok or np.isclose(sum_phi[0],sum_phi[1],atol=atol)
|
ok |= np.isclose(sum_phi[0],sum_phi[1],atol=atol)
|
||||||
print(m,o,rot.as_quaternion())
|
assert ok and (np.zeros(3)-1.e-9 <= o).all() \
|
||||||
assert ok and (np.zeros(3)-1.e-9 <= o).all() and (o <= np.array([np.pi*2.,np.pi,np.pi*2.])+1.e-9).all()
|
and (o <= np.array([np.pi*2.,np.pi,np.pi*2.])+1.e-9).all(), f'{m},{o},{rot.as_quaternion()}'
|
||||||
|
|
||||||
def test_matrix(self,set_of_rotations):
|
def test_matrix(self,set_of_rotations):
|
||||||
for rot in set_of_rotations:
|
for rot in set_of_rotations:
|
||||||
|
@ -731,8 +717,8 @@ class TestRotation:
|
||||||
ok = np.allclose(m,o,atol=atol)
|
ok = np.allclose(m,o,atol=atol)
|
||||||
if np.isclose(m[3],np.pi,atol=atol):
|
if np.isclose(m[3],np.pi,atol=atol):
|
||||||
ok = ok or np.allclose(m*np.array([-1.,-1.,-1.,1.]),o,atol=atol)
|
ok = ok or np.allclose(m*np.array([-1.,-1.,-1.,1.]),o,atol=atol)
|
||||||
print(m,o,rot.as_quaternion())
|
assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) \
|
||||||
assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) and o[3]<=np.pi+1.e-9
|
and o[3]<=np.pi+1.e-9, f'{m},{o},{rot.as_quaternion()}'
|
||||||
|
|
||||||
@pytest.mark.parametrize('P',[1,-1])
|
@pytest.mark.parametrize('P',[1,-1])
|
||||||
@pytest.mark.parametrize('normalize',[True,False])
|
@pytest.mark.parametrize('normalize',[True,False])
|
||||||
|
@@ -742,8 +728,7 @@ class TestRotation:
             m = rot.as_matrix()
             o = Rotation.from_Rodrigues(rot.as_Rodrigues()*c,normalize,P).as_matrix()
             ok = np.allclose(m,o,atol=atol)
-            print(m,o)
-            assert ok and np.isclose(np.linalg.det(o),1.0)
+            assert ok and np.isclose(np.linalg.det(o),1.0), f'{m},{o}'

     @pytest.mark.parametrize('P',[1,-1])
     def test_homochoric(self,set_of_rotations,P):
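Reviewer note: the determinant check above guards against improper rotations. For reference, a proper rotation matrix is orthogonal with determinant +1, while a reflection is also orthogonal but has determinant -1:

    import numpy as np

    theta = 0.3
    R = np.array([[np.cos(theta), -np.sin(theta), 0.],
                  [np.sin(theta),  np.cos(theta), 0.],
                  [0., 0., 1.]])
    assert np.allclose(R.T @ R, np.eye(3)) and np.isclose(np.linalg.det(R), 1.0)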
@@ -753,8 +738,7 @@ class TestRotation:
             o = Rotation.from_homochoric(rot.as_homochoric()*P*-1,P).as_Rodrigues()
             ok = np.allclose(np.clip(m,None,cutoff),np.clip(o,None,cutoff),atol=atol)
             ok = ok or np.isclose(m[3],0.0,atol=atol)
-            print(m,o,rot.as_quaternion())
-            assert ok and np.isclose(np.linalg.norm(o[:3]),1.0)
+            assert ok and np.isclose(np.linalg.norm(o[:3]),1.0), f'{m},{o},{rot.as_quaternion()}'

     @pytest.mark.parametrize('P',[1,-1])
     def test_cubochoric(self,set_of_rotations,P):
@@ -762,8 +746,7 @@ class TestRotation:
             m = rot.as_homochoric()
             o = Rotation.from_cubochoric(rot.as_cubochoric()*P*-1,P).as_homochoric()
             ok = np.allclose(m,o,atol=atol)
-            print(m,o,rot.as_quaternion())
-            assert ok and np.linalg.norm(o) < (3.*np.pi/4.)**(1./3.) + 1.e-9
+            assert ok and np.linalg.norm(o) < (3.*np.pi/4.)**(1./3.) + 1.e-9, f'{m},{o},{rot.as_quaternion()}'

     @pytest.mark.parametrize('P',[1,-1])
     @pytest.mark.parametrize('accept_homomorph',[True,False])
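Reviewer note: the bound (3π/4)^(1/3) is the radius of the homochoric ball. The homochoric mapping is volume-preserving: the ball has the same volume π² as SO(3) (half of the 2π² surface volume of the unit quaternion 3-sphere), so (4/3)πR³ = π² gives R = (3π/4)^(1/3):

    import numpy as np

    R = (3.*np.pi/4.)**(1./3.)
    print(np.isclose(4./3.*np.pi*R**3, np.pi**2))  # True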
@@ -774,9 +757,8 @@ class TestRotation:
             o = Rotation.from_quaternion(rot.as_quaternion()*c,accept_homomorph,P).as_cubochoric()
             ok = np.allclose(m,o,atol=atol)
             if np.count_nonzero(np.isclose(np.abs(o),np.pi**(2./3.)*.5)):
-                ok = ok or np.allclose(m*-1.,o,atol=atol)
-            print(m,o,rot.as_quaternion())
-            assert ok and o.max() < np.pi**(2./3.)*0.5+1.e-9
+                ok |= np.allclose(m*-1.,o,atol=atol)
+            assert ok and o.max() < np.pi**(2./3.)*0.5+1.e-9, f'{m},{o},{rot.as_quaternion()}'

     @pytest.mark.parametrize('reciprocal',[True,False])
     def test_basis(self,set_of_rotations,reciprocal):
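Reviewer note: the analogous bound for cubochoric coordinates follows from the cube having volume π² as well, hence edge length π^(2/3) and a per-component limit of π^(2/3)/2; points on opposite cube faces map to the same rotation, which is why the `m*-1.` comparison is admitted on the boundary:

    import numpy as np

    a = np.pi**(2./3.)                 # cubochoric cube edge length
    print(np.isclose(a**3, np.pi**2))  # True; components lie within +/- a/2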
@@ -858,8 +840,7 @@ class TestRotation:
         for rot in set_of_rotations:
             v = rot.broadcast_to((5,)) @ data
             for i in range(data.shape[0]):
-                print(i-data[i])
-                assert np.allclose(mul(rot,data[i]),v[i])
+                assert np.allclose(mul(rot,data[i]),v[i]), f'{i-data[i]}'

     @pytest.mark.parametrize('data',[np.random.rand(3),
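Reviewer note: `rot.broadcast_to((5,)) @ data` applies one rotation to a whole stack of vectors at once; the row-by-row loop is the reference behaviour being tested. The same equivalence in plain numpy (a rotation matrix standing in for the DAMASK Rotation class):

    import numpy as np

    theta = 0.5
    R = np.array([[np.cos(theta), -np.sin(theta), 0.],
                  [np.sin(theta),  np.cos(theta), 0.],
                  [0., 0., 1.]])
    data = np.random.rand(5,3)
    v = np.einsum('ij,nj->ni', R, data)   # rotate all rows at once
    for i in range(data.shape[0]):
        assert np.allclose(R @ data[i], v[i])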
@@ -926,34 +907,39 @@ class TestRotation:
     @pytest.mark.parametrize('sigma',[5,10,15,20])
     @pytest.mark.parametrize('N',[1000,10000,100000])
     def test_spherical_component(self,N,sigma):
-        c = Rotation.from_random()
-        o = Rotation.from_spherical_component(c,sigma,N)
-        _, angles = c.misorientation(o).as_axis_angle(pair=True,degrees=True)
-        angles[::2] *= -1 # flip angle for every second to symmetrize distribution
-
-        p = stats.normaltest(angles)[1]
+        p = []
+        for run in range(5):
+            c = Rotation.from_random()
+            o = Rotation.from_spherical_component(c,sigma,N)
+            _, angles = c.misorientation(o).as_axis_angle(pair=True,degrees=True)
+            angles[::2] *= -1 # flip angle for every second to symmetrize distribution
+
+            p.append(stats.normaltest(angles)[1])

         sigma_out = np.std(angles)
-        print(f'\np: {p}, sigma ratio {sigma/sigma_out}')
-        assert (.9 < sigma/sigma_out < 1.1) and p > 0.001
+        p = np.average(p)
+        assert (.9 < sigma/sigma_out < 1.1) and p > 1e-2, f'{sigma/sigma_out},{p}'
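Reviewer note: a single normality test on random data makes the pipeline flaky, so the test now draws five independent samples and averages the p-values, which permits a stricter threshold (1e-2 instead of 0.001). The fiber-component test below gets the same treatment. A synthetic sketch of the stabilisation (not the DAMASK sampler):

    import numpy as np
    from scipy import stats

    rng = np.random.default_rng()
    p = []
    for run in range(5):
        sample = rng.normal(0.0, 10.0, 10000)  # stand-in for sampled misorientation angles
        p.append(stats.normaltest(sample)[1])
    assert np.average(p) > 1e-2                # averaging damps single-run outliers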

     @pytest.mark.parametrize('sigma',[5,10,15,20])
     @pytest.mark.parametrize('N',[1000,10000,100000])
     def test_from_fiber_component(self,N,sigma):
-        """https://en.wikipedia.org/wiki/Full_width_at_half_maximum."""
-        alpha = np.random.random(2)*np.pi
-        beta = np.random.random(2)*np.pi
+        p = []
+        for run in range(5):
+            alpha = np.random.random()*2*np.pi,np.arccos(np.random.random())
+            beta = np.random.random()*2*np.pi,np.arccos(np.random.random())

             f_in_C = np.array([np.sin(alpha[0])*np.cos(alpha[1]), np.sin(alpha[0])*np.sin(alpha[1]), np.cos(alpha[0])])
             f_in_S = np.array([np.sin(beta[0] )*np.cos(beta[1] ), np.sin(beta[0] )*np.sin(beta[1] ), np.cos(beta[0] )])
             ax = np.append(np.cross(f_in_C,f_in_S), - np.arccos(np.dot(f_in_C,f_in_S)))
             n = Rotation.from_axis_angle(ax if ax[3] > 0.0 else ax*-1.0 ,normalize=True) # rotation to align fiber axis in crystal and sample system

             o = Rotation.from_fiber_component(alpha,beta,np.radians(sigma),N,False)
             angles = np.arccos(np.clip(np.dot(o@np.broadcast_to(f_in_S,(N,3)),n@f_in_S),-1,1))
             dist = np.array(angles) * (np.random.randint(0,2,N)*2-1)

-        p = stats.normaltest(dist)[1]
+            p.append(stats.normaltest(dist)[1])

         sigma_out = np.degrees(np.std(dist))
-        print(f'\np: {p}, sigma ratio {sigma/sigma_out}')
-        assert (.9 < sigma/sigma_out < 1.1) and p > 0.001
+        p = np.average(p)
+        assert (.9 < sigma/sigma_out < 1.1) and p > 1e-2, f'{sigma/sigma_out},{p}'
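Reviewer note: the old `np.random.random(2)*np.pi` drew the polar angle uniformly in [0,π), which over-represents directions near the poles; drawing it as `np.arccos(np.random.random())` makes cos(θ) uniform and hence the fiber direction area-uniform on the hemisphere. A quick check of the idea:

    import numpy as np

    rng = np.random.default_rng()
    theta = np.arccos(rng.random(100000))                    # area-uniform polar angle
    print(np.isclose(np.cos(theta).mean(), 0.5, atol=1e-2))  # True: E[cos(theta)] = 1/2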
@@ -35,50 +35,50 @@ class TestTable:

     @pytest.mark.parametrize('mode',['str','path'])
     def test_write_read(self,default,tmpdir,mode):
-        default.to_file(tmpdir/'default.txt')
+        default.save(tmpdir/'default.txt')
         if mode == 'path':
-            new = Table.from_ASCII(tmpdir/'default.txt')
+            new = Table.load(tmpdir/'default.txt')
         elif mode == 'str':
-            new = Table.from_ASCII(str(tmpdir/'default.txt'))
+            new = Table.load(str(tmpdir/'default.txt'))
         assert all(default.data==new.data) and default.shapes == new.shapes

     def test_write_read_file(self,default,tmpdir):
         with open(tmpdir/'default.txt','w') as f:
-            default.to_file(f)
+            default.save(f)
         with open(tmpdir/'default.txt') as f:
-            new = Table.from_ASCII(f)
+            new = Table.load(f)
         assert all(default.data==new.data) and default.shapes == new.shapes

-    def test_write_read_new_style(self,default,tmpdir):
-        with open(tmpdir/'new_style.txt','w') as f:
-            default.to_file(f,new_style=True)
-        with open(tmpdir/'new_style.txt') as f:
-            new = Table.from_ASCII(f)
+    def test_write_read_legacy_style(self,default,tmpdir):
+        with open(tmpdir/'legacy.txt','w') as f:
+            default.save(f,legacy=True)
+        with open(tmpdir/'legacy.txt') as f:
+            new = Table.load(f)
         assert all(default.data==new.data) and default.shapes == new.shapes

     def test_write_invalid_format(self,default,tmpdir):
         with pytest.raises(TypeError):
-            default.to_file(tmpdir/'shouldnotbethere.txt',format='invalid')
+            default.save(tmpdir/'shouldnotbethere.txt',format='invalid')

     @pytest.mark.parametrize('mode',['str','path'])
     def test_read_ang(self,reference_dir,mode):
         if mode == 'path':
-            new = Table.from_ang(reference_dir/'simple.ang')
+            new = Table.load_ang(reference_dir/'simple.ang')
         elif mode == 'str':
-            new = Table.from_ang(str(reference_dir/'simple.ang'))
+            new = Table.load_ang(str(reference_dir/'simple.ang'))
         assert new.data.shape == (4,10) and \
                new.labels == ['eu', 'pos', 'IQ', 'CI', 'ID', 'intensity', 'fit']

     def test_read_ang_file(self,reference_dir):
         f = open(reference_dir/'simple.ang')
-        new = Table.from_ang(f)
+        new = Table.load_ang(f)
         assert new.data.shape == (4,10) and \
                new.labels == ['eu', 'pos', 'IQ', 'CI', 'ID', 'intensity', 'fit']

     @pytest.mark.parametrize('fname',['datatype-mix.txt','whitespace-mix.txt'])
     def test_read_strange(self,reference_dir,fname):
         with open(reference_dir/fname) as f:
-            Table.from_ASCII(f)
+            Table.load(f)

     def test_set(self,default):
         d = default.set('F',np.zeros((5,3,3)),'set to zero').get('F')
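Reviewer note: this hunk tracks the Table I/O rename: `Table.from_ASCII`/`from_ang` become `Table.load`/`load_ang`, and `to_file` becomes `save` with the old output format kept behind a `legacy` flag. A hedged usage sketch of the renamed API (file names are illustrative):

    import numpy as np
    import damask

    table = damask.Table.load('data.txt')                 # was Table.from_ASCII
    table = table.add('ones', np.ones(len(table.data)))   # add returns a new Table
    table.save('data_extended.txt')                       # was table.to_file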
@@ -166,7 +166,7 @@ class TestTable:
         x = np.random.random((5,12))
         t = Table(x,{'F':(3,3),'v':(3,)},['random test data'])
         unsort = t.get('4_F')
         sort = t.sort_by('4_F').get('4_F')
         assert np.all(np.sort(unsort,0)==sort)

     def test_sort_revert(self):
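Reviewer note: labels like '4_F' address individual scalar components of a multi-dimensional column, so `sort_by('4_F')` orders rows by one component of the (3,3)-shaped 'F' data. Assuming components are numbered row-major starting at 1 (so '4_F' is F[1,0]), a sketch:

    import numpy as np
    from damask import Table

    t = Table(np.random.random((5,12)),{'F':(3,3),'v':(3,)},['example'])
    col = t.get('4_F')   # shape (5,1): one component of F per row
    assert np.all(np.sort(col,0) == t.sort_by('4_F').get('4_F'))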
@@ -179,6 +179,6 @@ class TestTable:
         t = Table(np.array([[0,1,],[2,1,]]),
                   {'v':(2,)},
                   ['test data'])\
                   .add('s',np.array(['b','a']))\
                   .sort_by('s')
         assert np.all(t.get('1_v') == np.array([2,0]).reshape(2,1))
|
@ -32,22 +32,22 @@ class TestVTK:
|
||||||
origin = np.random.random(3)
|
origin = np.random.random(3)
|
||||||
v = VTK.from_rectilinearGrid(grid,size,origin)
|
v = VTK.from_rectilinearGrid(grid,size,origin)
|
||||||
string = v.__repr__()
|
string = v.__repr__()
|
||||||
v.to_file(tmp_path/'rectilinearGrid',False)
|
v.save(tmp_path/'rectilinearGrid',False)
|
||||||
vtr = VTK.from_file(tmp_path/'rectilinearGrid.vtr')
|
vtr = VTK.load(tmp_path/'rectilinearGrid.vtr')
|
||||||
with open(tmp_path/'rectilinearGrid.vtk','w') as f:
|
with open(tmp_path/'rectilinearGrid.vtk','w') as f:
|
||||||
f.write(string)
|
f.write(string)
|
||||||
vtk = VTK.from_file(tmp_path/'rectilinearGrid.vtk','VTK_rectilinearGrid')
|
vtk = VTK.load(tmp_path/'rectilinearGrid.vtk','VTK_rectilinearGrid')
|
||||||
assert(string == vtr.__repr__() == vtk.__repr__())
|
assert(string == vtr.__repr__() == vtk.__repr__())
|
||||||
|
|
||||||
def test_polyData(self,tmp_path):
|
def test_polyData(self,tmp_path):
|
||||||
points = np.random.rand(100,3)
|
points = np.random.rand(100,3)
|
||||||
v = VTK.from_polyData(points)
|
v = VTK.from_polyData(points)
|
||||||
string = v.__repr__()
|
string = v.__repr__()
|
||||||
v.to_file(tmp_path/'polyData',False)
|
v.save(tmp_path/'polyData',False)
|
||||||
vtp = VTK.from_file(tmp_path/'polyData.vtp')
|
vtp = VTK.load(tmp_path/'polyData.vtp')
|
||||||
with open(tmp_path/'polyData.vtk','w') as f:
|
with open(tmp_path/'polyData.vtk','w') as f:
|
||||||
f.write(string)
|
f.write(string)
|
||||||
vtk = VTK.from_file(tmp_path/'polyData.vtk','polyData')
|
vtk = VTK.load(tmp_path/'polyData.vtk','polyData')
|
||||||
assert(string == vtp.__repr__() == vtk.__repr__())
|
assert(string == vtp.__repr__() == vtk.__repr__())
|
||||||
|
|
||||||
@pytest.mark.parametrize('cell_type,n',[
|
@pytest.mark.parametrize('cell_type,n',[
|
||||||
|
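Reviewer note: the VTK class follows the same rename, `to_file` → `save` and `from_file` → `load`; the extension matching the dataset type is appended automatically on write. A hedged round-trip sketch:

    import numpy as np
    from damask import VTK

    v = VTK.from_polyData(np.random.rand(10,3))
    v.save('points', parallel=False)        # writes points.vtp
    assert VTK.load('points.vtp').__repr__() == v.__repr__()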
@@ -62,11 +62,11 @@ class TestVTK:
         connectivity = np.random.choice(np.arange(n),n,False).reshape(-1,n)
         v = VTK.from_unstructuredGrid(nodes,connectivity,cell_type)
         string = v.__repr__()
-        v.to_file(tmp_path/'unstructuredGrid',False)
-        vtu = VTK.from_file(tmp_path/'unstructuredGrid.vtu')
+        v.save(tmp_path/'unstructuredGrid',False)
+        vtu = VTK.load(tmp_path/'unstructuredGrid.vtu')
         with open(tmp_path/'unstructuredGrid.vtk','w') as f:
             f.write(string)
-        vtk = VTK.from_file(tmp_path/'unstructuredGrid.vtk','unstructuredgrid')
+        vtk = VTK.load(tmp_path/'unstructuredGrid.vtk','unstructuredgrid')
         assert(string == vtu.__repr__() == vtk.__repr__())

@@ -75,8 +75,8 @@ class TestVTK:
         v = VTK.from_polyData(points)
         fname_s = tmp_path/'single.vtp'
         fname_p = tmp_path/'parallel.vtp'
-        v.to_file(fname_s,False)
-        v.to_file(fname_p,True)
+        v.save(fname_s,False)
+        v.save(fname_p,True)
         for i in range(10):
             if os.path.isfile(fname_p) and filecmp.cmp(fname_s,fname_p):
                 assert(True)
@@ -90,11 +90,11 @@ class TestVTK:
                                           ('this_file_does_not_exist.vtx', None)])
     def test_invalid_dataset_type(self,name,dataset_type):
         with pytest.raises(TypeError):
-            VTK.from_file(name,dataset_type)
+            VTK.load(name,dataset_type)

     def test_invalid_extension_write(self,default):
         with pytest.raises(ValueError):
-            default.to_file('default.txt')
+            default.save('default.txt')

     def test_invalid_get(self,default):
         with pytest.raises(ValueError):
@@ -115,8 +115,8 @@ class TestVTK:

     def test_comments(self,tmp_path,default):
         default.add_comments(['this is a comment'])
-        default.to_file(tmp_path/'with_comments',parallel=False)
-        new = VTK.from_file(tmp_path/'with_comments.vtr')
+        default.save(tmp_path/'with_comments',parallel=False)
+        new = VTK.load(tmp_path/'with_comments.vtr')
         assert new.get_comments() == ['this is a comment']

     def test_compare_reference_polyData(self,update,reference_dir,tmp_path):
@@ -124,9 +124,9 @@ class TestVTK:
         polyData = VTK.from_polyData(points)
         polyData.add(points,'coordinates')
         if update:
-            polyData.to_file(reference_dir/'polyData')
+            polyData.save(reference_dir/'polyData')
         else:
-            reference = VTK.from_file(reference_dir/'polyData.vtp')
+            reference = VTK.load(reference_dir/'polyData.vtp')
             assert polyData.__repr__() == reference.__repr__() and \
                    np.allclose(polyData.get('coordinates'),points)

@@ -139,8 +139,8 @@ class TestVTK:
         rectilinearGrid.add(c,'cell')
         rectilinearGrid.add(n,'node')
         if update:
-            rectilinearGrid.to_file(reference_dir/'rectilinearGrid')
+            rectilinearGrid.save(reference_dir/'rectilinearGrid')
         else:
-            reference = VTK.from_file(reference_dir/'rectilinearGrid.vtr')
+            reference = VTK.load(reference_dir/'rectilinearGrid.vtr')
             assert rectilinearGrid.__repr__() == reference.__repr__() and \
                    np.allclose(rectilinearGrid.get('cell'),c)
@@ -18,8 +18,8 @@ class TestUtil:

     @pytest.mark.parametrize('input,output',
                              [
-                              ([2,0],[1,0]),
-                              ([0.5,0.5],[1,1]),
+                              ([0,-2],[0,-1]),
+                              ([-0.5,0.5],[-1,1]),
                               ([1./2.,1./3.],[3,2]),
                               ([2./3.,1./2.,1./3.],[4,3,2]),
                              ])
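Reviewer note: the updated cases exercise negative entries, i.e. `scale_to_coprime` is expected to keep signs while reducing magnitudes to coprime integers ([0,-2] → [0,-1], [-0.5,0.5] → [-1,1]). A sketch of the underlying idea using exact fractions (not the DAMASK implementation; assumes at least one non-zero entry):

    from fractions import Fraction
    from functools import reduce
    from math import gcd

    def scale_to_coprime_sketch(v, max_denominator=1000):
        fracs = [Fraction(x).limit_denominator(max_denominator) for x in v]
        lcm = reduce(lambda a,b: a*b//gcd(a,b), [f.denominator for f in fracs], 1)
        ints = [int(f*lcm) for f in fracs]   # clear denominators
        g = reduce(gcd, ints)                # math.gcd ignores signs
        return [i//g for i in ints]

    print(scale_to_coprime_sketch([0,-2]))          # [0, -1]
    print(scale_to_coprime_sketch([-0.5,0.5]))      # [-1, 1]
    print(scale_to_coprime_sketch([2/3,1/2,1/3]))   # [4, 3, 2]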
@@ -30,4 +30,4 @@ class TestUtil:

     def test_lackofprecision(self):
         with pytest.raises(ValueError):
-            util.scale_to_coprime(np.array([1/3333,1,1]))
+            util.scale_to_coprime(np.array([1/333.333,1,1]))
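Reviewer note: the old input 1/3333 is itself a simple exact ratio that a denominator-limited rational approximation can recover, so it may not trigger the precision error at all; 1/333.333 (= 1000/333333) cannot be matched under a moderate denominator cap, making the ValueError deterministic. Which cap DAMASK uses internally is an assumption here; the effect with a cap of 10000:

    from fractions import Fraction

    print(Fraction(1/3333).limit_denominator(10000))     # 1/3333, recovered exactly
    print(Fraction(1/333.333).limit_denominator(10000))  # only an approximation of 1000/333333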