From 96ad358f83ebcf5d561fb0d33855b45564635289 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Tue, 15 Sep 2020 00:58:06 -0400 Subject: [PATCH 01/27] modified file reading/writing methods to load_X, save_X formalism --- PRIVATE | 2 +- processing/legacy/addAPS34IDEstrainCoords.py | 9 +- processing/legacy/addCumulative.py | 10 +- processing/post/DADF5_postResults.py | 6 +- processing/post/addCompatibilityMismatch.py | 4 +- processing/post/addCurl.py | 4 +- processing/post/addDerivative.py | 10 +- processing/post/addDisplacement.py | 14 +- processing/post/addDivergence.py | 4 +- processing/post/addEuclideanDistance.py | 6 +- processing/post/addGaussian.py | 4 +- processing/post/addGradient.py | 4 +- processing/post/addOrientations.py | 4 +- processing/post/addSchmidfactors.py | 4 +- processing/post/filterTable.py | 18 +- processing/post/permuteData.py | 4 +- processing/pre/geom_fromDREAM3D.py | 2 +- processing/pre/geom_fromMinimalSurface.py | 2 +- processing/pre/geom_fromOsteonGeometry.py | 2 +- processing/pre/geom_fromTable.py | 5 +- .../pre/geom_fromVoronoiTessellation.py | 5 +- processing/pre/geom_grainGrowth.py | 6 +- processing/pre/hybridIA_linODFsampling.py | 48 +-- processing/pre/mentat_spectralBox.py | 14 +- processing/pre/seeds_fromDistribution.py | 19 +- processing/pre/seeds_fromGeom.py | 8 +- processing/pre/seeds_fromPokes.py | 8 +- processing/pre/seeds_fromRandom.py | 6 +- python/damask/_colormap.py | 128 ++++---- python/damask/_geom.py | 196 ++++++------- python/damask/_result.py | 4 +- python/damask/_table.py | 92 +++--- python/damask/_test.py | 59 ++-- python/damask/_vtk.py | 4 +- ...sualize_hybridIA_sampling-checkpoint.ipynb | 6 + .../Visualize_hybridIA_sampling.ipynb | 276 ++++++++++++++++++ python/tests/test_Colormap.py | 39 ++- python/tests/test_Geom.py | 48 ++- python/tests/test_Orientation.py | 3 +- python/tests/test_Result.py | 2 +- python/tests/test_Table.py | 36 +-- python/tests/test_VTK.py | 38 +-- 42 files changed, 708 insertions(+), 455 
deletions(-) create mode 100644 python/tests/reference/Rotation/.ipynb_checkpoints/Visualize_hybridIA_sampling-checkpoint.ipynb create mode 100644 python/tests/reference/Rotation/Visualize_hybridIA_sampling.ipynb diff --git a/PRIVATE b/PRIVATE index 65ec74c07..5b7c34e58 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit 65ec74c07052e77f35a4b5e80bf110aff1f5ae61 +Subproject commit 5b7c34e586589141d1e061f021512e65308fc1f7 diff --git a/processing/legacy/addAPS34IDEstrainCoords.py b/processing/legacy/addAPS34IDEstrainCoords.py index 9ba51ada1..c82ff35a3 100755 --- a/processing/legacy/addAPS34IDEstrainCoords.py +++ b/processing/legacy/addAPS34IDEstrainCoords.py @@ -42,11 +42,10 @@ rot_to_TSL = damask.Rotation.from_axis_angle([-1,0,0,.75*np.pi]) for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) - + table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + coord = - table.get(options.frame) coord[:,2] += table.get(options.depth)[:,0] - table.add('coord',rot_to_TSL.broadcast_to(coord.shape[0]) @ coord,scriptID+' '+' '.join(sys.argv[1:])) - - table.to_file(sys.stdout if name is None else name) + table.add('coord',rot_to_TSL.broadcast_to(coord.shape[0]) @ coord,scriptID+' '+' '.join(sys.argv[1:]))\ + .save_ASCII(sys.stdout if name is None else name) diff --git a/processing/legacy/addCumulative.py b/processing/legacy/addCumulative.py index f84828a89..14d8bcfea 100755 --- a/processing/legacy/addCumulative.py +++ b/processing/legacy/addCumulative.py @@ -39,10 +39,10 @@ if options.labels is None: for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) for label in options.labels: - table.add('cum_{}({})'.format('prod' if 
options.product else 'sum',label), - np.cumprod(table.get(label),0) if options.product else np.cumsum(table.get(label),0), - scriptID+' '+' '.join(sys.argv[1:])) + table = table.add('cum_{}({})'.format('prod' if options.product else 'sum',label), + np.cumprod(table.get(label),0) if options.product else np.cumsum(table.get(label),0), + scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/DADF5_postResults.py b/processing/post/DADF5_postResults.py index e81330581..fa9df27b0 100755 --- a/processing/post/DADF5_postResults.py +++ b/processing/post/DADF5_postResults.py @@ -38,8 +38,8 @@ for filename in options.filenames: N_digits = int(np.floor(np.log10(int(results.increments[-1][3:]))))+1 N_digits = 5 # hack to keep test intact for inc in damask.util.show_progress(results.iterate('increments'),len(results.increments)): - table = damask.Table(np.ones(np.product(results.grid),dtype=int)*int(inc[3:]),{'inc':(1,)}) - table = table.add('pos',coords.reshape(-1,3)) + table = damask.Table(np.ones(np.product(results.grid),dtype=int)*int(inc[3:]),{'inc':(1,)})\ + .add('pos',coords.reshape(-1,3)) results.pick('materialpoints',False) results.pick('constituents', True) @@ -60,4 +60,4 @@ for filename in options.filenames: os.mkdir(dirname,0o755) file_out = '{}_inc{}.txt'.format(os.path.splitext(os.path.split(filename)[-1])[0], inc[3:].zfill(N_digits)) - table.to_file(os.path.join(dirname,file_out)) + table.save_ASCII(os.path.join(dirname,file_out)) diff --git a/processing/post/addCompatibilityMismatch.py b/processing/post/addCompatibilityMismatch.py index 5009d44a0..e274092d2 100755 --- a/processing/post/addCompatibilityMismatch.py +++ b/processing/post/addCompatibilityMismatch.py @@ -172,7 +172,7 @@ if filenames == []: filenames = [None] for name in filenames: damask.util.report(scriptName,name) - table = 
damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) F = table.get(options.defgrad).reshape(tuple(grid)+(-1,),order='F').reshape(tuple(grid)+(3,3)) @@ -191,4 +191,4 @@ for name in filenames: volumeMismatch.reshape(-1,1,order='F'), scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/addCurl.py b/processing/post/addCurl.py index 1033e3303..9adb06a00 100755 --- a/processing/post/addCurl.py +++ b/processing/post/addCurl.py @@ -43,7 +43,7 @@ if options.labels is None: parser.error('no data column specified.') for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) for label in options.labels: @@ -55,4 +55,4 @@ for name in filenames: curl.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape),order='F'), scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/addDerivative.py b/processing/post/addDerivative.py index b6b19c98a..179715e23 100755 --- a/processing/post/addDerivative.py +++ b/processing/post/addDerivative.py @@ -14,9 +14,9 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptID = ' '.join([scriptName,damask.version]) def derivative(coordinates,what): - + result = np.empty_like(what) - + # use differentiation by interpolation # as described in 
http://www2.math.umd.edu/~dlevy/classes/amsc466/lecture-notes/differentiation-chap.pdf @@ -31,7 +31,7 @@ def derivative(coordinates,what): (coordinates[0] - coordinates[1]) result[-1,:] = (what[-1,:] - what[-2,:]) / \ (coordinates[-1] - coordinates[-2]) - + return result @@ -65,10 +65,10 @@ if options.labels is None: for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) for label in options.labels: table = table.add('d({})/d({})'.format(label,options.coordinates), derivative(table.get(options.coordinates),table.get(label)), scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/addDisplacement.py b/processing/post/addDisplacement.py index f1ab565b0..8dc3051d3 100755 --- a/processing/post/addDisplacement.py +++ b/processing/post/addDisplacement.py @@ -47,25 +47,25 @@ parser.set_defaults(f = 'f', for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) F = table.get(options.f).reshape(tuple(grid)+(-1,),order='F').reshape(tuple(grid)+(3,3)) if options.nodal: - table = damask.Table(damask.grid_filters.node_coord0(grid,size).reshape(-1,3,order='F'), + damask.Table(damask.grid_filters.node_coord0(grid,size).reshape(-1,3,order='F'), {'pos':(3,)})\ .add('avg({}).{}'.format(options.f,options.pos), damask.grid_filters.node_displacement_avg(size,F).reshape(-1,3,order='F'), scriptID+' '+' '.join(sys.argv[1:]))\ .add('fluct({}).{}'.format(options.f,options.pos), 
damask.grid_filters.node_displacement_fluct(size,F).reshape(-1,3,order='F'), - scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt') + scriptID+' '+' '.join(sys.argv[1:]))\ + .save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt') else: - table = table.add('avg({}).{}'.format(options.f,options.pos), + table.add('avg({}).{}'.format(options.f,options.pos), damask.grid_filters.cell_displacement_avg(size,F).reshape(-1,3,order='F'), scriptID+' '+' '.join(sys.argv[1:]))\ .add('fluct({}).{}'.format(options.f,options.pos), damask.grid_filters.cell_displacement_fluct(size,F).reshape(-1,3,order='F'), - scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + scriptID+' '+' '.join(sys.argv[1:]))\ + .save_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/addDivergence.py b/processing/post/addDivergence.py index 6495793cf..b4c0bb7b4 100755 --- a/processing/post/addDivergence.py +++ b/processing/post/addDivergence.py @@ -43,7 +43,7 @@ if options.labels is None: parser.error('no data column specified.') for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) for label in options.labels: @@ -55,4 +55,4 @@ for name in filenames: div.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)//3,order='F'), scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/addEuclideanDistance.py b/processing/post/addEuclideanDistance.py index f5cf58ab3..305fc2ad5 100755 --- a/processing/post/addEuclideanDistance.py +++ 
b/processing/post/addEuclideanDistance.py @@ -142,7 +142,7 @@ for i,feature in enumerate(features): for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) neighborhood = neighborhoods[options.neighborhood] @@ -158,7 +158,7 @@ for name in filenames: diffToNeighbor[:,:,:,i] = ndimage.convolve(microstructure,stencil) # compare ID at each point... # ...to every one in the specified neighborhood # for same IDs at both locations ==> 0 - + diffToNeighbor = np.sort(diffToNeighbor) # sort diff such that number of changes in diff (steps)... # ...reflects number of unique neighbors uniques = np.where(diffToNeighbor[1:-1,1:-1,1:-1,0] != 0, 1,0) # initialize unique value counter (exclude myself [= 0]) @@ -184,4 +184,4 @@ for name in filenames: distance[i,:], scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/addGaussian.py b/processing/post/addGaussian.py index 8e58da884..708617c1f 100755 --- a/processing/post/addGaussian.py +++ b/processing/post/addGaussian.py @@ -63,7 +63,7 @@ if options.labels is None: parser.error('no data column specified.') for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) damask.grid_filters.coord0_check(table.get(options.pos)) for label in options.labels: @@ -73,4 +73,4 @@ for name in filenames: mode = 'wrap' if options.periodic else 'nearest'), scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + 
table.save_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/addGradient.py b/processing/post/addGradient.py index 718a972f3..e216ed34c 100755 --- a/processing/post/addGradient.py +++ b/processing/post/addGradient.py @@ -43,7 +43,7 @@ if options.labels is None: parser.error('no data column specified.') for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) for label in options.labels: @@ -55,4 +55,4 @@ for name in filenames: grad.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)*3,order='F'), scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/addOrientations.py b/processing/post/addOrientations.py index dddc14193..3a320241c 100755 --- a/processing/post/addOrientations.py +++ b/processing/post/addOrientations.py @@ -110,7 +110,7 @@ R = damask.Rotation.from_axis_angle(np.array(options.labrotation),options.degree for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) if options.eulers is not None: label = options.eulers @@ -147,4 +147,4 @@ for name in filenames: if 'axisangle' in options.output: table = table.add('om({})'.format(label),o.as_axisangle(options.degrees), scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/addSchmidfactors.py b/processing/post/addSchmidfactors.py index dc4117d78..40db4da34 100755 --- 
a/processing/post/addSchmidfactors.py +++ b/processing/post/addSchmidfactors.py @@ -175,7 +175,7 @@ labels = ['S[{direction[0]:.1g}_{direction[1]:.1g}_{direction[2]:.1g}]' for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) o = damask.Rotation.from_quaternion(table.get(options.quaternion)) @@ -189,4 +189,4 @@ for name in filenames: for i,label in enumerate(labels): table = table.add(label,S[:,i],scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/filterTable.py b/processing/post/filterTable.py index 494257a60..4f4af088b 100755 --- a/processing/post/filterTable.py +++ b/processing/post/filterTable.py @@ -27,7 +27,7 @@ def sortingList(labels,whitelistitems): else: indices.append(0) names.append(label) - + return [indices,names,whitelistitems] @@ -72,11 +72,11 @@ for name in filenames: continue damask.util.report(scriptName,name) -# ------------------------------------------ assemble info --------------------------------------- +# ------------------------------------------ assemble info --------------------------------------- table.head_read() -# ------------------------------------------ process data --------------------------------------- +# ------------------------------------------ process data --------------------------------------- specials = { \ '_row_': 0, @@ -103,12 +103,12 @@ for name in filenames: else np.lexsort(sortingList(labels,whitelistitem)) # reorder if unique, i.e. 
no "-1" in whitelistitem else: order = range(len(labels)) # maintain original order of labels - + # --------------------------------------- evaluate condition --------------------------------------- if options.condition is not None: condition = options.condition # copy per file, since might be altered inline breaker = False - + for position,(all,marker,column) in enumerate(set(re.findall(r'#(([s]#)?(.+?))#',condition))): # find three groups idx = table.label_index(column) dim = table.label_dimension(column) @@ -123,11 +123,11 @@ for name in filenames: 's#':'str'}[marker],idx) # take float or string value of data column elif dim > 1: # multidimensional input (vector, tensor, etc.) replacement = 'np.array(table.data[{}:{}],dtype=float)'.format(idx,idx+dim) # use (flat) array representation - + condition = condition.replace('#'+all+'#',replacement) - + if breaker: continue # found mistake in condition evaluation --> next file - + # ------------------------------------------ assemble header --------------------------------------- table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:])) @@ -138,7 +138,7 @@ for name in filenames: # ------------------------------------------ process and output data ------------------------------------------ positions = np.array(positions)[order] - + atOnce = options.condition is None if atOnce: # read full array and filter columns try: diff --git a/processing/post/permuteData.py b/processing/post/permuteData.py index 316fdd3da..af184924d 100755 --- a/processing/post/permuteData.py +++ b/processing/post/permuteData.py @@ -47,7 +47,7 @@ if filenames == []: filenames = [None] for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) randomSeed = int(os.urandom(4).hex(), 16) if options.randomSeed is None else options.randomSeed # random seed per 
file rng = np.random.default_rng(randomSeed) @@ -58,4 +58,4 @@ for name in filenames: rng.shuffle(uniques) table = table.set(label,uniques[inverse], scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save_ASCII(sys.stdout if name is None else name) diff --git a/processing/pre/geom_fromDREAM3D.py b/processing/pre/geom_fromDREAM3D.py index 6b7ccc21a..471435766 100755 --- a/processing/pre/geom_fromDREAM3D.py +++ b/processing/pre/geom_fromDREAM3D.py @@ -154,4 +154,4 @@ for name in filenames: homogenization=options.homogenization,comments=header) damask.util.croak(geom) - geom.to_file(os.path.splitext(name)[0]+'.geom',format='ASCII',pack=False) + geom.save_ASCII(os.path.splitext(name)[0]+'.geom',pack=False) diff --git a/processing/pre/geom_fromMinimalSurface.py b/processing/pre/geom_fromMinimalSurface.py index e6289ba0a..b64bac417 100755 --- a/processing/pre/geom_fromMinimalSurface.py +++ b/processing/pre/geom_fromMinimalSurface.py @@ -89,4 +89,4 @@ geom=damask.Geom(microstructure,options.size, comments=[scriptID + ' ' + ' '.join(sys.argv[1:])]) damask.util.croak(geom) -geom.to_file(sys.stdout if name is None else name,format='ASCII',pack=False) +geom.save_ASCII(sys.stdout if name is None else name,pack=False) diff --git a/processing/pre/geom_fromOsteonGeometry.py b/processing/pre/geom_fromOsteonGeometry.py index 6c6326163..c2583d0ed 100755 --- a/processing/pre/geom_fromOsteonGeometry.py +++ b/processing/pre/geom_fromOsteonGeometry.py @@ -142,4 +142,4 @@ geom = damask.Geom(microstructure.reshape(grid), homogenization=options.homogenization,comments=header) damask.util.croak(geom) -geom.to_file(sys.stdout if name is None else name,format='ASCII',pack=False) +geom.save_ASCII(sys.stdout if name is None else name,pack=False) diff --git a/processing/pre/geom_fromTable.py b/processing/pre/geom_fromTable.py index dc968b82e..11c0761b5 100755 --- a/processing/pre/geom_fromTable.py +++ b/processing/pre/geom_fromTable.py @@ -68,7 
+68,7 @@ if options.axes is not None and not set(options.axes).issubset(set(['x','+x','-x for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) table.sort_by(['{}_{}'.format(i,options.pos) for i in range(3,0,-1)]) # x fast, y slow grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) @@ -105,5 +105,4 @@ for name in filenames: homogenization=options.homogenization,comments=header) damask.util.croak(geom) - geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom', - format='ASCII',pack=False) + geom.save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',pack=False) diff --git a/processing/pre/geom_fromVoronoiTessellation.py b/processing/pre/geom_fromVoronoiTessellation.py index 67a0dfecc..a3a54882a 100755 --- a/processing/pre/geom_fromVoronoiTessellation.py +++ b/processing/pre/geom_fromVoronoiTessellation.py @@ -171,7 +171,7 @@ if filenames == []: filenames = [None] for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) size = np.ones(3) origin = np.zeros(3) @@ -228,5 +228,4 @@ for name in filenames: homogenization=options.homogenization,comments=header) damask.util.croak(geom) - geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom', - format='ASCII',pack=False) + geom.save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',pack=False) diff --git a/processing/pre/geom_grainGrowth.py b/processing/pre/geom_grainGrowth.py index 0652d0583..dbe1f1a74 100755 --- a/processing/pre/geom_grainGrowth.py +++ b/processing/pre/geom_grainGrowth.py @@ -62,7 +62,7 @@ if 
filenames == []: filenames = [None] for name in filenames: damask.util.report(scriptName,name) - geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) + geom = damask.Geom.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) grid_original = geom.get_grid() damask.util.croak(geom) @@ -169,7 +169,7 @@ for name in filenames: # undo any changes involving immutable microstructures microstructure = np.where(immutable, microstructure_original,microstructure) - geom=geom.duplicate(microstructure[0:grid_original[0],0:grid_original[1],0:grid_original[2]]) + geom = geom.duplicate(microstructure[0:grid_original[0],0:grid_original[1],0:grid_original[2]]) geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) - geom.to_file(sys.stdout if name is None else name,format='ASCII',pack=False) + geom.save_ASCII(sys.stdout if name is None else name,pack=False) diff --git a/processing/pre/hybridIA_linODFsampling.py b/processing/pre/hybridIA_linODFsampling.py index 6f5827f8b..01704197e 100755 --- a/processing/pre/hybridIA_linODFsampling.py +++ b/processing/pre/hybridIA_linODFsampling.py @@ -31,7 +31,7 @@ def binAsBins(bin,intervals): bins[1] = (bin//intervals[2]) % intervals[1] bins[2] = bin % intervals[2] return bins - + def binsAsBin(bins,intervals): """Implode 3D bins into compound bin.""" return (bins[0]*intervals[1] + bins[1])*intervals[2] + bins[2] @@ -95,7 +95,7 @@ def directInversion (ODF,nSamples): float(nInvSamples)/nOptSamples-1.0, scale,nSamples)) repetition = [None]*ODF['nBins'] # preallocate and clear - + for bin in range(ODF['nBins']): # loop over bins repetition[bin] = int(round(ODF['dV_V'][bin]*scale)) # calc repetition @@ -105,7 +105,7 @@ def directInversion (ODF,nSamples): for bin in range(ODF['nBins']): set[i:i+repetition[bin]] = [bin]*repetition[bin] # fill set with bin, i.e. 
orientation i += repetition[bin] # advance set counter - + orientations = np.zeros((nSamples,3),'f') reconstructedODF = np.zeros(ODF['nBins'],'f') unitInc = 1.0/nSamples @@ -117,7 +117,7 @@ def directInversion (ODF,nSamples): orientations[j] = np.degrees(Eulers) reconstructedODF[bin] += unitInc set[ex] = set[j] # exchange orientations - + return orientations, reconstructedODF @@ -130,7 +130,7 @@ def MonteCarloEulers (ODF,nSamples): orientations = np.zeros((nSamples,3),'f') reconstructedODF = np.zeros(ODF['nBins'],'f') unitInc = 1.0/nSamples - + for j in range(nSamples): MC = maxdV_V*2.0 bin = 0 @@ -153,7 +153,7 @@ def MonteCarloBins (ODF,nSamples): orientations = np.zeros((nSamples,3),'f') reconstructedODF = np.zeros(ODF['nBins'],'f') unitInc = 1.0/nSamples - + for j in range(nSamples): MC = maxdV_V*2.0 bin = 0 @@ -173,14 +173,14 @@ def TothVanHoutteSTAT (ODF,nSamples): orientations = np.zeros((nSamples,3),'f') reconstructedODF = np.zeros(ODF['nBins'],'f') unitInc = 1.0/nSamples - + selectors = [random.random() for i in range(nSamples)] selectors.sort() indexSelector = 0 - + cumdV_V = 0.0 countSamples = 0 - + for bin in range(ODF['nBins']) : cumdV_V += ODF['dV_V'][bin] while indexSelector < nSamples and selectors[indexSelector] < cumdV_V: @@ -191,7 +191,7 @@ def TothVanHoutteSTAT (ODF,nSamples): indexSelector += 1 damask.util.croak('created set of %i when asked to deliver %i'%(countSamples,nSamples)) - + return orientations, reconstructedODF @@ -233,8 +233,8 @@ if filenames == []: filenames = [None] for name in filenames: damask.util.report(scriptName,name) - - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + + table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) randomSeed = int(os.urandom(4).hex(),16) if options.randomSeed is None else options.randomSeed # random seed per file random.seed(randomSeed) @@ -253,7 +253,7 @@ for name in filenames: if eulers.shape[0] != 
ODF['nBins']: damask.util.croak('expecting %i values but got %i'%(ODF['nBins'],eulers.shape[0])) continue - + # ----- build binnedODF array and normalize ------------------------------------------------------ sumdV_V = 0.0 ODF['dV_V'] = [None]*ODF['nBins'] @@ -267,7 +267,7 @@ for name in filenames: if ODF['dV_V'][b] > 0.0: sumdV_V += ODF['dV_V'][b] ODF['nNonZero'] += 1 - + for b in range(ODF['nBins']): ODF['dV_V'][b] /= sumdV_V # normalize dV/V @@ -277,19 +277,19 @@ for name in filenames: 'Volume integral of ODF: %12.11f\n'%sumdV_V, 'Reference Integral: %12.11f\n'%(ODF['limit'][0]*ODF['limit'][2]*(1-math.cos(ODF['limit'][1]))), ]) - + Functions = {'IA': 'directInversion', 'STAT': 'TothVanHoutteSTAT', 'MC': 'MonteCarloBins'} method = Functions[options.algorithm] Orientations, ReconstructedODF = (globals()[method])(ODF,options.number) - + # calculate accuracy of sample squaredDiff = {'orig':0.0,method:0.0} squaredRelDiff = {'orig':0.0,method:0.0} mutualProd = {'orig':0.0,method:0.0} indivSum = {'orig':0.0,method:0.0} indivSquaredSum = {'orig':0.0,method:0.0} - + for bin in range(ODF['nBins']): squaredDiff[method] += (ODF['dV_V'][bin] - ReconstructedODF[bin])**2 if ODF['dV_V'][bin] > 0.0: @@ -299,7 +299,7 @@ for name in filenames: indivSquaredSum[method] += ReconstructedODF[bin]**2 indivSum['orig'] += ODF['dV_V'][bin] indivSquaredSum['orig'] += ODF['dV_V'][bin]**2 - + damask.util.croak(['sqrt(N*)RMSD of ODFs:\t %12.11f'% math.sqrt(options.number*squaredDiff[method]), 'RMSrD of ODFs:\t %12.11f'%math.sqrt(squaredRelDiff[method]), 'rMSD of ODFs:\t %12.11f'%(squaredDiff[method]/indivSquaredSum['orig']), @@ -311,10 +311,10 @@ for name in filenames: (ODF['nNonZero']*math.sqrt((indivSquaredSum['orig']/ODF['nNonZero']-(indivSum['orig']/ODF['nNonZero'])**2)*\ (indivSquaredSum[method]/ODF['nNonZero']-(indivSum[method]/ODF['nNonZero'])**2)))), ]) - + if method == 'IA' and options.number < ODF['nNonZero']: strOpt = '(%i)'%ODF['nNonZero'] - + formatwidth = 
1+int(math.log10(options.number)) materialConfig = [ @@ -324,12 +324,12 @@ for name in filenames: '', '#-------------------#', ] - + for i,ID in enumerate(range(options.number)): materialConfig += ['[Grain%s]'%(str(ID+1).zfill(formatwidth)), '(constituent) phase %i texture %s fraction 1.0'%(options.phase,str(ID+1).rjust(formatwidth)), ] - + materialConfig += [ '#-------------------#', '', @@ -338,12 +338,12 @@ for name in filenames: for ID in range(options.number): eulers = Orientations[ID] - + materialConfig += ['[Grain%s]'%(str(ID+1).zfill(formatwidth)), '(gauss) phi1 {} Phi {} phi2 {} scatter 0.0 fraction 1.0'.format(*eulers), ] -#--- output finalization -------------------------------------------------------------------------- +#--- output finalization -------------------------------------------------------------------------- with (open(os.path.splitext(name)[0]+'_'+method+'_'+str(options.number)+'_material.config','w')) as outfile: outfile.write('\n'.join(materialConfig)+'\n') diff --git a/processing/pre/mentat_spectralBox.py b/processing/pre/mentat_spectralBox.py index 027240044..e9a246f8f 100755 --- a/processing/pre/mentat_spectralBox.py +++ b/processing/pre/mentat_spectralBox.py @@ -42,7 +42,7 @@ def output(cmds,locals,dest): else: outFile(str(cmd),locals,dest) - + #------------------------------------------------------------------------------------------------- def init(): return [ @@ -114,7 +114,7 @@ def material(): "*add_geometry_elements", "all_existing", ] - + #------------------------------------------------------------------------------------------------- def geometry(): @@ -127,14 +127,14 @@ def geometry(): "*element_type 7", "all_existing", ] - + #------------------------------------------------------------------------------------------------- def initial_conditions(microstructures): elements = [] element = 0 for id in microstructures: - element += 1 + element += 1 if len(elements) < id: for i in range(id-len(elements)): elements.append([]) @@ 
-195,8 +195,8 @@ if filenames == []: filenames = [None] for name in filenames: damask.util.report(scriptName,name) - - geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) + + geom = damask.Geom.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) microstructure = geom.get_microstructure().flatten(order='F') cmds = [\ @@ -210,7 +210,7 @@ for name in filenames: '*redraw', '*draw_automatic', ] - + outputLocals = {} if options.port: py_mentat.py_connect('',options.port) diff --git a/processing/pre/seeds_fromDistribution.py b/processing/pre/seeds_fromDistribution.py index 1a4ec6971..1cf35450c 100755 --- a/processing/pre/seeds_fromDistribution.py +++ b/processing/pre/seeds_fromDistribution.py @@ -78,7 +78,7 @@ class myThread (threading.Thread): perturbedSeedsVFile = StringIO() myBestSeedsVFile.seek(0) - perturbedSeedsTable = damask.Table.from_ASCII(myBestSeedsVFile) + perturbedSeedsTable = damask.Table.load_ASCII(myBestSeedsVFile) coords = perturbedSeedsTable.get('pos') i = 0 for ms,coord in enumerate(coords): @@ -89,8 +89,7 @@ class myThread (threading.Thread): coords[i]=newCoords direction[i]*=2. 
i+= 1 - perturbedSeedsTable.set('pos',coords) - perturbedSeedsTable.to_file(perturbedSeedsVFile) + perturbedSeedsTable.set('pos',coords).save_ASCII(perturbedSeedsVFile) #--- do tesselation with perturbed seed file ------------------------------------------------------ perturbedGeomVFile.close() @@ -101,7 +100,7 @@ class myThread (threading.Thread): perturbedGeomVFile.seek(0) #--- evaluate current seeds file ------------------------------------------------------------------ - perturbedGeom = damask.Geom.from_file(perturbedGeomVFile) + perturbedGeom = damask.Geom.load_ASCII(perturbedGeomVFile) myNmicrostructures = len(np.unique(perturbedGeom.microstructure)) currentData=np.bincount(perturbedGeom.microstructure.ravel())[1:]/points currentError=[] @@ -213,14 +212,14 @@ if options.randomSeed is None: options.randomSeed = int(os.urandom(4).hex(),16) damask.util.croak(options.randomSeed) delta = options.scale/np.array(options.grid) -baseFile=os.path.splitext(os.path.basename(options.seedFile))[0] +baseFile = os.path.splitext(os.path.basename(options.seedFile))[0] points = np.array(options.grid).prod().astype('float') # ----------- calculate target distribution and bin edges -targetGeom = damask.Geom.from_file(os.path.splitext(os.path.basename(options.target))[0]+'.geom') +targetGeom = damask.Geom.load_ASCII(os.path.splitext(os.path.basename(options.target))[0]+'.geom') nMicrostructures = len(np.unique(targetGeom.microstructure)) targetVolFrac = np.bincount(targetGeom.microstructure.flatten())/targetGeom.grid.prod().astype(np.float) -target=[] +target = [] for i in range(1,nMicrostructures+1): targetHist,targetBins = np.histogram(targetVolFrac,bins=i) #bin boundaries target.append({'histogram':targetHist,'bins':targetBins}) @@ -243,7 +242,7 @@ initialGeomVFile = StringIO() initialGeomVFile.write(damask.util.execute('geom_fromVoronoiTessellation '+ ' -g '+' '.join(list(map(str, options.grid))),bestSeedsVFile)[0]) initialGeomVFile.seek(0) -initialGeom = 
damask.Geom.from_file(initialGeomVFile) +initialGeom = damask.Geom.load_ASCII(initialGeomVFile) if len(np.unique(targetGeom.microstructure)) != nMicrostructures: damask.util.croak('error. Microstructure count mismatch') @@ -273,8 +272,8 @@ sys.stdout.flush() initialGeomVFile.close() # start mulithreaded monte carlo simulation -threads=[] -s=threading.Semaphore(1) +threads = [] +s = threading.Semaphore(1) for i in range(options.threads): threads.append(myThread(i)) diff --git a/processing/pre/seeds_fromGeom.py b/processing/pre/seeds_fromGeom.py index 97550ce13..95f0d3815 100755 --- a/processing/pre/seeds_fromGeom.py +++ b/processing/pre/seeds_fromGeom.py @@ -46,7 +46,7 @@ options.blacklist = [int(i) for i in options.blacklist] for name in filenames: damask.util.report(scriptName,name) - geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) + geom = damask.Geom.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) microstructure = geom.get_microstructure().reshape((-1,1),order='F') mask = np.logical_and(np.in1d(microstructure,options.whitelist,invert=False) if options.whitelist else \ @@ -63,6 +63,6 @@ for name in filenames: 'origin\tx {}\ty {}\tz {}'.format(*geom.origin), 'homogenization\t{}'.format(geom.homogenization)] - table = damask.Table(seeds[mask],{'pos':(3,)},comments) - table = table.add('microstructure',microstructure[mask]) - table.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.seeds') + damask.Table(seeds[mask],{'pos':(3,)},comments)\ + .add('microstructure',microstructure[mask].astype(int))\ + .save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.seeds') diff --git a/processing/pre/seeds_fromPokes.py b/processing/pre/seeds_fromPokes.py index 78172fc23..5deb0ab1a 100755 --- a/processing/pre/seeds_fromPokes.py +++ b/processing/pre/seeds_fromPokes.py @@ -52,7 +52,7 @@ options.box = np.array(options.box).reshape(3,2) for name in filenames: 
damask.util.report(scriptName,name) - geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) + geom = damask.Geom.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) offset =(np.amin(options.box, axis=1)*geom.grid/geom.size).astype(int) box = np.amax(options.box, axis=1) \ @@ -91,6 +91,6 @@ for name in filenames: 'homogenization\t{}'.format(geom.homogenization)] table = damask.Table(seeds,{'pos':(3,),'microstructure':(1,)},comments) - table.set('microstructure',table.get('microstructure').astype(np.int)) - table.to_file(sys.stdout if name is None else \ - os.path.splitext(name)[0]+f'_poked_{options.N}.seeds') + table.set('microstructure',table.get('microstructure').astype(np.int))\ + .save_ASCII(sys.stdout if name is None else \ + os.path.splitext(name)[0]+f'_poked_{options.N}.seeds') diff --git a/processing/pre/seeds_fromRandom.py b/processing/pre/seeds_fromRandom.py index a544528cf..c1d454f67 100755 --- a/processing/pre/seeds_fromRandom.py +++ b/processing/pre/seeds_fromRandom.py @@ -154,12 +154,12 @@ for name in filenames: 'randomSeed\t{}'.format(options.randomSeed), ] - table = damask.Table(np.hstack((seeds,eulers)),{'pos':(3,),'euler':(3,)},comments) - table = table.add('microstructure',np.arange(options.microstructure,options.microstructure + options.N,dtype=int)) + table = damask.Table(np.hstack((seeds,eulers)),{'pos':(3,),'euler':(3,)},comments)\ + .add('microstructure',np.arange(options.microstructure,options.microstructure + options.N,dtype=int)) if options.weights: weights = np.random.uniform(low = 0, high = options.max, size = options.N) if options.max > 0.0 \ else np.random.normal(loc = options.mean, scale = options.sigma, size = options.N) table = table.add('weight',weights) - table.to_file(sys.stdout if name is None else name) + table.save_ASCII(sys.stdout if name is None else name) diff --git a/python/damask/_colormap.py b/python/damask/_colormap.py index fccb8642b..f3065ebc8 100644 --- 
a/python/damask/_colormap.py +++ b/python/damask/_colormap.py @@ -235,100 +235,128 @@ class Colormap(mpl.colors.ListedColormap): return Colormap(np.array(rev.colors),rev.name[:-4] if rev.name.endswith('_r_r') else rev.name) - def to_file(self,fname=None,format='ParaView'): + + def save_paraview(self,fname=None): """ - Export colormap to file for use in external programs. + Write colormap to JSON file for Paraview. Parameters ---------- fname : file, str, or pathlib.Path, optional. Filename to store results. If not given, the filename will - consist of the name of the colormap and an extension that - depends on the file format. - format : {'ParaView', 'ASCII', 'GOM', 'gmsh'}, optional - File format, defaults to 'ParaView'. Available formats are: - - ParaView: JSON file, extension '.json'. - - ASCII: Plain text file, extension '.txt'. - - GOM: Aramis GOM (DIC), extension '.legend'. - - Gmsh: Gmsh FEM mesh-generator, extension '.msh'. + consist of the name of the colormap and extension '.json'. 
""" if fname is not None: try: - f = open(fname,'w') + fhandle = open(fname,'w') except TypeError: - f = fname + fhandle = fname else: - f = None + fhandle = None - if format.lower() == 'paraview': - Colormap._export_paraview(self,f) - elif format.lower() == 'ascii': - Colormap._export_ASCII(self,f) - elif format.lower() == 'gom': - Colormap._export_GOM(self,f) - elif format.lower() == 'gmsh': - Colormap._export_gmsh(self,f) - else: - raise ValueError('Unknown output format: {format}.') - - @staticmethod - def _export_paraview(colormap,fhandle=None): - """Write colormap to JSON file for Paraview.""" colors = [] - for i,c in enumerate(np.round(colormap.colors,6).tolist()): + for i,c in enumerate(np.round(self.colors,6).tolist()): colors+=[i]+c out = [{ 'Creator':util.execution_stamp('Colormap'), 'ColorSpace':'RGB', - 'Name':colormap.name, + 'Name':self.name, 'DefaultMap':True, 'RGBPoints':colors }] if fhandle is None: - with open(colormap.name.replace(' ','_')+'.json', 'w') as f: + with open(self.name.replace(' ','_')+'.json', 'w') as f: json.dump(out, f,indent=4) else: json.dump(out,fhandle,indent=4) - @staticmethod - def _export_ASCII(colormap,fhandle=None): - """Write colormap to ASCII table.""" - labels = {'RGBA':4} if colormap.colors.shape[1] == 4 else {'RGB': 3} - t = Table(colormap.colors,labels,f'Creator: {util.execution_stamp("Colormap")}') + + def save_ASCII(self,fname=None): + """ + Write colormap to ASCII table. + + Parameters + ---------- + fname : file, str, or pathlib.Path, optional. + Filename to store results. If not given, the filename will + consist of the name of the colormap and extension '.txt'. 
+ + """ + if fname is not None: + try: + fhandle = open(fname,'w') + except TypeError: + fhandle = fname + else: + fhandle = None + + labels = {'RGBA':4} if self.colors.shape[1] == 4 else {'RGB': 3} + t = Table(self.colors,labels,f'Creator: {util.execution_stamp("Colormap")}') if fhandle is None: - with open(colormap.name.replace(' ','_')+'.txt', 'w') as f: - t.to_file(f,new_style=True) + with open(self.name.replace(' ','_')+'.txt', 'w') as f: + t.save_ASCII(f) else: - t.to_file(fhandle,new_style=True) + t.save_ASCII(fhandle) - @staticmethod - def _export_GOM(colormap,fhandle=None): - """Write colormap to GOM Aramis compatible format.""" + + def save_GOM(self,fname=None): + """ + Write colormap to GOM Aramis compatible format. + + Parameters + ---------- + fname : file, str, or pathlib.Path, optional. + Filename to store results. If not given, the filename will + consist of the name of the colormap and extension '.legend'. + + """ + if fname is not None: + try: + fhandle = open(fname,'w') + except TypeError: + fhandle = fname + else: + fhandle = None # ToDo: test in GOM - GOM_str = f'1 1 {colormap.name.replace(" ","_")} 9 {colormap.name.replace(" ","_")} ' \ + GOM_str = '1 1 {name} 9 {name} '.format(name=self.name.replace(" ","_")) \ + '0 1 0 3 0 0 -1 9 \\ 0 0 0 255 255 255 0 0 255 ' \ - + f'30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 {len(colormap.colors)}' \ - + ' '.join([f' 0 {c[0]} {c[1]} {c[2]} 255 1' for c in reversed((colormap.colors*255).astype(int))]) \ + + f'30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 {len(self.colors)}' \ + + ' '.join([f' 0 {c[0]} {c[1]} {c[2]} 255 1' for c in reversed((self.colors*255).astype(int))]) \ + '\n' if fhandle is None: - with open(colormap.name.replace(' ','_')+'.legend', 'w') as f: + with open(self.name.replace(' ','_')+'.legend', 'w') as f: f.write(GOM_str) else: fhandle.write(GOM_str) - @staticmethod - def _export_gmsh(colormap,fhandle=None): - """Write colormap to Gmsh compatible format.""" + def 
save_gmsh(self,fname=None): + """ + Write colormap to Gmsh compatible format. + + Parameters + ---------- + fname : file, str, or pathlib.Path, optional. + Filename to store results. If not given, the filename will + consist of the name of the colormap and extension '.msh'. + + """ + if fname is not None: + try: + fhandle = open(fname,'w') + except TypeError: + fhandle = fname + else: + fhandle = None # ToDo: test in gmsh gmsh_str = 'View.ColorTable = {\n' \ - +'\n'.join([f'{c[0]},{c[1]},{c[2]},' for c in colormap.colors[:,:3]*255]) \ + +'\n'.join([f'{c[0]},{c[1]},{c[2]},' for c in self.colors[:,:3]*255]) \ +'\n}\n' if fhandle is None: - with open(colormap.name.replace(' ','_')+'.msh', 'w') as f: + with open(self.name.replace(' ','_')+'.msh', 'w') as f: f.write(gmsh_str) else: fhandle.write(gmsh_str) diff --git a/python/damask/_geom.py b/python/damask/_geom.py index f5dc05f2f..8fa0b533a 100644 --- a/python/damask/_geom.py +++ b/python/damask/_geom.py @@ -284,7 +284,7 @@ class Geom: @staticmethod - def from_file(fname): + def load_ASCII(fname): """ Read a geom file. @@ -350,7 +350,7 @@ class Geom: @staticmethod - def from_vtr(fname): + def load_vtr(fname): """ Read a VTK rectilinear grid. @@ -361,7 +361,7 @@ class Geom: Valid extension is .vtr, it will be appended if not given. """ - v = VTK.from_file(fname if str(fname).endswith('.vtr') else str(fname)+'.vtr') + v = VTK.load(fname if str(fname).endswith('.vtr') else str(fname)+'.vtr') comments = v.get_comments() grid = np.array(v.vtk_data.GetDimensions())-1 bbox = np.array(v.vtk_data.GetBounds()).reshape(3,2).T @@ -447,128 +447,106 @@ class Geom: ) - def to_file(self,fname,format='vtr',pack=None): + def save_ASCII(self,fname,pack=None): """ Writes a geom file. Parameters ---------- + geom : Geom object + Geometry to write. fname : str or file handle - Geometry file to write. - format : {'vtr', 'ASCII'}, optional - File format, defaults to 'vtr'. 
Available formats are: - - vtr: VTK rectilinear grid file, extension '.vtr'. - - ASCII: Plain text file, extension '.geom'. + Geometry file to write with extension '.geom'. pack : bool, optional - Compress ASCII geometry with 'x of y' and 'a to b'. + Compress geometry with 'x of y' and 'a to b'. """ - def _to_ASCII(geom,fname,pack=None): - """ - Writes a geom file. + header = [f'{len(self.comments)+4} header'] + self.comments + header.append('grid a {} b {} c {}'.format(*self.get_grid())) + header.append('size x {} y {} z {}'.format(*self.get_size())) + header.append('origin x {} y {} z {}'.format(*self.get_origin())) + header.append(f'homogenization {self.get_homogenization()}') - Parameters - ---------- - geom : Geom object - Geometry to write. - fname : str or file handle - Geometry file to write. - pack : bool, optional - Compress geometry with 'x of y' and 'a to b'. + grid = self.get_grid() - """ - header = [f'{len(geom.comments)+4} header'] + geom.comments - header.append('grid a {} b {} c {}'.format(*geom.get_grid())) - header.append('size x {} y {} z {}'.format(*geom.get_size())) - header.append('origin x {} y {} z {}'.format(*geom.get_origin())) - header.append(f'homogenization {geom.get_homogenization()}') - - grid = geom.get_grid() - - if pack is None: - plain = grid.prod()/geom.N_microstructure < 250 - else: - plain = not pack - - if plain: - format_string = '%g' if geom.microstructure.dtype in np.sctypes['float'] else \ - '%{}i'.format(1+int(np.floor(np.log10(np.nanmax(geom.microstructure))))) - np.savetxt(fname, - geom.microstructure.reshape([grid[0],np.prod(grid[1:])],order='F').T, - header='\n'.join(header), fmt=format_string, comments='') - else: - try: - f = open(fname,'w') - except TypeError: - f = fname - - compressType = None - former = start = -1 - reps = 0 - for current in geom.microstructure.flatten('F'): - if abs(current - former) == 1 and (start - current) == reps*(former - current): - compressType = 'to' - reps += 1 - elif current == 
former and start == former: - compressType = 'of' - reps += 1 - else: - if compressType is None: - f.write('\n'.join(header)+'\n') - elif compressType == '.': - f.write(f'{former}\n') - elif compressType == 'to': - f.write(f'{start} to {former}\n') - elif compressType == 'of': - f.write(f'{reps} of {former}\n') - - compressType = '.' - start = current - reps = 1 - - former = current - - if compressType == '.': - f.write(f'{former}\n') - elif compressType == 'to': - f.write(f'{start} to {former}\n') - elif compressType == 'of': - f.write(f'{reps} of {former}\n') - - - def _to_vtr(geom,fname=None): - """ - Generates vtk rectilinear grid. - - Parameters - ---------- - geom : Geom object - Geometry to write. - fname : str, optional - Filename to write. If no file is given, a string is returned. - Valid extension is .vtr, it will be appended if not given. - - """ - v = VTK.from_rectilinearGrid(geom.grid,geom.size,geom.origin) - v.add(geom.microstructure.flatten(order='F'),'materialpoint') - v.add_comments(geom.comments) - - if fname: - v.to_file(fname if str(fname).endswith('.vtr') else str(fname)+'.vtr') - else: - sys.stdout.write(v.__repr__()) - - if format.lower() == 'ascii': - return _to_ASCII(self,fname,pack) - elif format.lower() == 'vtr': - return _to_vtr(self,fname) + if pack is None: + plain = grid.prod()/self.N_microstructure < 250 else: - raise TypeError(f'Unknown format {format}.') + plain = not pack + + if plain: + format_string = '%g' if self.microstructure.dtype in np.sctypes['float'] else \ + '%{}i'.format(1+int(np.floor(np.log10(np.nanmax(self.microstructure))))) + np.savetxt(fname, + self.microstructure.reshape([grid[0],np.prod(grid[1:])],order='F').T, + header='\n'.join(header), fmt=format_string, comments='') + else: + try: + f = open(fname,'w') + except TypeError: + f = fname + + compressType = None + former = start = -1 + reps = 0 + for current in self.microstructure.flatten('F'): + if abs(current - former) == 1 and (start - current) == 
reps*(former - current): + compressType = 'to' + reps += 1 + elif current == former and start == former: + compressType = 'of' + reps += 1 + else: + if compressType is None: + f.write('\n'.join(header)+'\n') + elif compressType == '.': + f.write(f'{former}\n') + elif compressType == 'to': + f.write(f'{start} to {former}\n') + elif compressType == 'of': + f.write(f'{reps} of {former}\n') + + compressType = '.' + start = current + reps = 1 + + former = current + + if compressType == '.': + f.write(f'{former}\n') + elif compressType == 'to': + f.write(f'{start} to {former}\n') + elif compressType == 'of': + f.write(f'{reps} of {former}\n') + + + def save_vtr(self,fname=None): + """ + Generates vtk rectilinear grid. + + Parameters + ---------- + geom : Geom object + Geometry to write with extension '.vtr'. + fname : str, optional + Filename to write. If no file is given, a string is returned. + Valid extension is .vtr, it will be appended if not given. + + """ + v = VTK.from_rectilinearGrid(self.grid,self.size,self.origin) + v.add(self.microstructure.flatten(order='F'),'materialpoint') + v.add_comments(self.comments) + + if fname: + v.save(fname if str(fname).endswith('.vtr') else str(fname)+'.vtr') + else: + sys.stdout.write(v.__repr__()) + def as_ASCII(self,pack=False): """Format geometry as human-readable ASCII.""" f = StringIO() - self.to_file(f,'ASCII',pack) + self.save_ASCII(f,pack) f.seek(0) return ''.join(f.readlines()) diff --git a/python/damask/_result.py b/python/damask/_result.py index 723a9f3eb..02d0c0abe 100644 --- a/python/damask/_result.py +++ b/python/damask/_result.py @@ -1196,7 +1196,7 @@ class Result: f.write(xml.dom.minidom.parseString(ET.tostring(xdmf).decode()).toprettyxml()) - def to_vtk(self,labels=[],mode='cell'): + def save_vtk(self,labels=[],mode='cell'): """ Export to vtk cell/point data. 
@@ -1268,4 +1268,4 @@ class Result: u = self.read_dataset(self.get_dataset_location('u_n' if mode.lower() == 'cell' else 'u_p')) v.add(u,'u') - v.to_file(f'{self.fname.stem}_inc{inc[3:].zfill(N_digits)}') + v.save(f'{self.fname.stem}_inc{inc[3:].zfill(N_digits)}') diff --git a/python/damask/_table.py b/python/damask/_table.py index b4fd2975a..987233153 100644 --- a/python/damask/_table.py +++ b/python/damask/_table.py @@ -27,7 +27,7 @@ class Table: self.comments = [] if comments_ is None else [c for c in comments_] self.data = pd.DataFrame(data=data) self.shapes = { k:(v,) if isinstance(v,(np.int,int)) else v for k,v in shapes.items() } - self._label_condensed() + self._label_uniform() def __copy__(self): @@ -39,7 +39,7 @@ class Table: return self.__copy__() - def _label_flat(self): + def _label_discrete(self): """Label data individually, e.g. v v v ==> 1_v 2_v 3_v.""" labels = [] for label,shape in self.shapes.items(): @@ -48,8 +48,8 @@ class Table: self.data.columns = labels - def _label_condensed(self): - """Label data condensed, e.g. 1_v 2_v 3_v ==> v v v.""" + def _label_uniform(self): + """Label data uniformly, e.g. 1_v 2_v 3_v ==> v v v.""" labels = [] for label,shape in self.shapes.items(): labels += [label] * int(np.prod(shape)) @@ -64,12 +64,15 @@ class Table: @staticmethod - def from_ASCII(fname): + def load_ASCII(fname): """ Create table from ASCII file. - The first line can indicate the number of subsequent header lines as 'n header', - alternatively first line is the header and comments are marked by '#' ('new style'). + In legacy style, the first line indicates the number of + subsequent header lines as "N header", with the last header line being + interpreted as column labels. + Alternatively, initial comments are marked by '#', with the first non-comment line + containing the column labels. Vector data column labels are indicated by '1_v, 2_v, ..., n_v'. Tensor data column labels are indicated by '3x3:1_T, 3x3:2_T, ..., 3x3:9_T'. 
@@ -119,7 +122,7 @@ class Table: return Table(data,shapes,comments) @staticmethod - def from_ang(fname): + def load_ang(fname): """ Create table from TSL ang file. @@ -289,9 +292,9 @@ class Table: """ dup = self.copy() - dup._label_flat() + dup._label_discrete() dup.data.sort_values(labels,axis=0,inplace=True,ascending=ascending) - dup._label_condensed() + dup._label_uniform() dup.comments.append(f'sorted {"ascending" if ascending else "descending"} by {labels}') return dup @@ -338,59 +341,40 @@ class Table: return dup - def to_file(self,fname,format='ASCII',new_style=False): + def save_ASCII(table,fname,legacy=False): """ Store as plain text file. Parameters ---------- + table : Table object + Table to write. fname : file, str, or pathlib.Path Filename or file for writing. - format : {ASCII'}, optional - File format, defaults to 'ASCII'. Available formats are: - - ASCII: Plain text file, extension '.txt'. - new_style : Boolean, optional - Write table in new style, indicating header lines by comment sign ('#') only. + legacy : Boolean, optional + Write table in legacy style, indicating header lines by "N header" + in contrast to using comment sign ('#') at beginning of lines. """ - def _to_ASCII(table,fname,new_style=False): - """ - Store as plain text file. + seen = set() + labels = [] + for l in [x for x in table.data.columns if not (x in seen or seen.add(x))]: + if table.shapes[l] == (1,): + labels.append(f'{l}') + elif len(table.shapes[l]) == 1: + labels += [f'{i+1}_{l}' \ + for i in range(table.shapes[l][0])] + else: + labels += [f'{util.srepr(table.shapes[l],"x")}:{i+1}_{l}' \ + for i in range(np.prod(table.shapes[l]))] - Parameters - ---------- - table : Table object - Table to write. - fname : file, str, or pathlib.Path - Filename or file for writing. - new_style : Boolean, optional - Write table in new style, indicating header lines by comment sign ('#') only. 
+ header = ([f'{len(table.comments)+1} header'] + table.comments) if legacy else \ + [f'# {comment}' for comment in table.comments] - """ - seen = set() - labels = [] - for l in [x for x in table.data.columns if not (x in seen or seen.add(x))]: - if table.shapes[l] == (1,): - labels.append(f'{l}') - elif len(table.shapes[l]) == 1: - labels += [f'{i+1}_{l}' \ - for i in range(table.shapes[l][0])] - else: - labels += [f'{util.srepr(table.shapes[l],"x")}:{i+1}_{l}' \ - for i in range(np.prod(table.shapes[l]))] + try: + f = open(fname,'w') + except TypeError: + f = fname - header = [f'# {comment}' for comment in table.comments] if new_style else \ - [f'{len(table.comments)+1} header'] + table.comments - - try: - f = open(fname,'w') - except TypeError: - f = fname - - for line in header + [' '.join(labels)]: f.write(line+'\n') - table.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False) - - if format.lower() == 'ascii': - return _to_ASCII(self,fname,new_style) - else: - raise TypeError(f'Unknown format {format}.') + for line in header + [' '.join(labels)]: f.write(line+'\n') + table.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False) diff --git a/python/damask/_test.py b/python/damask/_test.py index 78616fabf..c75266e72 100644 --- a/python/damask/_test.py +++ b/python/damask/_test.py @@ -228,7 +228,7 @@ class Test: def copy_Base2Current(self,sourceDir,sourcefiles=[],targetfiles=[]): - source=os.path.normpath(os.path.join(self.dirBase,'../../..',sourceDir)) + source = os.path.normpath(os.path.join(self.dirBase,'../../..',sourceDir)) if len(targetfiles) == 0: targetfiles = sourcefiles for i,f in enumerate(sourcefiles): try: @@ -287,30 +287,30 @@ class Test: import numpy as np logging.info('\n '.join(['comparing',File1,File2])) - table = damask.Table.from_ASCII(File1) - len1=len(table.comments)+2 - table = damask.Table.from_ASCII(File2) - len2=len(table.comments)+2 + table = damask.Table.load_ASCII(File1) + len1 = len(table.comments)+1 + table = 
damask.Table.load_ASCII(File2) + len2 = len(table.comments)+1 refArray = np.nan_to_num(np.genfromtxt(File1,missing_values='n/a',skip_header = len1,autostrip=True)) curArray = np.nan_to_num(np.genfromtxt(File2,missing_values='n/a',skip_header = len2,autostrip=True)) if len(curArray) == len(refArray): refArrayNonZero = refArray[refArray.nonzero()] - curArray = curArray[refArray.nonzero()] - max_err=np.max(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.)) - max_loc=np.argmax(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.)) + curArray = curArray[refArray.nonzero()] + max_err = np. max(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.)) + max_loc = np.argmax(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.)) refArrayNonZero = refArrayNonZero[curArray.nonzero()] - curArray = curArray[curArray.nonzero()] + curArray = curArray[curArray.nonzero()] print(f' ********\n * maximum relative error {max_err} between {refArrayNonZero[max_loc]} and {curArray[max_loc]}\n ********') return max_err else: - raise Exception('mismatch in array size to compare') + raise Exception(f'mismatch in array sizes ({len(refArray)} and {len(curArray)}) to compare') def compare_ArrayRefCur(self,ref,cur=''): - if cur =='': cur = ref + if cur == '': cur = ref refName = self.fileInReference(ref) curName = self.fileInCurrent(cur) return self.compare_Array(refName,curName) @@ -399,10 +399,8 @@ class Test: if any(norm[i]) == 0.0 or absTol[i]: norm[i] = [1.0 for j in range(line0-len(skipLines))] absTol[i] = True - if perLine: - logging.warning(f"At least one norm of \"{headings0[i]['label']}\" in first table is 0.0, using absolute tolerance") - else: - logging.warning(f"Maximum norm of \"{headings0[i]['label']}\" in first table is 0.0, using absolute tolerance") + logging.warning(f'''{"At least one" if perLine else "Maximum"} norm of + "{headings0[i]['label']}" in first table is 0.0, using absolute tolerance''') 
line1 = 0 while table1.data_read(): # read next data line of ASCII table @@ -418,20 +416,18 @@ class Test: logging.info(' ********') for i in range(dataLength): - if absTol[i]: - logging.info(f" * maximum absolute error {maxError[i]} between {headings0[i]['label']} and {headings1[i]['label']}") - else: - logging.info(f" * maximum relative error {maxError[i]} between {headings0[i]['label']} and {headings1[i]['label']}") + logging.info(f''' * maximum {'absolute' if absTol[i] else 'relative'} error {maxError[i]} + between {headings0[i]['label']} and {headings1[i]['label']}''') logging.info(' ********') return maxError def compare_TablesStatistically(self, - files = [None,None], # list of file names - columns = [None], # list of list of column labels (per file) - meanTol = 1.0e-4, - stdTol = 1.0e-6, - preFilter = 1.0e-9): + files = [None,None], # list of file names + columns = [None], # list of list of column labels (per file) + meanTol = 1.0e-4, + stdTol = 1.0e-6, + preFilter = 1.0e-9): """ Calculate statistics of tables. 
@@ -440,9 +436,9 @@ class Test: if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested files = [str(files)] - tables = [damask.Table.from_ASCII(filename) for filename in files] + tables = [damask.Table.load_ASCII(filename) for filename in files] for table in tables: - table._label_flat() + table._label_discrete() columns += [columns[0]]*(len(files)-len(columns)) # extend to same length as files columns = columns[:len(files)] # truncate to same length as files @@ -462,7 +458,7 @@ class Test: data = [] for table,labels in zip(tables,columns): - table._label_condensed() + table._label_uniform() data.append(np.hstack(list(table.get(label) for label in labels))) @@ -471,12 +467,11 @@ class Test: normBy = (np.abs(data[i]) + np.abs(data[i-1]))*0.5 normedDelta = np.where(normBy>preFilter,delta/normBy,0.0) mean = np.amax(np.abs(np.mean(normedDelta,0))) - std = np.amax(np.std(normedDelta,0)) + std = np.amax(np.std(normedDelta,0)) logging.info(f'mean: {mean:f}') logging.info(f'std: {std:f}') - return (mean 1: - current.to_file(reference,format='vtr') + current.save_vtr(reference) for _ in range(10): time.sleep(.2) if os.path.exists(reference.with_suffix('.vtr')): break - assert geom_equal(Geom.from_vtr(reference) if stencil > 1 else default, + assert geom_equal(Geom.load_vtr(reference) if stencil > 1 else default, current ) @@ -220,8 +216,8 @@ class TestGeom: modified = default.scale(grid) tag = f'grid={util.srepr(grid,"-")}' reference = reference_dir/f'scale_{tag}.geom' - if update: modified.to_file(reference) - assert geom_equal(Geom.from_file(reference), + if update: modified.save_ASCII(reference) + assert geom_equal(Geom.load_ASCII(reference), modified) def test_renumber(self,default): @@ -255,8 +251,8 @@ class TestGeom: modified = default.rotate(Rotation.from_Eulers(Eulers,degrees=True)) tag = f'Eulers={util.srepr(Eulers,"-")}' reference = reference_dir/f'rotate_{tag}.geom' - if update: modified.to_file(reference) 
- assert geom_equal(Geom.from_file(reference), + if update: modified.save_ASCII(reference) + assert geom_equal(Geom.load_ASCII(reference), modified) def test_canvas(self,default): diff --git a/python/tests/test_Orientation.py b/python/tests/test_Orientation.py index 9a23dc0ed..1c92340bf 100644 --- a/python/tests/test_Orientation.py +++ b/python/tests/test_Orientation.py @@ -107,7 +107,7 @@ class TestOrientation: table = Table(eu,{'Eulers':(3,)}) table = table.add('pos',coords) table.to_ASCII(reference) - assert np.allclose(eu,Table.from_ASCII(reference).get('Eulers')) + assert np.allclose(eu,Table.load_ASCII(reference).get('Eulers')) @pytest.mark.parametrize('lattice',Lattice.lattices) def test_disorientation360(self,lattice): @@ -129,4 +129,3 @@ class TestOrientation: eqs = [r for r in R_1.equivalent] R_2 = Orientation.from_average(eqs) assert np.allclose(R_1.rotation.quaternion,R_2.rotation.quaternion) - diff --git a/python/tests/test_Result.py b/python/tests/test_Result.py index 6000f50f9..c25bf7a4c 100644 --- a/python/tests/test_Result.py +++ b/python/tests/test_Result.py @@ -339,7 +339,7 @@ class TestResult: @pytest.mark.parametrize('output',['F',[],['F','P']]) def test_vtk(self,tmp_path,default,output): os.chdir(tmp_path) - default.to_vtk(output) + default.save_vtk(output) def test_XDMF(self,tmp_path,single_phase): os.chdir(tmp_path) diff --git a/python/tests/test_Table.py b/python/tests/test_Table.py index 1763e27ef..af940a037 100644 --- a/python/tests/test_Table.py +++ b/python/tests/test_Table.py @@ -35,50 +35,50 @@ class TestTable: @pytest.mark.parametrize('mode',['str','path']) def test_write_read(self,default,tmpdir,mode): - default.to_file(tmpdir/'default.txt') + default.save_ASCII(tmpdir/'default.txt') if mode == 'path': - new = Table.from_ASCII(tmpdir/'default.txt') + new = Table.load_ASCII(tmpdir/'default.txt') elif mode == 'str': - new = Table.from_ASCII(str(tmpdir/'default.txt')) + new = Table.load_ASCII(str(tmpdir/'default.txt')) assert 
all(default.data==new.data) and default.shapes == new.shapes def test_write_read_file(self,default,tmpdir): with open(tmpdir/'default.txt','w') as f: - default.to_file(f) + default.save_ASCII(f) with open(tmpdir/'default.txt') as f: - new = Table.from_ASCII(f) + new = Table.load_ASCII(f) assert all(default.data==new.data) and default.shapes == new.shapes - def test_write_read_new_style(self,default,tmpdir): - with open(tmpdir/'new_style.txt','w') as f: - default.to_file(f,new_style=True) - with open(tmpdir/'new_style.txt') as f: - new = Table.from_ASCII(f) + def test_write_read_legacy_style(self,default,tmpdir): + with open(tmpdir/'legacy.txt','w') as f: + default.save_ASCII(f,legacy=True) + with open(tmpdir/'legacy.txt') as f: + new = Table.load_ASCII(f) assert all(default.data==new.data) and default.shapes == new.shapes def test_write_invalid_format(self,default,tmpdir): with pytest.raises(TypeError): - default.to_file(tmpdir/'shouldnotbethere.txt',format='invalid') + default.save_ASCII(tmpdir/'shouldnotbethere.txt',format='invalid') @pytest.mark.parametrize('mode',['str','path']) def test_read_ang(self,reference_dir,mode): if mode == 'path': - new = Table.from_ang(reference_dir/'simple.ang') + new = Table.load_ang(reference_dir/'simple.ang') elif mode == 'str': - new = Table.from_ang(str(reference_dir/'simple.ang')) + new = Table.load_ang(str(reference_dir/'simple.ang')) assert new.data.shape == (4,10) and \ new.labels == ['eu', 'pos', 'IQ', 'CI', 'ID', 'intensity', 'fit'] def test_read_ang_file(self,reference_dir): f = open(reference_dir/'simple.ang') - new = Table.from_ang(f) + new = Table.load_ang(f) assert new.data.shape == (4,10) and \ new.labels == ['eu', 'pos', 'IQ', 'CI', 'ID', 'intensity', 'fit'] @pytest.mark.parametrize('fname',['datatype-mix.txt','whitespace-mix.txt']) def test_read_strange(self,reference_dir,fname): with open(reference_dir/fname) as f: - Table.from_ASCII(f) + Table.load_ASCII(f) def test_set(self,default): d = 
default.set('F',np.zeros((5,3,3)),'set to zero').get('F') @@ -166,7 +166,7 @@ class TestTable: x = np.random.random((5,12)) t = Table(x,{'F':(3,3),'v':(3,)},['random test data']) unsort = t.get('4_F') - sort = t.sort_by('4_F').get('4_F') + sort = t.sort_by('4_F').get('4_F') assert np.all(np.sort(unsort,0)==sort) def test_sort_revert(self): @@ -179,6 +179,6 @@ class TestTable: t = Table(np.array([[0,1,],[2,1,]]), {'v':(2,)}, ['test data'])\ - .add('s',np.array(['b','a']))\ - .sort_by('s') + .add('s',np.array(['b','a']))\ + .sort_by('s') assert np.all(t.get('1_v') == np.array([2,0]).reshape(2,1)) diff --git a/python/tests/test_VTK.py b/python/tests/test_VTK.py index f6be4da25..81c9eb772 100644 --- a/python/tests/test_VTK.py +++ b/python/tests/test_VTK.py @@ -32,22 +32,22 @@ class TestVTK: origin = np.random.random(3) v = VTK.from_rectilinearGrid(grid,size,origin) string = v.__repr__() - v.to_file(tmp_path/'rectilinearGrid',False) - vtr = VTK.from_file(tmp_path/'rectilinearGrid.vtr') + v.save(tmp_path/'rectilinearGrid',False) + vtr = VTK.load(tmp_path/'rectilinearGrid.vtr') with open(tmp_path/'rectilinearGrid.vtk','w') as f: f.write(string) - vtk = VTK.from_file(tmp_path/'rectilinearGrid.vtk','VTK_rectilinearGrid') + vtk = VTK.load(tmp_path/'rectilinearGrid.vtk','VTK_rectilinearGrid') assert(string == vtr.__repr__() == vtk.__repr__()) def test_polyData(self,tmp_path): points = np.random.rand(100,3) v = VTK.from_polyData(points) string = v.__repr__() - v.to_file(tmp_path/'polyData',False) - vtp = VTK.from_file(tmp_path/'polyData.vtp') + v.save(tmp_path/'polyData',False) + vtp = VTK.load(tmp_path/'polyData.vtp') with open(tmp_path/'polyData.vtk','w') as f: f.write(string) - vtk = VTK.from_file(tmp_path/'polyData.vtk','polyData') + vtk = VTK.load(tmp_path/'polyData.vtk','polyData') assert(string == vtp.__repr__() == vtk.__repr__()) @pytest.mark.parametrize('cell_type,n',[ @@ -62,11 +62,11 @@ class TestVTK: connectivity = 
np.random.choice(np.arange(n),n,False).reshape(-1,n) v = VTK.from_unstructuredGrid(nodes,connectivity,cell_type) string = v.__repr__() - v.to_file(tmp_path/'unstructuredGrid',False) - vtu = VTK.from_file(tmp_path/'unstructuredGrid.vtu') + v.save(tmp_path/'unstructuredGrid',False) + vtu = VTK.load(tmp_path/'unstructuredGrid.vtu') with open(tmp_path/'unstructuredGrid.vtk','w') as f: f.write(string) - vtk = VTK.from_file(tmp_path/'unstructuredGrid.vtk','unstructuredgrid') + vtk = VTK.load(tmp_path/'unstructuredGrid.vtk','unstructuredgrid') assert(string == vtu.__repr__() == vtk.__repr__()) @@ -75,8 +75,8 @@ class TestVTK: v = VTK.from_polyData(points) fname_s = tmp_path/'single.vtp' fname_p = tmp_path/'parallel.vtp' - v.to_file(fname_s,False) - v.to_file(fname_p,True) + v.save(fname_s,False) + v.save(fname_p,True) for i in range(10): if os.path.isfile(fname_p) and filecmp.cmp(fname_s,fname_p): assert(True) @@ -90,11 +90,11 @@ class TestVTK: ('this_file_does_not_exist.vtx', None)]) def test_invalid_dataset_type(self,name,dataset_type): with pytest.raises(TypeError): - VTK.from_file(name,dataset_type) + VTK.load(name,dataset_type) def test_invalid_extension_write(self,default): with pytest.raises(ValueError): - default.to_file('default.txt') + default.save('default.txt') def test_invalid_get(self,default): with pytest.raises(ValueError): @@ -115,8 +115,8 @@ class TestVTK: def test_comments(self,tmp_path,default): default.add_comments(['this is a comment']) - default.to_file(tmp_path/'with_comments',parallel=False) - new = VTK.from_file(tmp_path/'with_comments.vtr') + default.save(tmp_path/'with_comments',parallel=False) + new = VTK.load(tmp_path/'with_comments.vtr') assert new.get_comments() == ['this is a comment'] def test_compare_reference_polyData(self,update,reference_dir,tmp_path): @@ -124,9 +124,9 @@ class TestVTK: polyData = VTK.from_polyData(points) polyData.add(points,'coordinates') if update: - polyData.to_file(reference_dir/'polyData') + 
polyData.save(reference_dir/'polyData') else: - reference = VTK.from_file(reference_dir/'polyData.vtp') + reference = VTK.load(reference_dir/'polyData.vtp') assert polyData.__repr__() == reference.__repr__() and \ np.allclose(polyData.get('coordinates'),points) @@ -139,8 +139,8 @@ class TestVTK: rectilinearGrid.add(c,'cell') rectilinearGrid.add(n,'node') if update: - rectilinearGrid.to_file(reference_dir/'rectilinearGrid') + rectilinearGrid.save(reference_dir/'rectilinearGrid') else: - reference = VTK.from_file(reference_dir/'rectilinearGrid.vtr') + reference = VTK.load(reference_dir/'rectilinearGrid.vtr') assert rectilinearGrid.__repr__() == reference.__repr__() and \ np.allclose(rectilinearGrid.get('cell'),c) From 2114c510983bf56aa6eaf9896a9a994884bdc2a8 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Tue, 15 Sep 2020 01:47:26 -0400 Subject: [PATCH 02/27] revert mistakenly made header length change in array-comparison --- python/damask/_test.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/python/damask/_test.py b/python/damask/_test.py index c75266e72..ab4df6d68 100644 --- a/python/damask/_test.py +++ b/python/damask/_test.py @@ -288,9 +288,9 @@ class Test: import numpy as np logging.info('\n '.join(['comparing',File1,File2])) table = damask.Table.load_ASCII(File1) - len1 = len(table.comments)+1 + len1 = len(table.comments)+2 table = damask.Table.load_ASCII(File2) - len2 = len(table.comments)+1 + len2 = len(table.comments)+2 refArray = np.nan_to_num(np.genfromtxt(File1,missing_values='n/a',skip_header = len1,autostrip=True)) curArray = np.nan_to_num(np.genfromtxt(File2,missing_values='n/a',skip_header = len2,autostrip=True)) @@ -331,7 +331,7 @@ class Test: logging.info('\n '.join(['comparing ASCII Tables',file0,file1])) if normHeadings == '': normHeadings = headings0 -# check if comparison is possible and determine lenght of columns +# check if comparison is possible and determine length of columns if len(headings0) == 
len(headings1) == len(normHeadings): dataLength = len(headings0) length = [1 for i in range(dataLength)] From f52aae3c29ca6e165c6a37244b7fb72895da4ae9 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Tue, 15 Sep 2020 10:19:19 -0400 Subject: [PATCH 03/27] legacy shell scripts again output legacy ASCIItables --- processing/post/addCompatibilityMismatch.py | 2 +- processing/post/addCurl.py | 2 +- processing/post/addDerivative.py | 2 +- processing/post/addDisplacement.py | 4 ++-- processing/post/addDivergence.py | 2 +- processing/post/addEuclideanDistance.py | 2 +- processing/post/addGaussian.py | 2 +- processing/post/addGradient.py | 2 +- processing/post/addOrientations.py | 2 +- processing/post/addSchmidfactors.py | 2 +- processing/post/permuteData.py | 2 +- 11 files changed, 12 insertions(+), 12 deletions(-) diff --git a/processing/post/addCompatibilityMismatch.py b/processing/post/addCompatibilityMismatch.py index e274092d2..40a1391e0 100755 --- a/processing/post/addCompatibilityMismatch.py +++ b/processing/post/addCompatibilityMismatch.py @@ -191,4 +191,4 @@ for name in filenames: volumeMismatch.reshape(-1,1,order='F'), scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII(sys.stdout if name is None else name) + table.save_ASCII((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addCurl.py b/processing/post/addCurl.py index 9adb06a00..5a5f4c074 100755 --- a/processing/post/addCurl.py +++ b/processing/post/addCurl.py @@ -55,4 +55,4 @@ for name in filenames: curl.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape),order='F'), scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII(sys.stdout if name is None else name) + table.save_ASCII((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addDerivative.py b/processing/post/addDerivative.py index 179715e23..63a5bf73e 100755 --- a/processing/post/addDerivative.py +++ b/processing/post/addDerivative.py @@ -71,4 +71,4 @@ for name in filenames: 
derivative(table.get(options.coordinates),table.get(label)), scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII(sys.stdout if name is None else name) + table.save_ASCII((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addDisplacement.py b/processing/post/addDisplacement.py index 8dc3051d3..079bcd970 100755 --- a/processing/post/addDisplacement.py +++ b/processing/post/addDisplacement.py @@ -60,7 +60,7 @@ for name in filenames: .add('fluct({}).{}'.format(options.f,options.pos), damask.grid_filters.node_displacement_fluct(size,F).reshape(-1,3,order='F'), scriptID+' '+' '.join(sys.argv[1:]))\ - .save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt') + .save_ASCII((sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt'), legacy=True) else: table.add('avg({}).{}'.format(options.f,options.pos), damask.grid_filters.cell_displacement_avg(size,F).reshape(-1,3,order='F'), @@ -68,4 +68,4 @@ for name in filenames: .add('fluct({}).{}'.format(options.f,options.pos), damask.grid_filters.cell_displacement_fluct(size,F).reshape(-1,3,order='F'), scriptID+' '+' '.join(sys.argv[1:]))\ - .save_ASCII(sys.stdout if name is None else name) + .save_ASCII((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addDivergence.py b/processing/post/addDivergence.py index b4c0bb7b4..bc4880788 100755 --- a/processing/post/addDivergence.py +++ b/processing/post/addDivergence.py @@ -55,4 +55,4 @@ for name in filenames: div.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)//3,order='F'), scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII(sys.stdout if name is None else name) + table.save_ASCII((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addEuclideanDistance.py b/processing/post/addEuclideanDistance.py index 305fc2ad5..7e99dc3d5 100755 --- a/processing/post/addEuclideanDistance.py +++ b/processing/post/addEuclideanDistance.py @@ -184,4 +184,4 @@ for 
name in filenames: distance[i,:], scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII(sys.stdout if name is None else name) + table.save_ASCII((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addGaussian.py b/processing/post/addGaussian.py index 708617c1f..095a17bc2 100755 --- a/processing/post/addGaussian.py +++ b/processing/post/addGaussian.py @@ -73,4 +73,4 @@ for name in filenames: mode = 'wrap' if options.periodic else 'nearest'), scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII(sys.stdout if name is None else name) + table.save_ASCII((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addGradient.py b/processing/post/addGradient.py index e216ed34c..69241598c 100755 --- a/processing/post/addGradient.py +++ b/processing/post/addGradient.py @@ -55,4 +55,4 @@ for name in filenames: grad.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)*3,order='F'), scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII(sys.stdout if name is None else name) + table.save_ASCII((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addOrientations.py b/processing/post/addOrientations.py index 3a320241c..16f8d62d6 100755 --- a/processing/post/addOrientations.py +++ b/processing/post/addOrientations.py @@ -147,4 +147,4 @@ for name in filenames: if 'axisangle' in options.output: table = table.add('om({})'.format(label),o.as_axisangle(options.degrees), scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII(sys.stdout if name is None else name) + table.save_ASCII((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addSchmidfactors.py b/processing/post/addSchmidfactors.py index 40db4da34..beaf18331 100755 --- a/processing/post/addSchmidfactors.py +++ b/processing/post/addSchmidfactors.py @@ -189,4 +189,4 @@ for name in filenames: for i,label in enumerate(labels): table = table.add(label,S[:,i],scriptID+' '+' '.join(sys.argv[1:])) - 
table.save_ASCII(sys.stdout if name is None else name) + table.save_ASCII((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/permuteData.py b/processing/post/permuteData.py index af184924d..34451404c 100755 --- a/processing/post/permuteData.py +++ b/processing/post/permuteData.py @@ -58,4 +58,4 @@ for name in filenames: rng.shuffle(uniques) table = table.set(label,uniques[inverse], scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII(sys.stdout if name is None else name) + table.save_ASCII((sys.stdout if name is None else name), legacy=True) From b57e8e69a87f66e1fb81eca0e1236125fd5432b4 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Tue, 15 Sep 2020 22:38:50 -0400 Subject: [PATCH 04/27] switched save_ASCII() in more legacy scripts to legacy mode --- PRIVATE | 2 +- processing/legacy/addAPS34IDEstrainCoords.py | 2 +- processing/legacy/addCumulative.py | 2 +- processing/post/DADF5_postResults.py | 2 +- processing/pre/seeds_fromDistribution.py | 2 +- processing/pre/seeds_fromGeom.py | 2 +- processing/pre/seeds_fromPokes.py | 2 +- processing/pre/seeds_fromRandom.py | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/PRIVATE b/PRIVATE index 5b7c34e58..3e789fc51 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit 5b7c34e586589141d1e061f021512e65308fc1f7 +Subproject commit 3e789fc515dc2bdc63e042286dc06d8d072d096f diff --git a/processing/legacy/addAPS34IDEstrainCoords.py b/processing/legacy/addAPS34IDEstrainCoords.py index c82ff35a3..8f566b614 100755 --- a/processing/legacy/addAPS34IDEstrainCoords.py +++ b/processing/legacy/addAPS34IDEstrainCoords.py @@ -48,4 +48,4 @@ for name in filenames: coord[:,2] += table.get(options.depth)[:,0] table.add('coord',rot_to_TSL.broadcast_to(coord.shape[0]) @ coord,scriptID+' '+' '.join(sys.argv[1:]))\ - .save_ASCII(sys.stdout if name is None else name) + .save_ASCII((sys.stdout if name is None else name),legacy=True) diff --git a/processing/legacy/addCumulative.py 
b/processing/legacy/addCumulative.py index 14d8bcfea..0d88cc575 100755 --- a/processing/legacy/addCumulative.py +++ b/processing/legacy/addCumulative.py @@ -45,4 +45,4 @@ for name in filenames: np.cumprod(table.get(label),0) if options.product else np.cumsum(table.get(label),0), scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII(sys.stdout if name is None else name) + table.save_ASCII((sys.stdout if name is None else name),legacy=True) diff --git a/processing/post/DADF5_postResults.py b/processing/post/DADF5_postResults.py index fa9df27b0..0226ad551 100755 --- a/processing/post/DADF5_postResults.py +++ b/processing/post/DADF5_postResults.py @@ -60,4 +60,4 @@ for filename in options.filenames: os.mkdir(dirname,0o755) file_out = '{}_inc{}.txt'.format(os.path.splitext(os.path.split(filename)[-1])[0], inc[3:].zfill(N_digits)) - table.save_ASCII(os.path.join(dirname,file_out)) + table.save_ASCII(os.path.join(dirname,file_out),legacy=True) diff --git a/processing/pre/seeds_fromDistribution.py b/processing/pre/seeds_fromDistribution.py index 1cf35450c..cc72920d3 100755 --- a/processing/pre/seeds_fromDistribution.py +++ b/processing/pre/seeds_fromDistribution.py @@ -89,7 +89,7 @@ class myThread (threading.Thread): coords[i]=newCoords direction[i]*=2. 
i+= 1 - perturbedSeedsTable.set('pos',coords).save_ASCII(perturbedSeedsVFile) + perturbedSeedsTable.set('pos',coords).save_ASCII(perturbedSeedsVFile,legacy=True) #--- do tesselation with perturbed seed file ------------------------------------------------------ perturbedGeomVFile.close() diff --git a/processing/pre/seeds_fromGeom.py b/processing/pre/seeds_fromGeom.py index 95f0d3815..4110405d6 100755 --- a/processing/pre/seeds_fromGeom.py +++ b/processing/pre/seeds_fromGeom.py @@ -65,4 +65,4 @@ for name in filenames: damask.Table(seeds[mask],{'pos':(3,)},comments)\ .add('microstructure',microstructure[mask].astype(int))\ - .save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.seeds') + .save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.seeds',legacy=True) diff --git a/processing/pre/seeds_fromPokes.py b/processing/pre/seeds_fromPokes.py index 5deb0ab1a..fdff40da3 100755 --- a/processing/pre/seeds_fromPokes.py +++ b/processing/pre/seeds_fromPokes.py @@ -93,4 +93,4 @@ for name in filenames: table = damask.Table(seeds,{'pos':(3,),'microstructure':(1,)},comments) table.set('microstructure',table.get('microstructure').astype(np.int))\ .save_ASCII(sys.stdout if name is None else \ - os.path.splitext(name)[0]+f'_poked_{options.N}.seeds') + os.path.splitext(name)[0]+f'_poked_{options.N}.seeds',legacy=True) diff --git a/processing/pre/seeds_fromRandom.py b/processing/pre/seeds_fromRandom.py index c1d454f67..8700a4648 100755 --- a/processing/pre/seeds_fromRandom.py +++ b/processing/pre/seeds_fromRandom.py @@ -162,4 +162,4 @@ for name in filenames: else np.random.normal(loc = options.mean, scale = options.sigma, size = options.N) table = table.add('weight',weights) - table.save_ASCII(sys.stdout if name is None else name) + table.save_ASCII(sys.stdout if name is None else name,legacy=True) From 505d8fef21c01308e77bade1a91edb2493d7eecd Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Wed, 16 Sep 2020 09:22:49 -0400 Subject: [PATCH 
05/27] reverted references of seed_fromX tests to legacy ASCII style in PRIVATE --- PRIVATE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/PRIVATE b/PRIVATE index 3e789fc51..52b0af920 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit 3e789fc515dc2bdc63e042286dc06d8d072d096f +Subproject commit 52b0af9208bfcd988a2d8152d2c2dfd7dea42e30 From 2c741455f09c5ebe259a2d2d7e46d63ced0e599d Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Wed, 16 Sep 2020 19:30:09 -0400 Subject: [PATCH 06/27] fixed left-over to_vtk --> save_vtk in PRIVATE --- PRIVATE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/PRIVATE b/PRIVATE index 52b0af920..0b5204381 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit 52b0af9208bfcd988a2d8152d2c2dfd7dea42e30 +Subproject commit 0b5204381cc8b86212ed8c0de43110d0175badd4 From 4d4283a03249be87a9e6a955da66e80aad428489 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 18 Sep 2020 12:03:21 +0200 Subject: [PATCH 07/27] report next pipeline --- .gitlab-ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index ed5e762b7..c412b0abe 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -87,6 +87,7 @@ checkout: - echo $CI_PIPELINE_ID >> $TESTROOT/GitLabCI.queue - while [ $(awk "/$CI_PIPELINE_ID/{print NR}" $TESTROOT/GitLabCI.queue) != 1 ]; do sleep 5m; + echo 'next pipeline $(head -n 1 $TESTROOT/GitLabCI.queue)'; done script: - mkdir -p $DAMASKROOT From f34525fd3ac30dc61fffb5909bfa26be7452919e Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 18 Sep 2020 12:33:54 +0200 Subject: [PATCH 08/27] allow shell substitution --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index c412b0abe..ff58727d5 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -87,7 +87,7 @@ checkout: - echo $CI_PIPELINE_ID >> $TESTROOT/GitLabCI.queue - while [ $(awk "/$CI_PIPELINE_ID/{print NR}" 
$TESTROOT/GitLabCI.queue) != 1 ]; do sleep 5m; - echo 'next pipeline $(head -n 1 $TESTROOT/GitLabCI.queue)'; + echo "next pipeline $(head -n 1 $TESTROOT/GitLabCI.queue)"; done script: - mkdir -p $DAMASKROOT From beb8f07a91f5cfc21e2abe283db8bfb860913f7f Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 18 Sep 2020 12:58:37 +0200 Subject: [PATCH 09/27] more helpful information --- .gitlab-ci.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index ff58727d5..fb0f17030 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -25,6 +25,7 @@ before_script: fi - while [ $(awk "/$CI_PIPELINE_ID/{print NR}" $TESTROOT/GitLabCI.queue) != 1 ]; do sleep 5m; + echo "next pipeline in queue has ID $(head -n 1 $TESTROOT/GitLabCI.queue)"; done - source $DAMASKROOT/env/DAMASK.sh - cd $DAMASKROOT/PRIVATE/testing @@ -87,7 +88,7 @@ checkout: - echo $CI_PIPELINE_ID >> $TESTROOT/GitLabCI.queue - while [ $(awk "/$CI_PIPELINE_ID/{print NR}" $TESTROOT/GitLabCI.queue) != 1 ]; do sleep 5m; - echo "next pipeline $(head -n 1 $TESTROOT/GitLabCI.queue)"; + echo "next pipeline in queue has ID $(head -n 1 $TESTROOT/GitLabCI.queue)"; done script: - mkdir -p $DAMASKROOT From 25ad07764081f27f4f774db79afdfc7a8c5fe2f9 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 18 Sep 2020 14:19:23 +0200 Subject: [PATCH 10/27] report all pipelines in front of me --- .gitlab-ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index fb0f17030..bb4a2e7a0 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -25,7 +25,7 @@ before_script: fi - while [ $(awk "/$CI_PIPELINE_ID/{print NR}" $TESTROOT/GitLabCI.queue) != 1 ]; do sleep 5m; - echo "next pipeline in queue has ID $(head -n 1 $TESTROOT/GitLabCI.queue)"; + echo -e "Currently queued pipelines:\n$(cat $TESTROOT/GitLabCI.queue)\n"; done - source $DAMASKROOT/env/DAMASK.sh - cd $DAMASKROOT/PRIVATE/testing @@ -88,7 +88,7 @@ checkout: - echo $CI_PIPELINE_ID >> 
$TESTROOT/GitLabCI.queue - while [ $(awk "/$CI_PIPELINE_ID/{print NR}" $TESTROOT/GitLabCI.queue) != 1 ]; do sleep 5m; - echo "next pipeline in queue has ID $(head -n 1 $TESTROOT/GitLabCI.queue)"; + echo -e "Currently queued pipelines:\n$(cat $TESTROOT/GitLabCI.queue)\n"; done script: - mkdir -p $DAMASKROOT From ec7dbb4c9ade601fcd4475ed0c6b61fd1b1f4259 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 18 Sep 2020 14:21:30 +0200 Subject: [PATCH 11/27] do not store temp files --- .../Visualize_hybridIA_sampling-checkpoint.ipynb | 6 ------ 1 file changed, 6 deletions(-) delete mode 100644 python/tests/reference/Rotation/.ipynb_checkpoints/Visualize_hybridIA_sampling-checkpoint.ipynb diff --git a/python/tests/reference/Rotation/.ipynb_checkpoints/Visualize_hybridIA_sampling-checkpoint.ipynb b/python/tests/reference/Rotation/.ipynb_checkpoints/Visualize_hybridIA_sampling-checkpoint.ipynb deleted file mode 100644 index 7fec51502..000000000 --- a/python/tests/reference/Rotation/.ipynb_checkpoints/Visualize_hybridIA_sampling-checkpoint.ipynb +++ /dev/null @@ -1,6 +0,0 @@ -{ - "cells": [], - "metadata": {}, - "nbformat": 4, - "nbformat_minor": 4 -} From e2cd3da29551233d5068acc249c20838726ac2d9 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 18 Sep 2020 14:29:27 +0200 Subject: [PATCH 12/27] remaining to_xx --- python/tests/test_Orientation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/tests/test_Orientation.py b/python/tests/test_Orientation.py index 1c92340bf..bee3c8c59 100644 --- a/python/tests/test_Orientation.py +++ b/python/tests/test_Orientation.py @@ -106,7 +106,7 @@ class TestOrientation: coords = np.array([(1,i+1) for i,x in enumerate(eu)]) table = Table(eu,{'Eulers':(3,)}) table = table.add('pos',coords) - table.to_ASCII(reference) + table.write(reference) assert np.allclose(eu,Table.load_ASCII(reference).get('Eulers')) @pytest.mark.parametrize('lattice',Lattice.lattices) From 1849ff0330bc7fa0e06e5d3c6102d3e86536d666 Mon Sep
17 00:00:00 2001 From: Martin Diehl Date: Fri, 18 Sep 2020 15:03:51 +0200 Subject: [PATCH 13/27] polishing --- python/damask/_geom.py | 4 ---- python/damask/_table.py | 34 +++++++++++++++----------------- python/tests/test_Orientation.py | 2 +- 3 files changed, 17 insertions(+), 23 deletions(-) diff --git a/python/damask/_geom.py b/python/damask/_geom.py index 8fa0b533a..60f7b90a5 100644 --- a/python/damask/_geom.py +++ b/python/damask/_geom.py @@ -453,8 +453,6 @@ class Geom: Parameters ---------- - geom : Geom object - Geometry to write. fname : str or file handle Geometry file to write with extension '.geom'. pack : bool, optional @@ -526,8 +524,6 @@ class Geom: Parameters ---------- - geom : Geom object - Geometry to write with extension '.vtr'. fname : str, optional Filename to write. If no file is given, a string is returned. Valid extension is .vtr, it will be appended if not given. diff --git a/python/damask/_table.py b/python/damask/_table.py index 987233153..3215cd8db 100644 --- a/python/damask/_table.py +++ b/python/damask/_table.py @@ -66,7 +66,7 @@ class Table: @staticmethod def load_ASCII(fname): """ - Create table from ASCII file. + Load ASCII table file. In legacy style, the first line indicates the number of subsequent header lines as "N header", with the last header line being @@ -124,7 +124,7 @@ class Table: @staticmethod def load_ang(fname): """ - Create table from TSL ang file. + Load ang file. A valid TSL ang file needs to contains the following columns: * Euler angles (Bunge notation) in radians, 3 floats, label 'eu'. @@ -341,14 +341,12 @@ class Table: return dup - def save_ASCII(table,fname,legacy=False): + def save_ASCII(self,fname,legacy=False): """ - Store as plain text file. + Save as plain text file. Parameters ---------- - table : Table object - Table to write. fname : file, str, or pathlib.Path Filename or file for writing. 
legacy : Boolean, optional @@ -358,23 +356,23 @@ class Table: """ seen = set() labels = [] - for l in [x for x in table.data.columns if not (x in seen or seen.add(x))]: - if table.shapes[l] == (1,): + for l in [x for x in self.data.columns if not (x in seen or seen.add(x))]: + if self.shapes[l] == (1,): labels.append(f'{l}') - elif len(table.shapes[l]) == 1: + elif len(self.shapes[l]) == 1: labels += [f'{i+1}_{l}' \ - for i in range(table.shapes[l][0])] + for i in range(self.shapes[l][0])] else: - labels += [f'{util.srepr(table.shapes[l],"x")}:{i+1}_{l}' \ - for i in range(np.prod(table.shapes[l]))] + labels += [f'{util.srepr(self.shapes[l],"x")}:{i+1}_{l}' \ + for i in range(np.prod(self.shapes[l]))] - header = ([f'{len(table.comments)+1} header'] + table.comments) if legacy else \ - [f'# {comment}' for comment in table.comments] + header = ([f'{len(self.comments)+1} header'] + self.comments) if legacy else \ + [f'# {comment}' for comment in self.comments] try: - f = open(fname,'w') + fhandle = open(fname,'w') except TypeError: - f = fname + fhandle = fname - for line in header + [' '.join(labels)]: f.write(line+'\n') - table.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False) + for line in header + [' '.join(labels)]: fhandle.write(line+'\n') + self.data.to_csv(fhandle,sep=' ',na_rep='nan',index=False,header=False) diff --git a/python/tests/test_Orientation.py b/python/tests/test_Orientation.py index bee3c8c59..3a7425ddb 100644 --- a/python/tests/test_Orientation.py +++ b/python/tests/test_Orientation.py @@ -106,7 +106,7 @@ class TestOrientation: coords = np.array([(1,i+1) for i,x in enumerate(eu)]) table = Table(eu,{'Eulers':(3,)}) table = table.add('pos',coords) - table.write(reference) + table.save_ASCII(reference) assert np.allclose(eu,Table.load_ASCII(reference).get('Eulers')) @pytest.mark.parametrize('lattice',Lattice.lattices) From c935ba12153161f4abc90985e760642e40a315c9 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 18 Sep 2020 16:23:23 
+0200 Subject: [PATCH 14/27] user friendly self reporting --- python/damask/_table.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/python/damask/_table.py b/python/damask/_table.py index 3215cd8db..9789c8be6 100644 --- a/python/damask/_table.py +++ b/python/damask/_table.py @@ -29,6 +29,9 @@ class Table: self.shapes = { k:(v,) if isinstance(v,(np.int,int)) else v for k,v in shapes.items() } self._label_uniform() + def __repr__(self): + """Brief overview.""" + return util.srepr(self.comments)+'\n'+self.data.__repr__() def __copy__(self): """Copy Table.""" From f23b89f05596b952b5b58c3e84bcb38b13014acd Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 18 Sep 2020 16:30:22 +0200 Subject: [PATCH 15/27] get rid of annoying warning during test --- python/damask/util.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/python/damask/util.py b/python/damask/util.py index a04ee47de..655f75879 100644 --- a/python/damask/util.py +++ b/python/damask/util.py @@ -172,8 +172,9 @@ def scale_to_coprime(v): m = (np.array(v) * reduce(lcm, map(lambda x: int(get_square_denominator(x)),v)) ** 0.5).astype(np.int) m = m//reduce(np.gcd,m) - if not np.allclose(v[v.nonzero()]/m[v.nonzero()],v[v.nonzero()][0]/m[m.nonzero()][0]): - raise ValueError(f'Invalid result {m} for input {v}. Insufficient precision?') + with np.errstate(divide='ignore'): + if not np.allclose(v/m,v[0]/m[0]): + raise ValueError(f'Invalid result {m} for input {v}. 
Insufficient precision?') return m From c42511f101c9607ffa2d80efee1c6dfadde548ec Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 18 Sep 2020 16:32:08 +0200 Subject: [PATCH 16/27] small polishing --- python/damask/_geom.py | 33 ++++++++++++++------------------- python/damask/_vtk.py | 2 ++ python/tests/test_Geom.py | 15 ++++----------- 3 files changed, 20 insertions(+), 30 deletions(-) diff --git a/python/damask/_geom.py b/python/damask/_geom.py index 60f7b90a5..be4f596c1 100644 --- a/python/damask/_geom.py +++ b/python/damask/_geom.py @@ -1,7 +1,5 @@ -import sys import copy -import multiprocessing -from io import StringIO +import multiprocessing as mp from functools import partial import numpy as np @@ -404,7 +402,7 @@ class Geom: seeds_p = seeds coords = grid_filters.cell_coord0(grid,size).reshape(-1,3) - pool = multiprocessing.Pool(processes = int(environment.options['DAMASK_NUM_THREADS'])) + pool = mp.Pool(processes = int(environment.options['DAMASK_NUM_THREADS'])) result = pool.map_async(partial(Geom._find_closest_seed,seeds_p,weights_p), [coord for coord in coords]) pool.close() pool.join() @@ -447,7 +445,7 @@ class Geom: ) - def save_ASCII(self,fname,pack=None): + def save_ASCII(self,fname,compress=None): """ Writes a geom file. @@ -455,7 +453,7 @@ class Geom: ---------- fname : str or file handle Geometry file to write with extension '.geom'. - pack : bool, optional + compress : bool, optional Compress geometry with 'x of y' and 'a to b'. """ @@ -467,10 +465,10 @@ class Geom: grid = self.get_grid() - if pack is None: + if compress is None: plain = grid.prod()/self.N_microstructure < 250 else: - plain = not pack + plain = not compress if plain: format_string = '%g' if self.microstructure.dtype in np.sctypes['float'] else \ @@ -518,7 +516,7 @@ class Geom: f.write(f'{reps} of {former}\n') - def save_vtr(self,fname=None): + def save_vtr(self,fname,compress=True): """ Generates vtk rectilinear grid. 
@@ -527,24 +525,21 @@ class Geom: fname : str, optional Filename to write. If no file is given, a string is returned. Valid extension is .vtr, it will be appended if not given. + compress : bool, optional + Compress with zlib algorithm. Defaults to True. """ v = VTK.from_rectilinearGrid(self.grid,self.size,self.origin) v.add(self.microstructure.flatten(order='F'),'materialpoint') v.add_comments(self.comments) - if fname: - v.save(fname if str(fname).endswith('.vtr') else str(fname)+'.vtr') - else: - sys.stdout.write(v.__repr__()) + v.save(fname if str(fname).endswith('.vtr') else str(fname)+'.vtr',parallel=False,compress=compress) - def as_ASCII(self,pack=False): - """Format geometry as human-readable ASCII.""" - f = StringIO() - self.save_ASCII(f,pack) - f.seek(0) - return ''.join(f.readlines()) + def show(self): + """Show on screen.""" + v = VTK.from_rectilinearGrid(self.grid,self.size,self.origin) + v.show() def add_primitive(self,dimension,center,exponent, diff --git a/python/damask/_vtk.py b/python/damask/_vtk.py index 2e424aefc..c1fe52f38 100644 --- a/python/damask/_vtk.py +++ b/python/damask/_vtk.py @@ -178,6 +178,8 @@ class VTK: Filename for writing. parallel : boolean, optional Write data in parallel background process. Defaults to True. + compress : bool, optional + Compress with zlib algorithm. Defaults to True. 
""" if isinstance(self.vtk_data,vtk.vtkRectilinearGrid): diff --git a/python/tests/test_Geom.py b/python/tests/test_Geom.py index 8c9e84c1f..12f50be96 100644 --- a/python/tests/test_Geom.py +++ b/python/tests/test_Geom.py @@ -69,18 +69,11 @@ class TestGeom: def test_write_read_file(self,default,tmpdir): with open(tmpdir/'default.geom','w') as f: - default.save_ASCII(f,pack=True) + default.save_ASCII(f,compress=True) with open(tmpdir/'default.geom') as f: new = Geom.load_ASCII(f) assert geom_equal(default,new) - def test_write_as_ASCII(self,default,tmpdir): - with open(tmpdir/'str.geom','w') as f: - f.write(default.as_ASCII()) - with open(tmpdir/'str.geom') as f: - new = Geom.load_ASCII(f) - assert geom_equal(default,new) - def test_read_write_vtr(self,default,tmpdir): default.save_vtr(tmpdir/'default') for _ in range(10): @@ -107,9 +100,9 @@ class TestGeom: Geom.load_vtr(tmpdir/'no_materialpoint.vtr') - @pytest.mark.parametrize('pack',[True,False]) - def test_pack(self,default,tmpdir,pack): - default.save_ASCII(tmpdir/'default.geom',pack=pack) + @pytest.mark.parametrize('compress',[True,False]) + def test_compress(self,default,tmpdir,compress): + default.save_ASCII(tmpdir/'default.geom',compress=compress) new = Geom.load_ASCII(tmpdir/'default.geom') assert geom_equal(new,default) From d6b681569326ba2d0602020d0efb0f1dbf053ed1 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Fri, 18 Sep 2020 11:35:58 -0400 Subject: [PATCH 17/27] resolve conflicting PRIVATE/master changes --- PRIVATE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/PRIVATE b/PRIVATE index 0b5204381..576f3af61 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit 0b5204381cc8b86212ed8c0de43110d0175badd4 +Subproject commit 576f3af61c3d893b608809cc91e46647809010f1 From 7cbd422ae38d22b741831663081c66f2c5ae3e06 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Sat, 19 Sep 2020 13:38:32 -0400 Subject: [PATCH 18/27] fixed coprime error when encountering NaN --- 
python/damask/util.py | 2 +- python/tests/test_util.py | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/python/damask/util.py b/python/damask/util.py index 655f75879..6432ed4e1 100644 --- a/python/damask/util.py +++ b/python/damask/util.py @@ -173,7 +173,7 @@ def scale_to_coprime(v): m = m//reduce(np.gcd,m) with np.errstate(divide='ignore'): - if not np.allclose(v/m,v[0]/m[0]): + if not np.allclose(np.ma.masked_invalid(v/m),v[np.argmax(abs(v))]/m[np.argmax(abs(v))]): raise ValueError(f'Invalid result {m} for input {v}. Insufficient precision?') return m diff --git a/python/tests/test_util.py b/python/tests/test_util.py index c786e0de1..588cc823f 100644 --- a/python/tests/test_util.py +++ b/python/tests/test_util.py @@ -18,16 +18,17 @@ class TestUtil: @pytest.mark.parametrize('input,output', [ - ([2,0],[1,0]), - ([0.5,0.5],[1,1]), + ([0,-2],[0,-1]), + ([-0.5,0.5],[-1,1]), ([1./2.,1./3.],[3,2]), ([2./3.,1./2.,1./3.],[4,3,2]), ]) def test_scale2coprime(self,input,output): + print(util.scale_to_coprime(np.array(input))) assert np.allclose(util.scale_to_coprime(np.array(input)), np.array(output).astype(int)) def test_lackofprecision(self): with pytest.raises(ValueError): - util.scale_to_coprime(np.array([1/3333,1,1])) + util.scale_to_coprime(np.array([1/333.333,1,1])) From 3cc319ef08471ac6a2a6f6dd7f2fb3b34fec92fe Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Sat, 19 Sep 2020 14:30:58 -0400 Subject: [PATCH 19/27] removed debug print statement in test_util --- python/tests/test_util.py | 1 - 1 file changed, 1 deletion(-) diff --git a/python/tests/test_util.py b/python/tests/test_util.py index 588cc823f..053045741 100644 --- a/python/tests/test_util.py +++ b/python/tests/test_util.py @@ -25,7 +25,6 @@ class TestUtil: ]) def test_scale2coprime(self,input,output): - print(util.scale_to_coprime(np.array(input))) assert np.allclose(util.scale_to_coprime(np.array(input)), np.array(output).astype(int)) From 
5b0b0de6b4018d9678a8a6a8ff38859b9c55d695 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Sun, 20 Sep 2020 12:22:41 -0400 Subject: [PATCH 20/27] assertion reports offense; fixed seeds for spherical and fiber --- python/tests/test_Rotation.py | 99 ++++++++++++++--------------------- 1 file changed, 39 insertions(+), 60 deletions(-) diff --git a/python/tests/test_Rotation.py b/python/tests/test_Rotation.py index 5435895a2..45dfc4f5e 100644 --- a/python/tests/test_Rotation.py +++ b/python/tests/test_Rotation.py @@ -461,7 +461,7 @@ def mul(me, other): if other.shape == (3,): A = me.quaternion[0]**2.0 - np.dot(me.quaternion[1:],me.quaternion[1:]) B = 2.0 * np.dot(me.quaternion[1:],other) - C = 2.0 * _P*me.quaternion[0] + C = 2.0 * _P * me.quaternion[0] return A*other + B*me.quaternion[1:] + C * np.cross(me.quaternion[1:],other) @@ -496,9 +496,8 @@ class TestRotation: o = backward(forward(m)) ok = np.allclose(m,o,atol=atol) if np.isclose(rot.as_quaternion()[0],0.0,atol=atol): - ok = ok or np.allclose(m*-1.,o,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.isclose(np.linalg.norm(o),1.0) + ok |= np.allclose(m*-1.,o,atol=atol) + assert ok and np.isclose(np.linalg.norm(o),1.0), f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('forward,backward',[(Rotation._om2qu,Rotation._qu2om), (Rotation._om2eu,Rotation._eu2om), @@ -512,8 +511,7 @@ class TestRotation: m = rot.as_matrix() o = backward(forward(m)) ok = np.allclose(m,o,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.isclose(np.linalg.det(o),1.0) + assert ok and np.isclose(np.linalg.det(o),1.0), f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('forward,backward',[(Rotation._eu2qu,Rotation._qu2eu), (Rotation._eu2om,Rotation._om2eu), @@ -531,9 +529,9 @@ class TestRotation: ok = ok or np.allclose(np.where(np.isclose(m,u),m-u,m),np.where(np.isclose(o,u),o-u,o),atol=atol) if np.isclose(m[1],0.0,atol=atol) or np.isclose(m[1],np.pi,atol=atol): sum_phi = 
np.unwrap([m[0]+m[2],o[0]+o[2]]) - ok = ok or np.isclose(sum_phi[0],sum_phi[1],atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and (np.zeros(3)-1.e-9 <= o).all() and (o <= np.array([np.pi*2.,np.pi,np.pi*2.])+1.e-9).all() + ok |= np.isclose(sum_phi[0],sum_phi[1],atol=atol) + assert ok and (np.zeros(3)-1.e-9 <= o).all() \ + and (o <= np.array([np.pi*2.,np.pi,np.pi*2.])+1.e-9).all(), f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('forward,backward',[(Rotation._ax2qu,Rotation._qu2ax), (Rotation._ax2om,Rotation._om2ax), @@ -548,9 +546,8 @@ class TestRotation: o = backward(forward(m)) ok = np.allclose(m,o,atol=atol) if np.isclose(m[3],np.pi,atol=atol): - ok = ok or np.allclose(m*np.array([-1.,-1.,-1.,1.]),o,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) and o[3]<=np.pi+1.e-9 + ok |= np.allclose(m*np.array([-1.,-1.,-1.,1.]),o,atol=atol) + assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) and o[3]<=np.pi+1.e-9, f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('forward,backward',[(Rotation._ro2qu,Rotation._qu2ro), #(Rotation._ro2om,Rotation._om2ro), @@ -566,8 +563,7 @@ class TestRotation: o = backward(forward(m)) ok = np.allclose(np.clip(m,None,cutoff),np.clip(o,None,cutoff),atol=atol) ok = ok or np.isclose(m[3],0.0,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) + assert ok and np.isclose(np.linalg.norm(o[:3]),1.0), f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('forward,backward',[(Rotation._ho2qu,Rotation._qu2ho), (Rotation._ho2om,Rotation._om2ho), @@ -581,8 +577,7 @@ class TestRotation: m = rot.as_homochoric() o = backward(forward(m)) ok = np.allclose(m,o,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.linalg.norm(o) < _R1 + 1.e-9 + assert ok and np.linalg.norm(o) < _R1 + 1.e-9, f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('forward,backward',[(Rotation._cu2qu,Rotation._qu2cu), 
(Rotation._cu2om,Rotation._om2cu), @@ -598,8 +593,7 @@ class TestRotation: ok = np.allclose(m,o,atol=atol) if np.count_nonzero(np.isclose(np.abs(o),np.pi**(2./3.)*.5)): ok = ok or np.allclose(m*-1.,o,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.max(np.abs(o)) < np.pi**(2./3.) * 0.5 + 1.e-9 + assert ok and np.max(np.abs(o)) < np.pi**(2./3.) * 0.5 + 1.e-9, f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('vectorized, single',[(Rotation._qu2om,qu2om), (Rotation._qu2eu,qu2eu), @@ -612,8 +606,7 @@ class TestRotation: vectorized(qu.reshape(qu.shape[0]//2,-1,4)) co = vectorized(qu) for q,c in zip(qu,co): - print(q,c) - assert np.allclose(single(q),c) and np.allclose(single(q),vectorized(q)) + assert np.allclose(single(q),c) and np.allclose(single(q),vectorized(q)), f'{q},{c}' @pytest.mark.parametrize('vectorized, single',[(Rotation._om2qu,om2qu), @@ -625,8 +618,7 @@ class TestRotation: vectorized(om.reshape(om.shape[0]//2,-1,3,3)) co = vectorized(om) for o,c in zip(om,co): - print(o,c) - assert np.allclose(single(o),c) and np.allclose(single(o),vectorized(o)) + assert np.allclose(single(o),c) and np.allclose(single(o),vectorized(o)), f'{o},{c}' @pytest.mark.parametrize('vectorized, single',[(Rotation._eu2qu,eu2qu), (Rotation._eu2om,eu2om), @@ -638,8 +630,7 @@ class TestRotation: vectorized(eu.reshape(eu.shape[0]//2,-1,3)) co = vectorized(eu) for e,c in zip(eu,co): - print(e,c) - assert np.allclose(single(e),c) and np.allclose(single(e),vectorized(e)) + assert np.allclose(single(e),c) and np.allclose(single(e),vectorized(e)), f'{e},{c}' @pytest.mark.parametrize('vectorized, single',[(Rotation._ax2qu,ax2qu), (Rotation._ax2om,ax2om), @@ -651,8 +642,7 @@ class TestRotation: vectorized(ax.reshape(ax.shape[0]//2,-1,4)) co = vectorized(ax) for a,c in zip(ax,co): - print(a,c) - assert np.allclose(single(a),c) and np.allclose(single(a),vectorized(a)) + assert np.allclose(single(a),c) and np.allclose(single(a),vectorized(a)), f'{a},{c}' 
@pytest.mark.parametrize('vectorized, single',[(Rotation._ro2ax,ro2ax), @@ -663,8 +653,7 @@ class TestRotation: vectorized(ro.reshape(ro.shape[0]//2,-1,4)) co = vectorized(ro) for r,c in zip(ro,co): - print(r,c) - assert np.allclose(single(r),c) and np.allclose(single(r),vectorized(r)) + assert np.allclose(single(r),c) and np.allclose(single(r),vectorized(r)), f'{r},{c}' @pytest.mark.parametrize('vectorized, single',[(Rotation._ho2ax,ho2ax), (Rotation._ho2cu,ho2cu)]) @@ -674,8 +663,7 @@ class TestRotation: vectorized(ho.reshape(ho.shape[0]//2,-1,3)) co = vectorized(ho) for h,c in zip(ho,co): - print(h,c) - assert np.allclose(single(h),c) and np.allclose(single(h),vectorized(h)) + assert np.allclose(single(h),c) and np.allclose(single(h),vectorized(h)), f'{h},{c}' @pytest.mark.parametrize('vectorized, single',[(Rotation._cu2ho,cu2ho)]) def test_cubochoric_vectorization(self,set_of_rotations,vectorized,single): @@ -684,8 +672,7 @@ class TestRotation: vectorized(cu.reshape(cu.shape[0]//2,-1,3)) co = vectorized(cu) for u,c in zip(cu,co): - print(u,c) - assert np.allclose(single(u),c) and np.allclose(single(u),vectorized(u)) + assert np.allclose(single(u),c) and np.allclose(single(u),vectorized(u)), f'{u},{c}' @pytest.mark.parametrize('func',[Rotation.from_axis_angle]) def test_normalization_vectorization(self,func): @@ -703,9 +690,8 @@ class TestRotation: o = Rotation.from_Eulers(rot.as_Eulers(degrees),degrees).as_quaternion() ok = np.allclose(m,o,atol=atol) if np.isclose(rot.as_quaternion()[0],0.0,atol=atol): - ok = ok or np.allclose(m*-1.,o,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.isclose(np.linalg.norm(o),1.0) + ok |= np.allclose(m*-1.,o,atol=atol) + assert ok and np.isclose(np.linalg.norm(o),1.0), f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('P',[1,-1]) @pytest.mark.parametrize('normalize',[True,False]) @@ -717,12 +703,12 @@ class TestRotation: o = 
Rotation.from_axis_angle(rot.as_axis_angle(degrees)*c,degrees,normalize,P).as_Eulers() u = np.array([np.pi*2,np.pi,np.pi*2]) ok = np.allclose(m,o,atol=atol) - ok = ok or np.allclose(np.where(np.isclose(m,u),m-u,m),np.where(np.isclose(o,u),o-u,o),atol=atol) + ok |= np.allclose(np.where(np.isclose(m,u),m-u,m),np.where(np.isclose(o,u),o-u,o),atol=atol) if np.isclose(m[1],0.0,atol=atol) or np.isclose(m[1],np.pi,atol=atol): sum_phi = np.unwrap([m[0]+m[2],o[0]+o[2]]) - ok = ok or np.isclose(sum_phi[0],sum_phi[1],atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and (np.zeros(3)-1.e-9 <= o).all() and (o <= np.array([np.pi*2.,np.pi,np.pi*2.])+1.e-9).all() + ok |= np.isclose(sum_phi[0],sum_phi[1],atol=atol) + assert ok and (np.zeros(3)-1.e-9 <= o).all() \ + and (o <= np.array([np.pi*2.,np.pi,np.pi*2.])+1.e-9).all(), f'{m},{o},{rot.as_quaternion()}' def test_matrix(self,set_of_rotations): for rot in set_of_rotations: @@ -731,8 +717,8 @@ class TestRotation: ok = np.allclose(m,o,atol=atol) if np.isclose(m[3],np.pi,atol=atol): ok = ok or np.allclose(m*np.array([-1.,-1.,-1.,1.]),o,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) and o[3]<=np.pi+1.e-9 + assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) \ + and o[3]<=np.pi+1.e-9, f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('P',[1,-1]) @pytest.mark.parametrize('normalize',[True,False]) @@ -742,8 +728,7 @@ class TestRotation: m = rot.as_matrix() o = Rotation.from_Rodrigues(rot.as_Rodrigues()*c,normalize,P).as_matrix() ok = np.allclose(m,o,atol=atol) - print(m,o) - assert ok and np.isclose(np.linalg.det(o),1.0) + assert ok and np.isclose(np.linalg.det(o),1.0), f'{m},{o}' @pytest.mark.parametrize('P',[1,-1]) def test_homochoric(self,set_of_rotations,P): @@ -753,8 +738,7 @@ class TestRotation: o = Rotation.from_homochoric(rot.as_homochoric()*P*-1,P).as_Rodrigues() ok = np.allclose(np.clip(m,None,cutoff),np.clip(o,None,cutoff),atol=atol) ok = ok or 
np.isclose(m[3],0.0,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) + assert ok and np.isclose(np.linalg.norm(o[:3]),1.0), f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('P',[1,-1]) def test_cubochoric(self,set_of_rotations,P): @@ -762,8 +746,7 @@ class TestRotation: m = rot.as_homochoric() o = Rotation.from_cubochoric(rot.as_cubochoric()*P*-1,P).as_homochoric() ok = np.allclose(m,o,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.linalg.norm(o) < (3.*np.pi/4.)**(1./3.) + 1.e-9 + assert ok and np.linalg.norm(o) < (3.*np.pi/4.)**(1./3.) + 1.e-9, f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('P',[1,-1]) @pytest.mark.parametrize('accept_homomorph',[True,False]) @@ -774,9 +757,8 @@ class TestRotation: o = Rotation.from_quaternion(rot.as_quaternion()*c,accept_homomorph,P).as_cubochoric() ok = np.allclose(m,o,atol=atol) if np.count_nonzero(np.isclose(np.abs(o),np.pi**(2./3.)*.5)): - ok = ok or np.allclose(m*-1.,o,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and o.max() < np.pi**(2./3.)*0.5+1.e-9 + ok |= np.allclose(m*-1.,o,atol=atol) + assert ok and o.max() < np.pi**(2./3.)*0.5+1.e-9, f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('reciprocal',[True,False]) def test_basis(self,set_of_rotations,reciprocal): @@ -858,8 +840,7 @@ class TestRotation: for rot in set_of_rotations: v = rot.broadcast_to((5,)) @ data for i in range(data.shape[0]): - print(i-data[i]) - assert np.allclose(mul(rot,data[i]),v[i]) + assert np.allclose(mul(rot,data[i]),v[i]), f'{i-data[i]}' @pytest.mark.parametrize('data',[np.random.rand(3), @@ -927,33 +908,31 @@ class TestRotation: @pytest.mark.parametrize('N',[1000,10000,100000]) def test_spherical_component(self,N,sigma): c = Rotation.from_random() - o = Rotation.from_spherical_component(c,sigma,N) + o = Rotation.from_spherical_component(c,sigma,N,seed=N+sigma) _, angles = c.misorientation(o).as_axis_angle(pair=True,degrees=True) angles[::2] 
*= -1 # flip angle for every second to symmetrize distribution p = stats.normaltest(angles)[1] sigma_out = np.std(angles) - print(f'\np: {p}, sigma ratio {sigma/sigma_out}') - assert (.9 < sigma/sigma_out < 1.1) and p > 0.001 + assert (.9 < sigma/sigma_out < 1.1) and p > 1, f'{sigma/sigma_out},{p}' @pytest.mark.parametrize('sigma',[5,10,15,20]) @pytest.mark.parametrize('N',[1000,10000,100000]) def test_from_fiber_component(self,N,sigma): """https://en.wikipedia.org/wiki/Full_width_at_half_maximum.""" - alpha = np.random.random(2)*np.pi - beta = np.random.random(2)*np.pi + alpha = np.random.random()*2*np.pi,np.arccos(np.random.random()) + beta = np.random.random()*2*np.pi,np.arccos(np.random.random()) f_in_C = np.array([np.sin(alpha[0])*np.cos(alpha[1]), np.sin(alpha[0])*np.sin(alpha[1]), np.cos(alpha[0])]) f_in_S = np.array([np.sin(beta[0] )*np.cos(beta[1] ), np.sin(beta[0] )*np.sin(beta[1] ), np.cos(beta[0] )]) ax = np.append(np.cross(f_in_C,f_in_S), - np.arccos(np.dot(f_in_C,f_in_S))) n = Rotation.from_axis_angle(ax if ax[3] > 0.0 else ax*-1.0 ,normalize=True) # rotation to align fiber axis in crystal and sample system - o = Rotation.from_fiber_component(alpha,beta,np.radians(sigma),N,False) + o = Rotation.from_fiber_component(alpha,beta,np.radians(sigma),N,False,seed=N+sigma) angles = np.arccos(np.clip(np.dot(o@np.broadcast_to(f_in_S,(N,3)),n@f_in_S),-1,1)) dist = np.array(angles) * (np.random.randint(0,2,N)*2-1) p = stats.normaltest(dist)[1] sigma_out = np.degrees(np.std(dist)) - print(f'\np: {p}, sigma ratio {sigma/sigma_out}') - assert (.9 < sigma/sigma_out < 1.1) and p > 0.001 + assert (.9 < sigma/sigma_out < 1.1) and p > 0.001, f'{sigma/sigma_out},{p}' From 5895e740290429dca0a7d8d9e54b63af0920d8c9 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 20 Sep 2020 19:13:54 +0200 Subject: [PATCH 21/27] p is never above 1 1e-4 is quite low, usually we are far above. 
1e-3 from https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.normaltest.html is not too far away. --- python/tests/test_Rotation.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/python/tests/test_Rotation.py b/python/tests/test_Rotation.py index 45dfc4f5e..3785e8da1 100644 --- a/python/tests/test_Rotation.py +++ b/python/tests/test_Rotation.py @@ -908,13 +908,13 @@ class TestRotation: @pytest.mark.parametrize('N',[1000,10000,100000]) def test_spherical_component(self,N,sigma): c = Rotation.from_random() - o = Rotation.from_spherical_component(c,sigma,N,seed=N+sigma) + o = Rotation.from_spherical_component(c,sigma,N) _, angles = c.misorientation(o).as_axis_angle(pair=True,degrees=True) angles[::2] *= -1 # flip angle for every second to symmetrize distribution p = stats.normaltest(angles)[1] sigma_out = np.std(angles) - assert (.9 < sigma/sigma_out < 1.1) and p > 1, f'{sigma/sigma_out},{p}' + assert (.9 < sigma/sigma_out < 1.1) and p > 1e-4, f'{sigma/sigma_out},{p}' @pytest.mark.parametrize('sigma',[5,10,15,20]) @@ -929,10 +929,10 @@ class TestRotation: ax = np.append(np.cross(f_in_C,f_in_S), - np.arccos(np.dot(f_in_C,f_in_S))) n = Rotation.from_axis_angle(ax if ax[3] > 0.0 else ax*-1.0 ,normalize=True) # rotation to align fiber axis in crystal and sample system - o = Rotation.from_fiber_component(alpha,beta,np.radians(sigma),N,False,seed=N+sigma) + o = Rotation.from_fiber_component(alpha,beta,np.radians(sigma),N,False) angles = np.arccos(np.clip(np.dot(o@np.broadcast_to(f_in_S,(N,3)),n@f_in_S),-1,1)) dist = np.array(angles) * (np.random.randint(0,2,N)*2-1) p = stats.normaltest(dist)[1] sigma_out = np.degrees(np.std(dist)) - assert (.9 < sigma/sigma_out < 1.1) and p > 0.001, f'{sigma/sigma_out},{p}' + assert (.9 < sigma/sigma_out < 1.1) and p > 1.e-4, f'{sigma/sigma_out},{p}' From 6ab88aad2b946dfc2f050f0b7f6ba56c14627668 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 20 Sep 2020 20:45:06 +0200 Subject: [PATCH 22/27] 
default format for Table is ASCII hence, renamed load_ASCII/save_ASCII to load/save --- PRIVATE | 2 +- processing/legacy/addAPS34IDEstrainCoords.py | 4 ++-- processing/legacy/addCumulative.py | 4 ++-- processing/post/DADF5_postResults.py | 2 +- processing/post/addCompatibilityMismatch.py | 4 ++-- processing/post/addCurl.py | 4 ++-- processing/post/addDerivative.py | 4 ++-- processing/post/addDisplacement.py | 6 +++--- processing/post/addDivergence.py | 4 ++-- processing/post/addEuclideanDistance.py | 4 ++-- processing/post/addGaussian.py | 4 ++-- processing/post/addGradient.py | 4 ++-- processing/post/addOrientations.py | 4 ++-- processing/post/addSchmidfactors.py | 4 ++-- processing/post/permuteData.py | 4 ++-- processing/pre/geom_fromDREAM3D.py | 2 +- processing/pre/geom_fromTable.py | 2 +- processing/pre/geom_fromVoronoiTessellation.py | 4 ++-- processing/pre/geom_grainGrowth.py | 2 +- processing/pre/hybridIA_linODFsampling.py | 2 +- processing/pre/seeds_fromDistribution.py | 4 ++-- processing/pre/seeds_fromGeom.py | 2 +- processing/pre/seeds_fromPokes.py | 2 +- processing/pre/seeds_fromRandom.py | 2 +- python/damask/_colormap.py | 4 ++-- python/damask/_table.py | 4 ++-- python/damask/_test.py | 8 ++++---- python/tests/test_Orientation.py | 4 ++-- python/tests/test_Table.py | 18 +++++++++--------- 29 files changed, 59 insertions(+), 59 deletions(-) diff --git a/PRIVATE b/PRIVATE index 576f3af61..7c543a98e 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit 576f3af61c3d893b608809cc91e46647809010f1 +Subproject commit 7c543a98e89840f1f1540c7af8c62a19084dab6e diff --git a/processing/legacy/addAPS34IDEstrainCoords.py b/processing/legacy/addAPS34IDEstrainCoords.py index 8f566b614..465f03e4e 100755 --- a/processing/legacy/addAPS34IDEstrainCoords.py +++ b/processing/legacy/addAPS34IDEstrainCoords.py @@ -42,10 +42,10 @@ rot_to_TSL = damask.Rotation.from_axis_angle([-1,0,0,.75*np.pi]) for name in filenames: damask.util.report(scriptName,name) - table = 
damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) coord = - table.get(options.frame) coord[:,2] += table.get(options.depth)[:,0] table.add('coord',rot_to_TSL.broadcast_to(coord.shape[0]) @ coord,scriptID+' '+' '.join(sys.argv[1:]))\ - .save_ASCII((sys.stdout if name is None else name),legacy=True) + .save((sys.stdout if name is None else name),legacy=True) diff --git a/processing/legacy/addCumulative.py b/processing/legacy/addCumulative.py index 0d88cc575..3ba527acd 100755 --- a/processing/legacy/addCumulative.py +++ b/processing/legacy/addCumulative.py @@ -39,10 +39,10 @@ if options.labels is None: for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) for label in options.labels: table = table.add('cum_{}({})'.format('prod' if options.product else 'sum',label), np.cumprod(table.get(label),0) if options.product else np.cumsum(table.get(label),0), scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII((sys.stdout if name is None else name),legacy=True) + table.save((sys.stdout if name is None else name),legacy=True) diff --git a/processing/post/DADF5_postResults.py b/processing/post/DADF5_postResults.py index 0226ad551..02eb72d87 100755 --- a/processing/post/DADF5_postResults.py +++ b/processing/post/DADF5_postResults.py @@ -60,4 +60,4 @@ for filename in options.filenames: os.mkdir(dirname,0o755) file_out = '{}_inc{}.txt'.format(os.path.splitext(os.path.split(filename)[-1])[0], inc[3:].zfill(N_digits)) - table.save_ASCII(os.path.join(dirname,file_out),legacy=True) + table.save(os.path.join(dirname,file_out),legacy=True) diff --git a/processing/post/addCompatibilityMismatch.py b/processing/post/addCompatibilityMismatch.py index 40a1391e0..0e7d3ea42 
100755 --- a/processing/post/addCompatibilityMismatch.py +++ b/processing/post/addCompatibilityMismatch.py @@ -172,7 +172,7 @@ if filenames == []: filenames = [None] for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) F = table.get(options.defgrad).reshape(tuple(grid)+(-1,),order='F').reshape(tuple(grid)+(3,3)) @@ -191,4 +191,4 @@ for name in filenames: volumeMismatch.reshape(-1,1,order='F'), scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII((sys.stdout if name is None else name), legacy=True) + table.save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addCurl.py b/processing/post/addCurl.py index 5a5f4c074..699fc945f 100755 --- a/processing/post/addCurl.py +++ b/processing/post/addCurl.py @@ -43,7 +43,7 @@ if options.labels is None: parser.error('no data column specified.') for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) for label in options.labels: @@ -55,4 +55,4 @@ for name in filenames: curl.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape),order='F'), scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII((sys.stdout if name is None else name), legacy=True) + table.save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addDerivative.py b/processing/post/addDerivative.py index 63a5bf73e..99016f4ef 100755 --- a/processing/post/addDerivative.py +++ b/processing/post/addDerivative.py @@ -65,10 +65,10 @@ if options.labels is 
None: for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) for label in options.labels: table = table.add('d({})/d({})'.format(label,options.coordinates), derivative(table.get(options.coordinates),table.get(label)), scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII((sys.stdout if name is None else name), legacy=True) + table.save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addDisplacement.py b/processing/post/addDisplacement.py index 079bcd970..a6cff86ab 100755 --- a/processing/post/addDisplacement.py +++ b/processing/post/addDisplacement.py @@ -47,7 +47,7 @@ parser.set_defaults(f = 'f', for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) F = table.get(options.f).reshape(tuple(grid)+(-1,),order='F').reshape(tuple(grid)+(3,3)) @@ -60,7 +60,7 @@ for name in filenames: .add('fluct({}).{}'.format(options.f,options.pos), damask.grid_filters.node_displacement_fluct(size,F).reshape(-1,3,order='F'), scriptID+' '+' '.join(sys.argv[1:]))\ - .save_ASCII((sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt'), legacy=True) + .save((sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt'), legacy=True) else: table.add('avg({}).{}'.format(options.f,options.pos), damask.grid_filters.cell_displacement_avg(size,F).reshape(-1,3,order='F'), @@ -68,4 +68,4 @@ for name in filenames: .add('fluct({}).{}'.format(options.f,options.pos), damask.grid_filters.cell_displacement_fluct(size,F).reshape(-1,3,order='F'), scriptID+' '+' 
'.join(sys.argv[1:]))\ - .save_ASCII((sys.stdout if name is None else name), legacy=True) + .save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addDivergence.py b/processing/post/addDivergence.py index bc4880788..208a0f7b6 100755 --- a/processing/post/addDivergence.py +++ b/processing/post/addDivergence.py @@ -43,7 +43,7 @@ if options.labels is None: parser.error('no data column specified.') for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) for label in options.labels: @@ -55,4 +55,4 @@ for name in filenames: div.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)//3,order='F'), scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII((sys.stdout if name is None else name), legacy=True) + table.save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addEuclideanDistance.py b/processing/post/addEuclideanDistance.py index 7e99dc3d5..fc43542bd 100755 --- a/processing/post/addEuclideanDistance.py +++ b/processing/post/addEuclideanDistance.py @@ -142,7 +142,7 @@ for i,feature in enumerate(features): for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) neighborhood = neighborhoods[options.neighborhood] @@ -184,4 +184,4 @@ for name in filenames: distance[i,:], scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII((sys.stdout if name is None else name), legacy=True) + table.save((sys.stdout if name is None else name), legacy=True) diff --git 
a/processing/post/addGaussian.py b/processing/post/addGaussian.py index 095a17bc2..f00122c63 100755 --- a/processing/post/addGaussian.py +++ b/processing/post/addGaussian.py @@ -63,7 +63,7 @@ if options.labels is None: parser.error('no data column specified.') for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) damask.grid_filters.coord0_check(table.get(options.pos)) for label in options.labels: @@ -73,4 +73,4 @@ for name in filenames: mode = 'wrap' if options.periodic else 'nearest'), scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII((sys.stdout if name is None else name), legacy=True) + table.save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addGradient.py b/processing/post/addGradient.py index 69241598c..d049b65d7 100755 --- a/processing/post/addGradient.py +++ b/processing/post/addGradient.py @@ -43,7 +43,7 @@ if options.labels is None: parser.error('no data column specified.') for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) for label in options.labels: @@ -55,4 +55,4 @@ for name in filenames: grad.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)*3,order='F'), scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII((sys.stdout if name is None else name), legacy=True) + table.save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addOrientations.py b/processing/post/addOrientations.py index 16f8d62d6..6a02cca08 100755 --- a/processing/post/addOrientations.py +++ b/processing/post/addOrientations.py @@ -110,7 
+110,7 @@ R = damask.Rotation.from_axis_angle(np.array(options.labrotation),options.degree for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) if options.eulers is not None: label = options.eulers @@ -147,4 +147,4 @@ for name in filenames: if 'axisangle' in options.output: table = table.add('om({})'.format(label),o.as_axisangle(options.degrees), scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII((sys.stdout if name is None else name), legacy=True) + table.save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addSchmidfactors.py b/processing/post/addSchmidfactors.py index beaf18331..8f43308cb 100755 --- a/processing/post/addSchmidfactors.py +++ b/processing/post/addSchmidfactors.py @@ -175,7 +175,7 @@ labels = ['S[{direction[0]:.1g}_{direction[1]:.1g}_{direction[2]:.1g}]' for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) o = damask.Rotation.from_quaternion(table.get(options.quaternion)) @@ -189,4 +189,4 @@ for name in filenames: for i,label in enumerate(labels): table = table.add(label,S[:,i],scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII((sys.stdout if name is None else name), legacy=True) + table.save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/permuteData.py b/processing/post/permuteData.py index 34451404c..073ccfd9f 100755 --- a/processing/post/permuteData.py +++ b/processing/post/permuteData.py @@ -47,7 +47,7 @@ if filenames == []: filenames = [None] for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None 
else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) randomSeed = int(os.urandom(4).hex(), 16) if options.randomSeed is None else options.randomSeed # random seed per file rng = np.random.default_rng(randomSeed) @@ -58,4 +58,4 @@ for name in filenames: rng.shuffle(uniques) table = table.set(label,uniques[inverse], scriptID+' '+' '.join(sys.argv[1:])) - table.save_ASCII((sys.stdout if name is None else name), legacy=True) + table.save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/pre/geom_fromDREAM3D.py b/processing/pre/geom_fromDREAM3D.py index 471435766..3faa07a17 100755 --- a/processing/pre/geom_fromDREAM3D.py +++ b/processing/pre/geom_fromDREAM3D.py @@ -154,4 +154,4 @@ for name in filenames: homogenization=options.homogenization,comments=header) damask.util.croak(geom) - geom.save_ASCII(os.path.splitext(name)[0]+'.geom',pack=False) + geom.save_ASCII(os.path.splitext(name)[0]+'.geom',compress=False) diff --git a/processing/pre/geom_fromTable.py b/processing/pre/geom_fromTable.py index 11c0761b5..3d6618bd2 100755 --- a/processing/pre/geom_fromTable.py +++ b/processing/pre/geom_fromTable.py @@ -68,7 +68,7 @@ if options.axes is not None and not set(options.axes).issubset(set(['x','+x','-x for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) table.sort_by(['{}_{}'.format(i,options.pos) for i in range(3,0,-1)]) # x fast, y slow grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) diff --git a/processing/pre/geom_fromVoronoiTessellation.py b/processing/pre/geom_fromVoronoiTessellation.py index a3a54882a..5586d3bc8 100755 --- a/processing/pre/geom_fromVoronoiTessellation.py +++ b/processing/pre/geom_fromVoronoiTessellation.py @@ -171,7 +171,7 @@ if filenames == []: 
filenames = [None] for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) size = np.ones(3) origin = np.zeros(3) @@ -228,4 +228,4 @@ for name in filenames: homogenization=options.homogenization,comments=header) damask.util.croak(geom) - geom.save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',pack=False) + geom.save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',compress=False) diff --git a/processing/pre/geom_grainGrowth.py b/processing/pre/geom_grainGrowth.py index dbe1f1a74..5fa0ed0b5 100755 --- a/processing/pre/geom_grainGrowth.py +++ b/processing/pre/geom_grainGrowth.py @@ -172,4 +172,4 @@ for name in filenames: geom = geom.duplicate(microstructure[0:grid_original[0],0:grid_original[1],0:grid_original[2]]) geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) - geom.save_ASCII(sys.stdout if name is None else name,pack=False) + geom.save_ASCII(sys.stdout if name is None else name,compress=False) diff --git a/processing/pre/hybridIA_linODFsampling.py b/processing/pre/hybridIA_linODFsampling.py index 01704197e..f99a2dd89 100755 --- a/processing/pre/hybridIA_linODFsampling.py +++ b/processing/pre/hybridIA_linODFsampling.py @@ -234,7 +234,7 @@ if filenames == []: filenames = [None] for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) randomSeed = int(os.urandom(4).hex(),16) if options.randomSeed is None else options.randomSeed # random seed per file random.seed(randomSeed) diff --git a/processing/pre/seeds_fromDistribution.py b/processing/pre/seeds_fromDistribution.py index cc72920d3..e1e4726c2 100755 --- 
a/processing/pre/seeds_fromDistribution.py +++ b/processing/pre/seeds_fromDistribution.py @@ -78,7 +78,7 @@ class myThread (threading.Thread): perturbedSeedsVFile = StringIO() myBestSeedsVFile.seek(0) - perturbedSeedsTable = damask.Table.load_ASCII(myBestSeedsVFile) + perturbedSeedsTable = damask.Table.load(myBestSeedsVFile) coords = perturbedSeedsTable.get('pos') i = 0 for ms,coord in enumerate(coords): @@ -89,7 +89,7 @@ class myThread (threading.Thread): coords[i]=newCoords direction[i]*=2. i+= 1 - perturbedSeedsTable.set('pos',coords).save_ASCII(perturbedSeedsVFile,legacy=True) + perturbedSeedsTable.set('pos',coords).save(perturbedSeedsVFile,legacy=True) #--- do tesselation with perturbed seed file ------------------------------------------------------ perturbedGeomVFile.close() diff --git a/processing/pre/seeds_fromGeom.py b/processing/pre/seeds_fromGeom.py index 4110405d6..962c1dbce 100755 --- a/processing/pre/seeds_fromGeom.py +++ b/processing/pre/seeds_fromGeom.py @@ -65,4 +65,4 @@ for name in filenames: damask.Table(seeds[mask],{'pos':(3,)},comments)\ .add('microstructure',microstructure[mask].astype(int))\ - .save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.seeds',legacy=True) + .save(sys.stdout if name is None else os.path.splitext(name)[0]+'.seeds',legacy=True) diff --git a/processing/pre/seeds_fromPokes.py b/processing/pre/seeds_fromPokes.py index fdff40da3..c804cd285 100755 --- a/processing/pre/seeds_fromPokes.py +++ b/processing/pre/seeds_fromPokes.py @@ -92,5 +92,5 @@ for name in filenames: table = damask.Table(seeds,{'pos':(3,),'microstructure':(1,)},comments) table.set('microstructure',table.get('microstructure').astype(np.int))\ - .save_ASCII(sys.stdout if name is None else \ + .save(sys.stdout if name is None else \ os.path.splitext(name)[0]+f'_poked_{options.N}.seeds',legacy=True) diff --git a/processing/pre/seeds_fromRandom.py b/processing/pre/seeds_fromRandom.py index 8700a4648..451e218aa 100755 --- 
a/processing/pre/seeds_fromRandom.py +++ b/processing/pre/seeds_fromRandom.py @@ -162,4 +162,4 @@ for name in filenames: else np.random.normal(loc = options.mean, scale = options.sigma, size = options.N) table = table.add('weight',weights) - table.save_ASCII(sys.stdout if name is None else name,legacy=True) + table.save(sys.stdout if name is None else name,legacy=True) diff --git a/python/damask/_colormap.py b/python/damask/_colormap.py index f3065ebc8..fa7d36ec2 100644 --- a/python/damask/_colormap.py +++ b/python/damask/_colormap.py @@ -297,9 +297,9 @@ class Colormap(mpl.colors.ListedColormap): if fhandle is None: with open(self.name.replace(' ','_')+'.txt', 'w') as f: - t.save_ASCII(f) + t.save(f) else: - t.save_ASCII(fhandle) + t.save(fhandle) def save_GOM(self,fname=None): diff --git a/python/damask/_table.py b/python/damask/_table.py index 9789c8be6..431cf1886 100644 --- a/python/damask/_table.py +++ b/python/damask/_table.py @@ -67,7 +67,7 @@ class Table: @staticmethod - def load_ASCII(fname): + def load(fname): """ Load ASCII table file. @@ -344,7 +344,7 @@ class Table: return dup - def save_ASCII(self,fname,legacy=False): + def save(self,fname,legacy=False): """ Save as plain text file. 
diff --git a/python/damask/_test.py b/python/damask/_test.py index ab4df6d68..5cadc9dfe 100644 --- a/python/damask/_test.py +++ b/python/damask/_test.py @@ -287,9 +287,9 @@ class Test: import numpy as np logging.info('\n '.join(['comparing',File1,File2])) - table = damask.Table.load_ASCII(File1) + table = damask.Table.load(File1) len1 = len(table.comments)+2 - table = damask.Table.load_ASCII(File2) + table = damask.Table.load(File2) len2 = len(table.comments)+2 refArray = np.nan_to_num(np.genfromtxt(File1,missing_values='n/a',skip_header = len1,autostrip=True)) @@ -436,7 +436,7 @@ class Test: if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested files = [str(files)] - tables = [damask.Table.load_ASCII(filename) for filename in files] + tables = [damask.Table.load(filename) for filename in files] for table in tables: table._label_discrete() @@ -486,7 +486,7 @@ class Test: if len(files) < 2: return True # single table is always close to itself... 
- tables = [damask.Table.load_ASCII(filename) for filename in files] + tables = [damask.Table.load(filename) for filename in files] columns += [columns[0]]*(len(files)-len(columns)) # extend to same length as files columns = columns[:len(files)] # truncate to same length as files diff --git a/python/tests/test_Orientation.py b/python/tests/test_Orientation.py index 3a7425ddb..669f73e91 100644 --- a/python/tests/test_Orientation.py +++ b/python/tests/test_Orientation.py @@ -106,8 +106,8 @@ class TestOrientation: coords = np.array([(1,i+1) for i,x in enumerate(eu)]) table = Table(eu,{'Eulers':(3,)}) table = table.add('pos',coords) - table.save_ASCII(reference) - assert np.allclose(eu,Table.load_ASCII(reference).get('Eulers')) + table.save(reference) + assert np.allclose(eu,Table.load(reference).get('Eulers')) @pytest.mark.parametrize('lattice',Lattice.lattices) def test_disorientation360(self,lattice): diff --git a/python/tests/test_Table.py b/python/tests/test_Table.py index af940a037..7a86c7fed 100644 --- a/python/tests/test_Table.py +++ b/python/tests/test_Table.py @@ -35,30 +35,30 @@ class TestTable: @pytest.mark.parametrize('mode',['str','path']) def test_write_read(self,default,tmpdir,mode): - default.save_ASCII(tmpdir/'default.txt') + default.save(tmpdir/'default.txt') if mode == 'path': - new = Table.load_ASCII(tmpdir/'default.txt') + new = Table.load(tmpdir/'default.txt') elif mode == 'str': - new = Table.load_ASCII(str(tmpdir/'default.txt')) + new = Table.load(str(tmpdir/'default.txt')) assert all(default.data==new.data) and default.shapes == new.shapes def test_write_read_file(self,default,tmpdir): with open(tmpdir/'default.txt','w') as f: - default.save_ASCII(f) + default.save(f) with open(tmpdir/'default.txt') as f: - new = Table.load_ASCII(f) + new = Table.load(f) assert all(default.data==new.data) and default.shapes == new.shapes def test_write_read_legacy_style(self,default,tmpdir): with open(tmpdir/'legacy.txt','w') as f: - 
default.save_ASCII(f,legacy=True) + default.save(f,legacy=True) with open(tmpdir/'legacy.txt') as f: - new = Table.load_ASCII(f) + new = Table.load(f) assert all(default.data==new.data) and default.shapes == new.shapes def test_write_invalid_format(self,default,tmpdir): with pytest.raises(TypeError): - default.save_ASCII(tmpdir/'shouldnotbethere.txt',format='invalid') + default.save(tmpdir/'shouldnotbethere.txt',format='invalid') @pytest.mark.parametrize('mode',['str','path']) def test_read_ang(self,reference_dir,mode): @@ -78,7 +78,7 @@ class TestTable: @pytest.mark.parametrize('fname',['datatype-mix.txt','whitespace-mix.txt']) def test_read_strange(self,reference_dir,fname): with open(reference_dir/fname) as f: - Table.load_ASCII(f) + Table.load(f) def test_set(self,default): d = default.set('F',np.zeros((5,3,3)),'set to zero').get('F') From d33507866d3f71a1d450178f8c4ae82811b55212 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 20 Sep 2020 21:50:52 +0200 Subject: [PATCH 23/27] statistically more valid test --- python/tests/test_Rotation.py | 43 ++++++++++++++++++++--------------- 1 file changed, 25 insertions(+), 18 deletions(-) diff --git a/python/tests/test_Rotation.py b/python/tests/test_Rotation.py index 3785e8da1..66cabfbd4 100644 --- a/python/tests/test_Rotation.py +++ b/python/tests/test_Rotation.py @@ -907,32 +907,39 @@ class TestRotation: @pytest.mark.parametrize('sigma',[5,10,15,20]) @pytest.mark.parametrize('N',[1000,10000,100000]) def test_spherical_component(self,N,sigma): - c = Rotation.from_random() - o = Rotation.from_spherical_component(c,sigma,N) - _, angles = c.misorientation(o).as_axis_angle(pair=True,degrees=True) - angles[::2] *= -1 # flip angle for every second to symmetrize distribution + p = [] + for run in range(5): + c = Rotation.from_random() + o = Rotation.from_spherical_component(c,sigma,N) + _, angles = c.misorientation(o).as_axis_angle(pair=True,degrees=True) + angles[::2] *= -1 # flip angle for every second to symmetrize 
distribution + + p.append(stats.normaltest(angles)[1]) - p = stats.normaltest(angles)[1] sigma_out = np.std(angles) - assert (.9 < sigma/sigma_out < 1.1) and p > 1e-4, f'{sigma/sigma_out},{p}' + p = np.average(p) + assert (.9 < sigma/sigma_out < 1.1) and p > 1e-2, f'{sigma/sigma_out},{p}' @pytest.mark.parametrize('sigma',[5,10,15,20]) @pytest.mark.parametrize('N',[1000,10000,100000]) def test_from_fiber_component(self,N,sigma): - """https://en.wikipedia.org/wiki/Full_width_at_half_maximum.""" - alpha = np.random.random()*2*np.pi,np.arccos(np.random.random()) - beta = np.random.random()*2*np.pi,np.arccos(np.random.random()) + p = [] + for run in range(5): + alpha = np.random.random()*2*np.pi,np.arccos(np.random.random()) + beta = np.random.random()*2*np.pi,np.arccos(np.random.random()) - f_in_C = np.array([np.sin(alpha[0])*np.cos(alpha[1]), np.sin(alpha[0])*np.sin(alpha[1]), np.cos(alpha[0])]) - f_in_S = np.array([np.sin(beta[0] )*np.cos(beta[1] ), np.sin(beta[0] )*np.sin(beta[1] ), np.cos(beta[0] )]) - ax = np.append(np.cross(f_in_C,f_in_S), - np.arccos(np.dot(f_in_C,f_in_S))) - n = Rotation.from_axis_angle(ax if ax[3] > 0.0 else ax*-1.0 ,normalize=True) # rotation to align fiber axis in crystal and sample system + f_in_C = np.array([np.sin(alpha[0])*np.cos(alpha[1]), np.sin(alpha[0])*np.sin(alpha[1]), np.cos(alpha[0])]) + f_in_S = np.array([np.sin(beta[0] )*np.cos(beta[1] ), np.sin(beta[0] )*np.sin(beta[1] ), np.cos(beta[0] )]) + ax = np.append(np.cross(f_in_C,f_in_S), - np.arccos(np.dot(f_in_C,f_in_S))) + n = Rotation.from_axis_angle(ax if ax[3] > 0.0 else ax*-1.0 ,normalize=True) # rotation to align fiber axis in crystal and sample system - o = Rotation.from_fiber_component(alpha,beta,np.radians(sigma),N,False) - angles = np.arccos(np.clip(np.dot(o@np.broadcast_to(f_in_S,(N,3)),n@f_in_S),-1,1)) - dist = np.array(angles) * (np.random.randint(0,2,N)*2-1) + o = Rotation.from_fiber_component(alpha,beta,np.radians(sigma),N,False) + angles = 
np.arccos(np.clip(np.dot(o@np.broadcast_to(f_in_S,(N,3)),n@f_in_S),-1,1)) + dist = np.array(angles) * (np.random.randint(0,2,N)*2-1) + + p.append(stats.normaltest(dist)[1]) - p = stats.normaltest(dist)[1] sigma_out = np.degrees(np.std(dist)) - assert (.9 < sigma/sigma_out < 1.1) and p > 1.e-4, f'{sigma/sigma_out},{p}' + p = np.average(p) + assert (.9 < sigma/sigma_out < 1.1) and p > 1e-2, f'{sigma/sigma_out},{p}' From 24febcd15b9244d66b033a42ef76d95f29c208bb Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 20 Sep 2020 21:51:09 +0200 Subject: [PATCH 24/27] forgotten rename --- processing/pre/geom_fromMinimalSurface.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/processing/pre/geom_fromMinimalSurface.py b/processing/pre/geom_fromMinimalSurface.py index b64bac417..b52003d79 100755 --- a/processing/pre/geom_fromMinimalSurface.py +++ b/processing/pre/geom_fromMinimalSurface.py @@ -89,4 +89,4 @@ geom=damask.Geom(microstructure,options.size, comments=[scriptID + ' ' + ' '.join(sys.argv[1:])]) damask.util.croak(geom) -geom.save_ASCII(sys.stdout if name is None else name,pack=False) +geom.save_ASCII(sys.stdout if name is None else name,compress=False) From 7bdd44a3d9f9132f2d2b811bb4ec11cd8d9dd424 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 20 Sep 2020 22:04:28 +0200 Subject: [PATCH 25/27] consistent names: default file operations: save/load, non-default save_xx/load_xx --- PRIVATE | 2 +- processing/pre/geom_fromOsteonGeometry.py | 2 +- python/damask/_geom.py | 4 ++-- python/damask/_result.py | 2 +- python/tests/test_Geom.py | 10 +++++----- python/tests/test_Result.py | 2 +- 6 files changed, 11 insertions(+), 11 deletions(-) diff --git a/PRIVATE b/PRIVATE index 7c543a98e..bb3011806 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit 7c543a98e89840f1f1540c7af8c62a19084dab6e +Subproject commit bb30118067c33a2b8ba3e0f1cee52ca4a0b786d8 diff --git a/processing/pre/geom_fromOsteonGeometry.py 
b/processing/pre/geom_fromOsteonGeometry.py index c2583d0ed..0b1440d63 100755 --- a/processing/pre/geom_fromOsteonGeometry.py +++ b/processing/pre/geom_fromOsteonGeometry.py @@ -142,4 +142,4 @@ geom = damask.Geom(microstructure.reshape(grid), homogenization=options.homogenization,comments=header) damask.util.croak(geom) -geom.save_ASCII(sys.stdout if name is None else name,pack=False) +geom.save_ASCII(sys.stdout if name is None else name,compress=False) diff --git a/python/damask/_geom.py b/python/damask/_geom.py index be4f596c1..41e1e22da 100644 --- a/python/damask/_geom.py +++ b/python/damask/_geom.py @@ -348,7 +348,7 @@ class Geom: @staticmethod - def load_vtr(fname): + def load(fname): """ Read a VTK rectilinear grid. @@ -516,7 +516,7 @@ class Geom: f.write(f'{reps} of {former}\n') - def save_vtr(self,fname,compress=True): + def save(self,fname,compress=True): """ Generates vtk rectilinear grid. diff --git a/python/damask/_result.py b/python/damask/_result.py index 02d0c0abe..5e8a9a9d0 100644 --- a/python/damask/_result.py +++ b/python/damask/_result.py @@ -1100,7 +1100,7 @@ class Result: pool.join() - def write_XDMF(self): + def save_XDMF(self): """ Write XDMF file to directly visualize data in DADF5 file. 
diff --git a/python/tests/test_Geom.py b/python/tests/test_Geom.py index 12f50be96..dbd2f795b 100644 --- a/python/tests/test_Geom.py +++ b/python/tests/test_Geom.py @@ -75,12 +75,12 @@ class TestGeom: assert geom_equal(default,new) def test_read_write_vtr(self,default,tmpdir): - default.save_vtr(tmpdir/'default') + default.save(tmpdir/'default') for _ in range(10): time.sleep(.2) if os.path.exists(tmpdir/'default.vtr'): break - new = Geom.load_vtr(tmpdir/'default.vtr') + new = Geom.load(tmpdir/'default.vtr') assert geom_equal(new,default) def test_invalid_geom(self,tmpdir): @@ -97,7 +97,7 @@ class TestGeom: time.sleep(.2) if os.path.exists(tmpdir/'no_materialpoint.vtr'): break with pytest.raises(ValueError): - Geom.load_vtr(tmpdir/'no_materialpoint.vtr') + Geom.load(tmpdir/'no_materialpoint.vtr') @pytest.mark.parametrize('compress',[True,False]) @@ -188,11 +188,11 @@ class TestGeom: current = default.clean(stencil,selection,periodic) reference = reference_dir/f'clean_{stencil}_{"+".join(map(str,[None] if selection is None else selection))}_{periodic}' if update and stencil > 1: - current.save_vtr(reference) + current.save(reference) for _ in range(10): time.sleep(.2) if os.path.exists(reference.with_suffix('.vtr')): break - assert geom_equal(Geom.load_vtr(reference) if stencil > 1 else default, + assert geom_equal(Geom.load(reference) if stencil > 1 else default, current ) diff --git a/python/tests/test_Result.py b/python/tests/test_Result.py index c25bf7a4c..68b72badf 100644 --- a/python/tests/test_Result.py +++ b/python/tests/test_Result.py @@ -343,4 +343,4 @@ class TestResult: def test_XDMF(self,tmp_path,single_phase): os.chdir(tmp_path) - single_phase.write_XDMF() + single_phase.save_XDMF() From 783b74966213284bc2de2de213309f127cbf5e06 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 20 Sep 2020 22:25:08 +0200 Subject: [PATCH 26/27] compress instead of pack (same name for vtk/geom) --- processing/pre/geom_fromTable.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/processing/pre/geom_fromTable.py b/processing/pre/geom_fromTable.py index 3d6618bd2..6f2cb5b4d 100755 --- a/processing/pre/geom_fromTable.py +++ b/processing/pre/geom_fromTable.py @@ -105,4 +105,4 @@ for name in filenames: homogenization=options.homogenization,comments=header) damask.util.croak(geom) - geom.save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',pack=False) + geom.save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',compress=False) From 790d3a742eb98f270a5777ab1fc6972ecde78af6 Mon Sep 17 00:00:00 2001 From: Test User Date: Tue, 22 Sep 2020 20:32:28 +0200 Subject: [PATCH 27/27] [skip ci] updated version information after successful test of v3.0.0-alpha-275-g7801f527f --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index e073be898..a05ebe3cf 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v3.0.0-alpha-245-g5ef761fb9 +v3.0.0-alpha-275-g7801f527f