diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index ed5e762b7..bb4a2e7a0 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -25,6 +25,7 @@ before_script: fi - while [ $(awk "/$CI_PIPELINE_ID/{print NR}" $TESTROOT/GitLabCI.queue) != 1 ]; do sleep 5m; + echo -e "Currently queued pipelines:\n$(cat $TESTROOT/GitLabCI.queue)\n"; done - source $DAMASKROOT/env/DAMASK.sh - cd $DAMASKROOT/PRIVATE/testing @@ -87,6 +88,7 @@ checkout: - echo $CI_PIPELINE_ID >> $TESTROOT/GitLabCI.queue - while [ $(awk "/$CI_PIPELINE_ID/{print NR}" $TESTROOT/GitLabCI.queue) != 1 ]; do sleep 5m; + echo -e "Currently queued pipelines:\n$(cat $TESTROOT/GitLabCI.queue)\n"; done script: - mkdir -p $DAMASKROOT diff --git a/PRIVATE b/PRIVATE index 555f3e01f..2550bf655 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit 555f3e01f2b5cf43ade1bd48423b890adca21771 +Subproject commit 2550bf655aebe61ce459718323ba38b1ac205a7f diff --git a/VERSION b/VERSION index bebbc79d0..a8cc04d22 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v3.0.0-alpha-233-g190f8a82 +v3.0.0-alpha-321-g0f6495430 diff --git a/processing/legacy/addAPS34IDEstrainCoords.py b/processing/legacy/addAPS34IDEstrainCoords.py index 9ba51ada1..465f03e4e 100755 --- a/processing/legacy/addAPS34IDEstrainCoords.py +++ b/processing/legacy/addAPS34IDEstrainCoords.py @@ -42,11 +42,10 @@ rot_to_TSL = damask.Rotation.from_axis_angle([-1,0,0,.75*np.pi]) for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) - + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) + coord = - table.get(options.frame) coord[:,2] += table.get(options.depth)[:,0] - table.add('coord',rot_to_TSL.broadcast_to(coord.shape[0]) @ coord,scriptID+' '+' '.join(sys.argv[1:])) - - table.to_file(sys.stdout if name is None else name) + table.add('coord',rot_to_TSL.broadcast_to(coord.shape[0]) @ coord,scriptID+' '+' '.join(sys.argv[1:]))\ + .save((sys.stdout if name is None else name),legacy=True) diff --git a/processing/legacy/addCumulative.py b/processing/legacy/addCumulative.py index f84828a89..3ba527acd 100755 --- a/processing/legacy/addCumulative.py +++ b/processing/legacy/addCumulative.py @@ -39,10 +39,10 @@ if options.labels is None: for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) for label in options.labels: - table.add('cum_{}({})'.format('prod' if options.product else 'sum',label), - np.cumprod(table.get(label),0) if options.product else np.cumsum(table.get(label),0), - scriptID+' '+' '.join(sys.argv[1:])) + table = table.add('cum_{}({})'.format('prod' if options.product else 'sum',label), + np.cumprod(table.get(label),0) if options.product else np.cumsum(table.get(label),0), + scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save((sys.stdout if name is None else name),legacy=True) diff --git a/processing/post/DADF5_postResults.py b/processing/post/DADF5_postResults.py index e81330581..02eb72d87 100755 --- a/processing/post/DADF5_postResults.py +++ b/processing/post/DADF5_postResults.py @@ -38,8 +38,8 @@ for filename in options.filenames: N_digits = int(np.floor(np.log10(int(results.increments[-1][3:]))))+1 N_digits = 5 # hack to keep test intact for inc in 
damask.util.show_progress(results.iterate('increments'),len(results.increments)): - table = damask.Table(np.ones(np.product(results.grid),dtype=int)*int(inc[3:]),{'inc':(1,)}) - table = table.add('pos',coords.reshape(-1,3)) + table = damask.Table(np.ones(np.product(results.grid),dtype=int)*int(inc[3:]),{'inc':(1,)})\ + .add('pos',coords.reshape(-1,3)) results.pick('materialpoints',False) results.pick('constituents', True) @@ -60,4 +60,4 @@ for filename in options.filenames: os.mkdir(dirname,0o755) file_out = '{}_inc{}.txt'.format(os.path.splitext(os.path.split(filename)[-1])[0], inc[3:].zfill(N_digits)) - table.to_file(os.path.join(dirname,file_out)) + table.save(os.path.join(dirname,file_out),legacy=True) diff --git a/processing/post/addCompatibilityMismatch.py b/processing/post/addCompatibilityMismatch.py index 5009d44a0..0e7d3ea42 100755 --- a/processing/post/addCompatibilityMismatch.py +++ b/processing/post/addCompatibilityMismatch.py @@ -172,7 +172,7 @@ if filenames == []: filenames = [None] for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) F = table.get(options.defgrad).reshape(tuple(grid)+(-1,),order='F').reshape(tuple(grid)+(3,3)) @@ -191,4 +191,4 @@ for name in filenames: volumeMismatch.reshape(-1,1,order='F'), scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addCurl.py b/processing/post/addCurl.py index 1033e3303..699fc945f 100755 --- a/processing/post/addCurl.py +++ b/processing/post/addCurl.py @@ -43,7 +43,7 @@ if options.labels is None: parser.error('no data column specified.') for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) for label in options.labels: @@ -55,4 +55,4 @@ for name in filenames: curl.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape),order='F'), scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addDerivative.py b/processing/post/addDerivative.py index b6b19c98a..99016f4ef 100755 --- a/processing/post/addDerivative.py +++ b/processing/post/addDerivative.py @@ -14,9 +14,9 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptID = ' '.join([scriptName,damask.version]) def derivative(coordinates,what): - + result = np.empty_like(what) - + # use differentiation by interpolation # as described in http://www2.math.umd.edu/~dlevy/classes/amsc466/lecture-notes/differentiation-chap.pdf @@ -31,7 +31,7 @@ def derivative(coordinates,what): (coordinates[0] - coordinates[1]) result[-1,:] = (what[-1,:] - what[-2,:]) / \ (coordinates[-1] - coordinates[-2]) - + return result @@ -65,10 +65,10 @@ if options.labels is None: for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = 
damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) for label in options.labels: table = table.add('d({})/d({})'.format(label,options.coordinates), derivative(table.get(options.coordinates),table.get(label)), scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addDisplacement.py b/processing/post/addDisplacement.py index f1ab565b0..a6cff86ab 100755 --- a/processing/post/addDisplacement.py +++ b/processing/post/addDisplacement.py @@ -47,25 +47,25 @@ parser.set_defaults(f = 'f', for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) F = table.get(options.f).reshape(tuple(grid)+(-1,),order='F').reshape(tuple(grid)+(3,3)) if options.nodal: - table = damask.Table(damask.grid_filters.node_coord0(grid,size).reshape(-1,3,order='F'), + damask.Table(damask.grid_filters.node_coord0(grid,size).reshape(-1,3,order='F'), {'pos':(3,)})\ .add('avg({}).{}'.format(options.f,options.pos), damask.grid_filters.node_displacement_avg(size,F).reshape(-1,3,order='F'), scriptID+' '+' '.join(sys.argv[1:]))\ .add('fluct({}).{}'.format(options.f,options.pos), damask.grid_filters.node_displacement_fluct(size,F).reshape(-1,3,order='F'), - scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt') + scriptID+' '+' '.join(sys.argv[1:]))\ + .save((sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt'), legacy=True) else: - table = table.add('avg({}).{}'.format(options.f,options.pos), + table.add('avg({}).{}'.format(options.f,options.pos), damask.grid_filters.cell_displacement_avg(size,F).reshape(-1,3,order='F'), scriptID+' '+' '.join(sys.argv[1:]))\ .add('fluct({}).{}'.format(options.f,options.pos), damask.grid_filters.cell_displacement_fluct(size,F).reshape(-1,3,order='F'), - scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + scriptID+' '+' '.join(sys.argv[1:]))\ + .save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addDivergence.py b/processing/post/addDivergence.py index 6495793cf..208a0f7b6 100755 --- a/processing/post/addDivergence.py +++ b/processing/post/addDivergence.py @@ -43,7 +43,7 @@ if options.labels is None: parser.error('no data column specified.') for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) for label in options.labels: @@ -55,4 +55,4 @@ for name in filenames: div.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)//3,order='F'), scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addEuclideanDistance.py b/processing/post/addEuclideanDistance.py index f5cf58ab3..fc43542bd 100755 --- a/processing/post/addEuclideanDistance.py +++ b/processing/post/addEuclideanDistance.py @@ -142,7 +142,7 @@ for 
i,feature in enumerate(features): for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) neighborhood = neighborhoods[options.neighborhood] @@ -158,7 +158,7 @@ for name in filenames: diffToNeighbor[:,:,:,i] = ndimage.convolve(microstructure,stencil) # compare ID at each point... # ...to every one in the specified neighborhood # for same IDs at both locations ==> 0 - + diffToNeighbor = np.sort(diffToNeighbor) # sort diff such that number of changes in diff (steps)... # ...reflects number of unique neighbors uniques = np.where(diffToNeighbor[1:-1,1:-1,1:-1,0] != 0, 1,0) # initialize unique value counter (exclude myself [= 0]) @@ -184,4 +184,4 @@ for name in filenames: distance[i,:], scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addGaussian.py b/processing/post/addGaussian.py index 8e58da884..f00122c63 100755 --- a/processing/post/addGaussian.py +++ b/processing/post/addGaussian.py @@ -63,7 +63,7 @@ if options.labels is None: parser.error('no data column specified.') for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) damask.grid_filters.coord0_check(table.get(options.pos)) for label in options.labels: @@ -73,4 +73,4 @@ for name in filenames: mode = 'wrap' if options.periodic else 'nearest'), scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addGradient.py b/processing/post/addGradient.py index 718a972f3..d049b65d7 100755 --- a/processing/post/addGradient.py +++ b/processing/post/addGradient.py @@ -43,7 +43,7 @@ if options.labels is None: parser.error('no data column specified.') for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) for label in options.labels: @@ -55,4 +55,4 @@ for name in filenames: grad.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)*3,order='F'), scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addOrientations.py b/processing/post/addOrientations.py index dddc14193..6a02cca08 100755 --- a/processing/post/addOrientations.py +++ b/processing/post/addOrientations.py @@ -110,7 +110,7 @@ R = damask.Rotation.from_axis_angle(np.array(options.labrotation),options.degree for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) if options.eulers is not None: label = options.eulers @@ -147,4 +147,4 @@ for name in filenames: if 'axisangle' in 
options.output: table = table.add('om({})'.format(label),o.as_axisangle(options.degrees), scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/addSchmidfactors.py b/processing/post/addSchmidfactors.py index dc4117d78..8f43308cb 100755 --- a/processing/post/addSchmidfactors.py +++ b/processing/post/addSchmidfactors.py @@ -175,7 +175,7 @@ labels = ['S[{direction[0]:.1g}_{direction[1]:.1g}_{direction[2]:.1g}]' for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) o = damask.Rotation.from_quaternion(table.get(options.quaternion)) @@ -189,4 +189,4 @@ for name in filenames: for i,label in enumerate(labels): table = table.add(label,S[:,i],scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/post/filterTable.py b/processing/post/filterTable.py index 494257a60..4f4af088b 100755 --- a/processing/post/filterTable.py +++ b/processing/post/filterTable.py @@ -27,7 +27,7 @@ def sortingList(labels,whitelistitems): else: indices.append(0) names.append(label) - + return [indices,names,whitelistitems] @@ -72,11 +72,11 @@ for name in filenames: continue damask.util.report(scriptName,name) -# ------------------------------------------ assemble info --------------------------------------- +# ------------------------------------------ assemble info --------------------------------------- table.head_read() -# ------------------------------------------ process data --------------------------------------- +# ------------------------------------------ process data --------------------------------------- specials = { \ '_row_': 0, @@ -103,12 +103,12 @@ for name in filenames: else np.lexsort(sortingList(labels,whitelistitem)) # reorder if unique, i.e. no "-1" in whitelistitem else: order = range(len(labels)) # maintain original order of labels - + # --------------------------------------- evaluate condition --------------------------------------- if options.condition is not None: condition = options.condition # copy per file, since might be altered inline breaker = False - + for position,(all,marker,column) in enumerate(set(re.findall(r'#(([s]#)?(.+?))#',condition))): # find three groups idx = table.label_index(column) dim = table.label_dimension(column) @@ -123,11 +123,11 @@ for name in filenames: 's#':'str'}[marker],idx) # take float or string value of data column elif dim > 1: # multidimensional input (vector, tensor, etc.) 
replacement = 'np.array(table.data[{}:{}],dtype=float)'.format(idx,idx+dim) # use (flat) array representation - + condition = condition.replace('#'+all+'#',replacement) - + if breaker: continue # found mistake in condition evaluation --> next file - + # ------------------------------------------ assemble header --------------------------------------- table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:])) @@ -138,7 +138,7 @@ for name in filenames: # ------------------------------------------ process and output data ------------------------------------------ positions = np.array(positions)[order] - + atOnce = options.condition is None if atOnce: # read full array and filter columns try: diff --git a/processing/post/permuteData.py b/processing/post/permuteData.py index 316fdd3da..073ccfd9f 100755 --- a/processing/post/permuteData.py +++ b/processing/post/permuteData.py @@ -47,7 +47,7 @@ if filenames == []: filenames = [None] for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) randomSeed = int(os.urandom(4).hex(), 16) if options.randomSeed is None else options.randomSeed # random seed per file rng = np.random.default_rng(randomSeed) @@ -58,4 +58,4 @@ for name in filenames: rng.shuffle(uniques) table = table.set(label,uniques[inverse], scriptID+' '+' '.join(sys.argv[1:])) - table.to_file(sys.stdout if name is None else name) + table.save((sys.stdout if name is None else name), legacy=True) diff --git a/processing/pre/geom_fromDREAM3D.py b/processing/pre/geom_fromDREAM3D.py index 6b7ccc21a..daf7d2ab9 100755 --- a/processing/pre/geom_fromDREAM3D.py +++ b/processing/pre/geom_fromDREAM3D.py @@ -52,16 +52,11 @@ parser.add_option('-q', '--quaternion', type = 'string', metavar='string', help = 'name of the dataset containing pointwise/average orientation as quaternion [%default]') -parser.add_option('--homogenization', - dest = 'homogenization', - type = 'int', metavar = 'int', - help = 'homogenization index to be used [%default]') parser.set_defaults(pointwise = 'CellData', quaternion = 'Quats', phase = 'Phases', microstructure = 'FeatureIds', - homogenization = 1, ) (options, filenames) = parser.parse_args() @@ -150,8 +145,7 @@ for name in filenames: header = [scriptID + ' ' + ' '.join(sys.argv[1:])]\ + config_header - geom = damask.Geom(microstructure,size,origin, - homogenization=options.homogenization,comments=header) + geom = damask.Geom(microstructure,size,origin,comments=header) damask.util.croak(geom) - geom.to_file(os.path.splitext(name)[0]+'.geom',format='ASCII',pack=False) + geom.save_ASCII(os.path.splitext(name)[0]+'.geom',compress=False) diff --git a/processing/pre/geom_fromMinimalSurface.py b/processing/pre/geom_fromMinimalSurface.py index e6289ba0a..83d1a3684 100755 --- a/processing/pre/geom_fromMinimalSurface.py +++ b/processing/pre/geom_fromMinimalSurface.py @@ -52,10 +52,6 @@ parser.add_option('-p', '--periods', dest = 'periods', type = 'int', metavar = 'int', help = 'number of repetitions of unit cell [%default]') -parser.add_option('--homogenization', - dest = 'homogenization', - type = 'int', metavar = 'int', - help = 'homogenization index to be used [%default]') parser.add_option('--m', dest = 'microstructure', type = 'int', nargs = 2, metavar = 'int int', @@ -66,7 +62,6 @@ parser.set_defaults(type = minimal_surfaces[0], periods = 1, grid = (16,16,16), size = (1.0,1.0,1.0), - 
homogenization = 1, microstructure = (1,2), ) @@ -85,8 +80,7 @@ microstructure = np.where(options.threshold < surface[options.type](x,y,z), options.microstructure[1],options.microstructure[0]) geom=damask.Geom(microstructure,options.size, - homogenization=options.homogenization, comments=[scriptID + ' ' + ' '.join(sys.argv[1:])]) damask.util.croak(geom) -geom.to_file(sys.stdout if name is None else name,format='ASCII',pack=False) +geom.save_ASCII(sys.stdout if name is None else name,compress=False) diff --git a/processing/pre/geom_fromOsteonGeometry.py b/processing/pre/geom_fromOsteonGeometry.py index 6c6326163..2f9de712d 100755 --- a/processing/pre/geom_fromOsteonGeometry.py +++ b/processing/pre/geom_fromOsteonGeometry.py @@ -57,10 +57,6 @@ parser.add_option('-w', '--omega', dest='omega', type='float', metavar = 'float', help='rotation angle around normal of osteon [%default]') -parser.add_option( '--homogenization', - dest='homogenization', - type='int', metavar = 'int', - help='homogenization index to be used [%default]') parser.set_defaults(canal = 25e-6, osteon = 100e-6, @@ -70,7 +66,7 @@ parser.set_defaults(canal = 25e-6, amplitude = 60, size = (300e-6,300e-6), grid = (512,512), - homogenization = 1) + ) (options,filename) = parser.parse_args() @@ -139,7 +135,7 @@ header = [scriptID + ' ' + ' '.join(sys.argv[1:])]\ + config_header geom = damask.Geom(microstructure.reshape(grid), size,-size/2, - homogenization=options.homogenization,comments=header) + comments=header) damask.util.croak(geom) -geom.to_file(sys.stdout if name is None else name,format='ASCII',pack=False) +geom.save_ASCII(sys.stdout if name is None else name,compress=False) diff --git a/processing/pre/geom_fromTable.py b/processing/pre/geom_fromTable.py index dc968b82e..a0de6a4c5 100755 --- a/processing/pre/geom_fromTable.py +++ b/processing/pre/geom_fromTable.py @@ -44,14 +44,9 @@ parser.add_option('--axes', dest = 'axes', type = 'string', nargs = 3, metavar = ' '.join(['string']*3), help = 'orientation coordinate frame in terms of position coordinate frame [+x +y +z]') -parser.add_option('--homogenization', - dest = 'homogenization', - type = 'int', metavar = 'int', - help = 'homogenization index to be used [%default]') -parser.set_defaults(homogenization = 1, - pos = 'pos', +parser.set_defaults(pos = 'pos', ) (options,filenames) = parser.parse_args() @@ -68,7 +63,7 @@ if options.axes is not None and not set(options.axes).issubset(set(['x','+x','-x for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) table.sort_by(['{}_{}'.format(i,options.pos) for i in range(3,0,-1)]) # x fast, y slow grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos)) @@ -102,8 +97,7 @@ for name in filenames: header = [scriptID + ' ' + ' '.join(sys.argv[1:])]\ + config_header geom = damask.Geom(microstructure,size,origin, - homogenization=options.homogenization,comments=header) + comments=header) damask.util.croak(geom) - geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom', - format='ASCII',pack=False) + geom.save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',compress=False) diff --git a/processing/pre/geom_fromVoronoiTessellation.py b/processing/pre/geom_fromVoronoiTessellation.py index 67a0dfecc..aee79cc05 100755 --- a/processing/pre/geom_fromVoronoiTessellation.py +++ 
b/processing/pre/geom_fromVoronoiTessellation.py @@ -142,10 +142,6 @@ group.add_option('--without-config', dest = 'config', action = 'store_false', help = 'omit material configuration header') -group.add_option('--homogenization', - dest = 'homogenization', - type = 'int', metavar = 'int', - help = 'homogenization index to be used [%default]') group.add_option('--phase', dest = 'phase', type = 'int', metavar = 'int', @@ -157,7 +153,6 @@ parser.set_defaults(pos = 'pos', weight = 'weight', microstructure = 'microstructure', eulers = 'euler', - homogenization = 1, phase = 1, cpus = 2, laguerre = False, @@ -171,7 +166,7 @@ if filenames == []: filenames = [None] for name in filenames: damask.util.report(scriptName,name) - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) size = np.ones(3) origin = np.zeros(3) @@ -225,8 +220,7 @@ for name in filenames: header = [scriptID + ' ' + ' '.join(sys.argv[1:])]\ + config_header geom = damask.Geom(indices.reshape(grid),size,origin, - homogenization=options.homogenization,comments=header) + comments=header) damask.util.croak(geom) - geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom', - format='ASCII',pack=False) + geom.save_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',compress=False) diff --git a/processing/pre/geom_grainGrowth.py b/processing/pre/geom_grainGrowth.py index 0652d0583..b5793f703 100755 --- a/processing/pre/geom_grainGrowth.py +++ b/processing/pre/geom_grainGrowth.py @@ -41,7 +41,7 @@ parser.add_option('-N', '--iterations', help = 'curvature flow iterations [%default]') parser.add_option('-i', '--immutable', action = 'extend', dest = 'immutable', metavar = '', - help = 'list of immutable microstructure indices') + help = 'list of immutable material indices') parser.add_option('--ndimage', dest = 'ndimage', action='store_true', help = 'use ndimage.gaussian_filter in lieu of explicit FFT') @@ -62,17 +62,17 @@ if filenames == []: filenames = [None] for name in filenames: damask.util.report(scriptName,name) - geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) + geom = damask.Geom.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) - grid_original = geom.get_grid() + grid_original = geom.grid damask.util.croak(geom) - microstructure = np.tile(geom.microstructure,np.where(grid_original == 1, 2,1)) # make one copy along dimensions with grid == 1 - grid = np.array(microstructure.shape) + material = np.tile(geom.material,np.where(grid_original == 1, 2,1)) # make one copy along dimensions with grid == 1 + grid = np.array(material.shape) # --- initialize support data --------------------------------------------------------------------- -# store a copy the initial microstructure to find locations of immutable indices - microstructure_original = np.copy(microstructure) +# store a copy of the initial material indices to find locations of immutable indices + material_original = np.copy(material) if not options.ndimage: X,Y,Z = np.mgrid[0:grid[0],0:grid[1],0:grid[2]] @@ -88,14 +88,14 @@ for name in filenames: for smoothIter in range(options.N): - interfaceEnergy = np.zeros(microstructure.shape,dtype=np.float32) + interfaceEnergy = np.zeros(material.shape,dtype=np.float32) for i in (-1,0,1): for j in (-1,0,1): for k in (-1,0,1): # assign interfacial energy to all voxels that have a differing neighbor (in Moore 
neighborhood) interfaceEnergy = np.maximum(interfaceEnergy, - getInterfaceEnergy(microstructure,np.roll(np.roll(np.roll( - microstructure,i,axis=0), j,axis=1), k,axis=2))) + getInterfaceEnergy(material,np.roll(np.roll(np.roll( + material,i,axis=0), j,axis=1), k,axis=2))) # periodically extend interfacial energy array by half a grid size in positive and negative directions periodic_interfaceEnergy = np.tile(interfaceEnergy,(3,3,3))[grid[0]//2:-grid[0]//2, @@ -129,13 +129,13 @@ for name in filenames: iterations = int(round(options.d*2.))-1),# fat boundary periodic_bulkEnergy[grid[0]//2:-grid[0]//2, # retain filled energy on fat boundary... grid[1]//2:-grid[1]//2, - grid[2]//2:-grid[2]//2], # ...and zero everywhere else + grid[2]//2:-grid[2]//2], # ...and zero everywhere else 0.)).astype(np.complex64) * gauss).astype(np.float32) periodic_diffusedEnergy = np.tile(diffusedEnergy,(3,3,3))[grid[0]//2:-grid[0]//2, grid[1]//2:-grid[1]//2, - grid[2]//2:-grid[2]//2] # periodically extend the smoothed bulk energy + grid[2]//2:-grid[2]//2] # periodically extend the smoothed bulk energy # transform voxels close to interface region @@ -143,33 +143,35 @@ for name in filenames: return_distances = False, return_indices = True) # want index of closest bulk grain - periodic_microstructure = np.tile(microstructure,(3,3,3))[grid[0]//2:-grid[0]//2, - grid[1]//2:-grid[1]//2, - grid[2]//2:-grid[2]//2] # periodically extend the microstructure + periodic_material = np.tile(material,(3,3,3))[grid[0]//2:-grid[0]//2, + grid[1]//2:-grid[1]//2, + grid[2]//2:-grid[2]//2] # periodically extend the geometry - microstructure = periodic_microstructure[index[0], - index[1], - index[2]].reshape(2*grid)[grid[0]//2:-grid[0]//2, - grid[1]//2:-grid[1]//2, - grid[2]//2:-grid[2]//2] # extent grains into interface region + material = periodic_material[index[0], + index[1], + index[2]].reshape(2*grid)[grid[0]//2:-grid[0]//2, + grid[1]//2:-grid[1]//2, + grid[2]//2:-grid[2]//2] # extent grains into interface region - # replace immutable microstructures with closest mutable ones - index = ndimage.morphology.distance_transform_edt(np.in1d(microstructure,options.immutable).reshape(grid), + # replace immutable materials with closest mutable ones + index = ndimage.morphology.distance_transform_edt(np.in1d(material,options.immutable).reshape(grid), return_distances = False, return_indices = True) - microstructure = microstructure[index[0], - index[1], - index[2]] + material = material[index[0], + index[1], + index[2]] - immutable = np.zeros(microstructure.shape, dtype=np.bool) - # find locations where immutable microstructures have been in original structure + immutable = np.zeros(material.shape, dtype=np.bool) + # find locations where immutable materials have been in original structure for micro in options.immutable: - immutable += microstructure_original == micro + immutable += material_original == micro - # undo any changes involving immutable microstructures - microstructure = np.where(immutable, microstructure_original,microstructure) + # undo any changes involving immutable materials + material = np.where(immutable, material_original,material) - geom=geom.duplicate(microstructure[0:grid_original[0],0:grid_original[1],0:grid_original[2]]) - geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) - - geom.to_file(sys.stdout if name is None else name,format='ASCII',pack=False) + damask.Geom(material = material[0:grid_original[0],0:grid_original[1],0:grid_original[2]], + size = geom.size, + origin = geom.origin, + comments = geom.comments 
+ [scriptID + ' ' + ' '.join(sys.argv[1:])], + )\ + .save_ASCII(sys.stdout if name is None else name,compress=False) diff --git a/processing/pre/hybridIA_linODFsampling.py b/processing/pre/hybridIA_linODFsampling.py index 6f5827f8b..f99a2dd89 100755 --- a/processing/pre/hybridIA_linODFsampling.py +++ b/processing/pre/hybridIA_linODFsampling.py @@ -31,7 +31,7 @@ def binAsBins(bin,intervals): bins[1] = (bin//intervals[2]) % intervals[1] bins[2] = bin % intervals[2] return bins - + def binsAsBin(bins,intervals): """Implode 3D bins into compound bin.""" return (bins[0]*intervals[1] + bins[1])*intervals[2] + bins[2] @@ -95,7 +95,7 @@ def directInversion (ODF,nSamples): float(nInvSamples)/nOptSamples-1.0, scale,nSamples)) repetition = [None]*ODF['nBins'] # preallocate and clear - + for bin in range(ODF['nBins']): # loop over bins repetition[bin] = int(round(ODF['dV_V'][bin]*scale)) # calc repetition @@ -105,7 +105,7 @@ def directInversion (ODF,nSamples): for bin in range(ODF['nBins']): set[i:i+repetition[bin]] = [bin]*repetition[bin] # fill set with bin, i.e. orientation i += repetition[bin] # advance set counter - + orientations = np.zeros((nSamples,3),'f') reconstructedODF = np.zeros(ODF['nBins'],'f') unitInc = 1.0/nSamples @@ -117,7 +117,7 @@ def directInversion (ODF,nSamples): orientations[j] = np.degrees(Eulers) reconstructedODF[bin] += unitInc set[ex] = set[j] # exchange orientations - + return orientations, reconstructedODF @@ -130,7 +130,7 @@ def MonteCarloEulers (ODF,nSamples): orientations = np.zeros((nSamples,3),'f') reconstructedODF = np.zeros(ODF['nBins'],'f') unitInc = 1.0/nSamples - + for j in range(nSamples): MC = maxdV_V*2.0 bin = 0 @@ -153,7 +153,7 @@ def MonteCarloBins (ODF,nSamples): orientations = np.zeros((nSamples,3),'f') reconstructedODF = np.zeros(ODF['nBins'],'f') unitInc = 1.0/nSamples - + for j in range(nSamples): MC = maxdV_V*2.0 bin = 0 @@ -173,14 +173,14 @@ def TothVanHoutteSTAT (ODF,nSamples): orientations = np.zeros((nSamples,3),'f') reconstructedODF = np.zeros(ODF['nBins'],'f') unitInc = 1.0/nSamples - + selectors = [random.random() for i in range(nSamples)] selectors.sort() indexSelector = 0 - + cumdV_V = 0.0 countSamples = 0 - + for bin in range(ODF['nBins']) : cumdV_V += ODF['dV_V'][bin] while indexSelector < nSamples and selectors[indexSelector] < cumdV_V: @@ -191,7 +191,7 @@ def TothVanHoutteSTAT (ODF,nSamples): indexSelector += 1 damask.util.croak('created set of %i when asked to deliver %i'%(countSamples,nSamples)) - + return orientations, reconstructedODF @@ -233,8 +233,8 @@ if filenames == []: filenames = [None] for name in filenames: damask.util.report(scriptName,name) - - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + + table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name) randomSeed = int(os.urandom(4).hex(),16) if options.randomSeed is None else options.randomSeed # random seed per file random.seed(randomSeed) @@ -253,7 +253,7 @@ for name in filenames: if eulers.shape[0] != ODF['nBins']: damask.util.croak('expecting %i values but got %i'%(ODF['nBins'],eulers.shape[0])) continue - + # ----- build binnedODF array and normalize ------------------------------------------------------ sumdV_V = 0.0 ODF['dV_V'] = [None]*ODF['nBins'] @@ -267,7 +267,7 @@ for name in filenames: if ODF['dV_V'][b] > 0.0: sumdV_V += ODF['dV_V'][b] ODF['nNonZero'] += 1 - + for b in range(ODF['nBins']): ODF['dV_V'][b] /= sumdV_V # normalize dV/V @@ -277,19 +277,19 @@ for name in filenames: 'Volume 
integral of ODF: %12.11f\n'%sumdV_V, 'Reference Integral: %12.11f\n'%(ODF['limit'][0]*ODF['limit'][2]*(1-math.cos(ODF['limit'][1]))), ]) - + Functions = {'IA': 'directInversion', 'STAT': 'TothVanHoutteSTAT', 'MC': 'MonteCarloBins'} method = Functions[options.algorithm] Orientations, ReconstructedODF = (globals()[method])(ODF,options.number) - + # calculate accuracy of sample squaredDiff = {'orig':0.0,method:0.0} squaredRelDiff = {'orig':0.0,method:0.0} mutualProd = {'orig':0.0,method:0.0} indivSum = {'orig':0.0,method:0.0} indivSquaredSum = {'orig':0.0,method:0.0} - + for bin in range(ODF['nBins']): squaredDiff[method] += (ODF['dV_V'][bin] - ReconstructedODF[bin])**2 if ODF['dV_V'][bin] > 0.0: @@ -299,7 +299,7 @@ for name in filenames: indivSquaredSum[method] += ReconstructedODF[bin]**2 indivSum['orig'] += ODF['dV_V'][bin] indivSquaredSum['orig'] += ODF['dV_V'][bin]**2 - + damask.util.croak(['sqrt(N*)RMSD of ODFs:\t %12.11f'% math.sqrt(options.number*squaredDiff[method]), 'RMSrD of ODFs:\t %12.11f'%math.sqrt(squaredRelDiff[method]), 'rMSD of ODFs:\t %12.11f'%(squaredDiff[method]/indivSquaredSum['orig']), @@ -311,10 +311,10 @@ for name in filenames: (ODF['nNonZero']*math.sqrt((indivSquaredSum['orig']/ODF['nNonZero']-(indivSum['orig']/ODF['nNonZero'])**2)*\ (indivSquaredSum[method]/ODF['nNonZero']-(indivSum[method]/ODF['nNonZero'])**2)))), ]) - + if method == 'IA' and options.number < ODF['nNonZero']: strOpt = '(%i)'%ODF['nNonZero'] - + formatwidth = 1+int(math.log10(options.number)) materialConfig = [ @@ -324,12 +324,12 @@ for name in filenames: '', '#-------------------#', ] - + for i,ID in enumerate(range(options.number)): materialConfig += ['[Grain%s]'%(str(ID+1).zfill(formatwidth)), '(constituent) phase %i texture %s fraction 1.0'%(options.phase,str(ID+1).rjust(formatwidth)), ] - + materialConfig += [ '#-------------------#', '', @@ -338,12 +338,12 @@ for name in filenames: for ID in range(options.number): eulers = Orientations[ID] - + materialConfig += ['[Grain%s]'%(str(ID+1).zfill(formatwidth)), '(gauss) phi1 {} Phi {} phi2 {} scatter 0.0 fraction 1.0'.format(*eulers), ] -#--- output finalization -------------------------------------------------------------------------- +#--- output finalization -------------------------------------------------------------------------- with (open(os.path.splitext(name)[0]+'_'+method+'_'+str(options.number)+'_material.config','w')) as outfile: outfile.write('\n'.join(materialConfig)+'\n') diff --git a/processing/pre/mentat_spectralBox.py b/processing/pre/mentat_spectralBox.py index 027240044..7d78cb973 100755 --- a/processing/pre/mentat_spectralBox.py +++ b/processing/pre/mentat_spectralBox.py @@ -42,7 +42,7 @@ def output(cmds,locals,dest): else: outFile(str(cmd),locals,dest) - + #------------------------------------------------------------------------------------------------- def init(): return [ @@ -100,7 +100,7 @@ def mesh(r,d): #------------------------------------------------------------------------------------------------- -def material(): +def materials(): return [\ "*new_mater standard", "*mater_option general:state:solid", @@ -114,7 +114,7 @@ def material(): "*add_geometry_elements", "all_existing", ] - + #------------------------------------------------------------------------------------------------- def geometry(): @@ -127,14 +127,14 @@ def geometry(): "*element_type 7", "all_existing", ] - + #------------------------------------------------------------------------------------------------- -def initial_conditions(microstructures): +def 
initial_conditions(material): elements = [] element = 0 - for id in microstructures: - element += 1 + for id in material: + element += 1 if len(elements) < id: for i in range(id-len(elements)): elements.append([]) @@ -153,7 +153,7 @@ def initial_conditions(microstructures): for grain,elementList in enumerate(elements): cmds.append([\ "*new_icond", - "*icond_name microstructure_%i"%(grain+1), + "*icond_name material_%i"%(grain+1), "*icond_type state_variable", "*icond_param_value state_var_id 2", "*icond_dof_value var %i"%(grain+1), @@ -195,22 +195,22 @@ if filenames == []: filenames = [None] for name in filenames: damask.util.report(scriptName,name) - - geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) - microstructure = geom.get_microstructure().flatten(order='F') + + geom = damask.Geom.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + material = geom.material.flatten(order='F') cmds = [\ init(), mesh(geom.grid,geom.size), - material(), + materials(), geometry(), - initial_conditions(microstructure), + initial_conditions(material), '*identify_sets', '*show_model', '*redraw', '*draw_automatic', ] - + outputLocals = {} if options.port: py_mentat.py_connect('',options.port) diff --git a/processing/pre/seeds_fromDistribution.py b/processing/pre/seeds_fromDistribution.py index 1a4ec6971..48f803d29 100755 --- a/processing/pre/seeds_fromDistribution.py +++ b/processing/pre/seeds_fromDistribution.py @@ -30,7 +30,7 @@ class myThread (threading.Thread): def run(self): global bestSeedsUpdate global bestSeedsVFile - global nMicrostructures + global nMaterials global delta global points global target @@ -70,7 +70,7 @@ class myThread (threading.Thread): selectedMs = [] direction = [] for i in range(NmoveGrains): - selectedMs.append(random.randrange(1,nMicrostructures)) + selectedMs.append(random.randrange(1,nMaterials)) direction.append((np.random.random()-0.5)*delta) @@ -78,7 +78,7 @@ class myThread (threading.Thread): perturbedSeedsVFile = StringIO() myBestSeedsVFile.seek(0) - perturbedSeedsTable = damask.Table.from_ASCII(myBestSeedsVFile) + perturbedSeedsTable = damask.Table.load(myBestSeedsVFile) coords = perturbedSeedsTable.get('pos') i = 0 for ms,coord in enumerate(coords): @@ -89,8 +89,7 @@ class myThread (threading.Thread): coords[i]=newCoords direction[i]*=2. 
i+= 1 - perturbedSeedsTable.set('pos',coords) - perturbedSeedsTable.to_file(perturbedSeedsVFile) + perturbedSeedsTable.set('pos',coords).save(perturbedSeedsVFile,legacy=True) #--- do tesselation with perturbed seed file ------------------------------------------------------ perturbedGeomVFile.close() @@ -101,12 +100,12 @@ class myThread (threading.Thread): perturbedGeomVFile.seek(0) #--- evaluate current seeds file ------------------------------------------------------------------ - perturbedGeom = damask.Geom.from_file(perturbedGeomVFile) - myNmicrostructures = len(np.unique(perturbedGeom.microstructure)) - currentData=np.bincount(perturbedGeom.microstructure.ravel())[1:]/points + perturbedGeom = damask.Geom.load_ASCII(perturbedGeomVFile) + myNmaterials = len(np.unique(perturbedGeom.material)) + currentData = np.bincount(perturbedGeom.material.ravel())[1:]/points currentError=[] currentHist=[] - for i in range(nMicrostructures): # calculate the deviation in all bins per histogram + for i in range(nMaterials): # calculate the deviation in all bins per histogram currentHist.append(np.histogram(currentData,bins=target[i]['bins'])[0]) currentError.append(np.sqrt(np.square(np.array(target[i]['histogram']-currentHist[i])).sum())) @@ -118,12 +117,12 @@ class myThread (threading.Thread): bestMatch = match #--- count bin classes with no mismatch ---------------------------------------------------------------------- myMatch=0 - for i in range(nMicrostructures): + for i in range(nMaterials): if currentError[i] > 0.0: break myMatch = i+1 - if myNmicrostructures == nMicrostructures: - for i in range(min(nMicrostructures,myMatch+options.bins)): + if myNmaterials == nMaterials: + for i in range(min(nMaterials,myMatch+options.bins)): if currentError[i] > target[i]['error']: # worse fitting, next try randReset = True break @@ -142,25 +141,25 @@ class myThread (threading.Thread): for line in perturbedSeedsVFile: currentSeedsFile.write(line) bestSeedsVFile.write(line) - for j in range(nMicrostructures): # save new errors for all bins + for j in range(nMaterials): # save new errors for all bins target[j]['error'] = currentError[j] if myMatch > match: # one or more new bins have no deviation damask.util.croak( 'Stage {:d} cleared'.format(myMatch)) match=myMatch sys.stdout.flush() break - if i == min(nMicrostructures,myMatch+options.bins)-1: # same quality as before: take it to keep on moving + if i == min(nMaterials,myMatch+options.bins)-1: # same quality as before: take it to keep on moving bestSeedsUpdate = time.time() perturbedSeedsVFile.seek(0) bestSeedsVFile.close() bestSeedsVFile = StringIO() bestSeedsVFile.writelines(perturbedSeedsVFile.readlines()) - for j in range(nMicrostructures): + for j in range(nMaterials): target[j]['error'] = currentError[j] randReset = True else: #--- not all grains are tessellated - damask.util.croak('Thread {:d}: Microstructure mismatch ({:d} microstructures mapped)'\ - .format(self.threadID,myNmicrostructures)) + damask.util.croak('Thread {:d}: Material mismatch ({:d} material indices mapped)'\ + .format(self.threadID,myNmaterials)) randReset = True @@ -213,15 +212,15 @@ if options.randomSeed is None: options.randomSeed = int(os.urandom(4).hex(),16) damask.util.croak(options.randomSeed) delta = options.scale/np.array(options.grid) -baseFile=os.path.splitext(os.path.basename(options.seedFile))[0] +baseFile = os.path.splitext(os.path.basename(options.seedFile))[0] points = np.array(options.grid).prod().astype('float') # ----------- calculate target distribution and bin edges 
-targetGeom = damask.Geom.from_file(os.path.splitext(os.path.basename(options.target))[0]+'.geom') -nMicrostructures = len(np.unique(targetGeom.microstructure)) -targetVolFrac = np.bincount(targetGeom.microstructure.flatten())/targetGeom.grid.prod().astype(np.float) -target=[] -for i in range(1,nMicrostructures+1): +targetGeom = damask.Geom.load_ASCII(os.path.splitext(os.path.basename(options.target))[0]+'.geom') +nMaterials = len(np.unique(targetGeom.material)) +targetVolFrac = np.bincount(targetGeom.material.flatten())/targetGeom.grid.prod().astype(np.float) +target = [] +for i in range(1,nMaterials+1): targetHist,targetBins = np.histogram(targetVolFrac,bins=i) #bin boundaries target.append({'histogram':targetHist,'bins':targetBins}) @@ -234,7 +233,7 @@ else: bestSeedsVFile.write(damask.util.execute('seeds_fromRandom'+\ ' -g '+' '.join(list(map(str, options.grid)))+\ ' -r {:d}'.format(options.randomSeed)+\ - ' -N '+str(nMicrostructures))[0]) + ' -N '+str(nMaterials))[0]) bestSeedsUpdate = time.time() # ----------- tessellate initial seed file to get and evaluate geom file @@ -243,13 +242,13 @@ initialGeomVFile = StringIO() initialGeomVFile.write(damask.util.execute('geom_fromVoronoiTessellation '+ ' -g '+' '.join(list(map(str, options.grid))),bestSeedsVFile)[0]) initialGeomVFile.seek(0) -initialGeom = damask.Geom.from_file(initialGeomVFile) +initialGeom = damask.Geom.load_ASCII(initialGeomVFile) -if len(np.unique(targetGeom.microstructure)) != nMicrostructures: - damask.util.croak('error. Microstructure count mismatch') +if len(np.unique(targetGeom.material)) != nMaterials: + damask.util.croak('error. Material count mismatch') -initialData = np.bincount(initialGeom.microstructure.flatten())/points -for i in range(nMicrostructures): +initialData = np.bincount(initialGeom.material.flatten())/points +for i in range(nMaterials): initialHist = np.histogram(initialData,bins=target[i]['bins'])[0] target[i]['error']=np.sqrt(np.square(np.array(target[i]['histogram']-initialHist)).sum()) @@ -258,13 +257,13 @@ if target[0]['error'] > 0.0: target[0]['error'] *=((target[0]['bins'][0]-np.min(initialData))**2.0+ (target[0]['bins'][1]-np.max(initialData))**2.0)**0.5 match=0 -for i in range(nMicrostructures): +for i in range(nMaterials): if target[i]['error'] > 0.0: break match = i+1 if options.maxseeds < 1: - maxSeeds = len(np.unique(initialGeom.microstructure)) + maxSeeds = len(np.unique(initialGeom.material)) else: maxSeeds = options.maxseeds @@ -273,8 +272,8 @@ sys.stdout.flush() initialGeomVFile.close() # start mulithreaded monte carlo simulation -threads=[] -s=threading.Semaphore(1) +threads = [] +s = threading.Semaphore(1) for i in range(options.threads): threads.append(myThread(i)) diff --git a/processing/pre/seeds_fromGeom.py b/processing/pre/seeds_fromGeom.py index 97550ce13..b8d74b651 100755 --- a/processing/pre/seeds_fromGeom.py +++ b/processing/pre/seeds_fromGeom.py @@ -17,7 +17,7 @@ scriptID = ' '.join([scriptName,damask.version]) #-------------------------------------------------------------------------------------------------- parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """ -Create seed file taking microstructure indices from given geom file. +Create seed file taking material indices from given geom file. Indices can be black-listed or white-listed. 
""", version = scriptID) @@ -46,12 +46,12 @@ options.blacklist = [int(i) for i in options.blacklist] for name in filenames: damask.util.report(scriptName,name) - geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) - microstructure = geom.get_microstructure().reshape((-1,1),order='F') + geom = damask.Geom.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + material = geom.material.reshape((-1,1),order='F') - mask = np.logical_and(np.in1d(microstructure,options.whitelist,invert=False) if options.whitelist else \ + mask = np.logical_and(np.in1d(material,options.whitelist,invert=False) if options.whitelist else \ np.full(geom.grid.prod(),True,dtype=bool), - np.in1d(microstructure,options.blacklist,invert=True) if options.blacklist else \ + np.in1d(material,options.blacklist,invert=True) if options.blacklist else \ np.full(geom.grid.prod(),True,dtype=bool)) seeds = damask.grid_filters.cell_coord0(geom.grid,geom.size).reshape(-1,3,order='F') @@ -61,8 +61,8 @@ for name in filenames: 'grid\ta {}\tb {}\tc {}'.format(*geom.grid), 'size\tx {}\ty {}\tz {}'.format(*geom.size), 'origin\tx {}\ty {}\tz {}'.format(*geom.origin), - 'homogenization\t{}'.format(geom.homogenization)] + ] - table = damask.Table(seeds[mask],{'pos':(3,)},comments) - table = table.add('microstructure',microstructure[mask]) - table.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.seeds') + damask.Table(seeds[mask],{'pos':(3,)},comments)\ + .add('material',material[mask].astype(int))\ + .save(sys.stdout if name is None else os.path.splitext(name)[0]+'.seeds',legacy=True) diff --git a/processing/pre/seeds_fromPokes.py b/processing/pre/seeds_fromPokes.py index 78172fc23..887d76392 100755 --- a/processing/pre/seeds_fromPokes.py +++ b/processing/pre/seeds_fromPokes.py @@ -52,7 +52,7 @@ options.box = np.array(options.box).reshape(3,2) for name in filenames: damask.util.report(scriptName,name) - geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) + geom = damask.Geom.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) offset =(np.amin(options.box, axis=1)*geom.grid/geom.size).astype(int) box = np.amax(options.box, axis=1) \ @@ -76,7 +76,7 @@ for name in filenames: g[2] = k + offset[2] g %= geom.grid seeds[n,0:3] = (g+0.5)/geom.grid # normalize coordinates to box - seeds[n, 3] = geom.microstructure[g[0],g[1],g[2]] + seeds[n, 3] = geom.material[g[0],g[1],g[2]] if options.x: g[0] += 1 if options.y: g[1] += 1 n += 1 @@ -88,9 +88,9 @@ for name in filenames: 'grid\ta {}\tb {}\tc {}'.format(*geom.grid), 'size\tx {}\ty {}\tz {}'.format(*geom.size), 'origin\tx {}\ty {}\tz {}'.format(*geom.origin), - 'homogenization\t{}'.format(geom.homogenization)] + ] - table = damask.Table(seeds,{'pos':(3,),'microstructure':(1,)},comments) - table.set('microstructure',table.get('microstructure').astype(np.int)) - table.to_file(sys.stdout if name is None else \ - os.path.splitext(name)[0]+f'_poked_{options.N}.seeds') + table = damask.Table(seeds,{'pos':(3,),'material':(1,)},comments) + table.set('material',table.get('material').astype(np.int))\ + .save(sys.stdout if name is None else \ + os.path.splitext(name)[0]+f'_poked_{options.N}.seeds',legacy=True) diff --git a/processing/pre/seeds_fromRandom.py b/processing/pre/seeds_fromRandom.py index a544528cf..451e218aa 100755 --- a/processing/pre/seeds_fromRandom.py +++ b/processing/pre/seeds_fromRandom.py @@ -154,12 +154,12 @@ for name in filenames: 
'randomSeed\t{}'.format(options.randomSeed), ] - table = damask.Table(np.hstack((seeds,eulers)),{'pos':(3,),'euler':(3,)},comments) - table = table.add('microstructure',np.arange(options.microstructure,options.microstructure + options.N,dtype=int)) + table = damask.Table(np.hstack((seeds,eulers)),{'pos':(3,),'euler':(3,)},comments)\ + .add('microstructure',np.arange(options.microstructure,options.microstructure + options.N,dtype=int)) if options.weights: weights = np.random.uniform(low = 0, high = options.max, size = options.N) if options.max > 0.0 \ else np.random.normal(loc = options.mean, scale = options.sigma, size = options.N) table = table.add('weight',weights) - table.to_file(sys.stdout if name is None else name) + table.save(sys.stdout if name is None else name,legacy=True) diff --git a/python/damask/__init__.py b/python/damask/__init__.py index 1404e88d1..3f2ff6813 100644 --- a/python/damask/__init__.py +++ b/python/damask/__init__.py @@ -18,6 +18,7 @@ from ._lattice import Symmetry, Lattice# noqa from ._orientation import Orientation # noqa from ._result import Result # noqa from ._geom import Geom # noqa +from ._material import Material # noqa from . import solver # noqa # deprecated diff --git a/python/damask/_colormap.py b/python/damask/_colormap.py index fccb8642b..fa7d36ec2 100644 --- a/python/damask/_colormap.py +++ b/python/damask/_colormap.py @@ -235,100 +235,128 @@ class Colormap(mpl.colors.ListedColormap): return Colormap(np.array(rev.colors),rev.name[:-4] if rev.name.endswith('_r_r') else rev.name) - def to_file(self,fname=None,format='ParaView'): + + def save_paraview(self,fname=None): """ - Export colormap to file for use in external programs. + Write colormap to JSON file for Paraview. Parameters ---------- fname : file, str, or pathlib.Path, optional. Filename to store results. If not given, the filename will - consist of the name of the colormap and an extension that - depends on the file format. - format : {'ParaView', 'ASCII', 'GOM', 'gmsh'}, optional - File format, defaults to 'ParaView'. Available formats are: - - ParaView: JSON file, extension '.json'. - - ASCII: Plain text file, extension '.txt'. - - GOM: Aramis GOM (DIC), extension '.legend'. - - Gmsh: Gmsh FEM mesh-generator, extension '.msh'. + consist of the name of the colormap and extension '.json'. 
""" if fname is not None: try: - f = open(fname,'w') + fhandle = open(fname,'w') except TypeError: - f = fname + fhandle = fname else: - f = None + fhandle = None - if format.lower() == 'paraview': - Colormap._export_paraview(self,f) - elif format.lower() == 'ascii': - Colormap._export_ASCII(self,f) - elif format.lower() == 'gom': - Colormap._export_GOM(self,f) - elif format.lower() == 'gmsh': - Colormap._export_gmsh(self,f) - else: - raise ValueError('Unknown output format: {format}.') - - @staticmethod - def _export_paraview(colormap,fhandle=None): - """Write colormap to JSON file for Paraview.""" colors = [] - for i,c in enumerate(np.round(colormap.colors,6).tolist()): + for i,c in enumerate(np.round(self.colors,6).tolist()): colors+=[i]+c out = [{ 'Creator':util.execution_stamp('Colormap'), 'ColorSpace':'RGB', - 'Name':colormap.name, + 'Name':self.name, 'DefaultMap':True, 'RGBPoints':colors }] if fhandle is None: - with open(colormap.name.replace(' ','_')+'.json', 'w') as f: + with open(self.name.replace(' ','_')+'.json', 'w') as f: json.dump(out, f,indent=4) else: json.dump(out,fhandle,indent=4) - @staticmethod - def _export_ASCII(colormap,fhandle=None): - """Write colormap to ASCII table.""" - labels = {'RGBA':4} if colormap.colors.shape[1] == 4 else {'RGB': 3} - t = Table(colormap.colors,labels,f'Creator: {util.execution_stamp("Colormap")}') + + def save_ASCII(self,fname=None): + """ + Write colormap to ASCII table. + + Parameters + ---------- + fname : file, str, or pathlib.Path, optional. + Filename to store results. If not given, the filename will + consist of the name of the colormap and extension '.txt'. + + """ + if fname is not None: + try: + fhandle = open(fname,'w') + except TypeError: + fhandle = fname + else: + fhandle = None + + labels = {'RGBA':4} if self.colors.shape[1] == 4 else {'RGB': 3} + t = Table(self.colors,labels,f'Creator: {util.execution_stamp("Colormap")}') if fhandle is None: - with open(colormap.name.replace(' ','_')+'.txt', 'w') as f: - t.to_file(f,new_style=True) + with open(self.name.replace(' ','_')+'.txt', 'w') as f: + t.save(f) else: - t.to_file(fhandle,new_style=True) + t.save(fhandle) - @staticmethod - def _export_GOM(colormap,fhandle=None): - """Write colormap to GOM Aramis compatible format.""" + + def save_GOM(self,fname=None): + """ + Write colormap to GOM Aramis compatible format. + + Parameters + ---------- + fname : file, str, or pathlib.Path, optional. + Filename to store results. If not given, the filename will + consist of the name of the colormap and extension '.legend'. 
+ + """ + if fname is not None: + try: + fhandle = open(fname,'w') + except TypeError: + fhandle = fname + else: + fhandle = None # ToDo: test in GOM - GOM_str = f'1 1 {colormap.name.replace(" ","_")} 9 {colormap.name.replace(" ","_")} ' \ + GOM_str = '1 1 {name} 9 {name} '.format(name=self.name.replace(" ","_")) \ + '0 1 0 3 0 0 -1 9 \\ 0 0 0 255 255 255 0 0 255 ' \ - + f'30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 {len(colormap.colors)}' \ - + ' '.join([f' 0 {c[0]} {c[1]} {c[2]} 255 1' for c in reversed((colormap.colors*255).astype(int))]) \ + + f'30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 {len(self.colors)}' \ + + ' '.join([f' 0 {c[0]} {c[1]} {c[2]} 255 1' for c in reversed((self.colors*255).astype(int))]) \ + '\n' if fhandle is None: - with open(colormap.name.replace(' ','_')+'.legend', 'w') as f: + with open(self.name.replace(' ','_')+'.legend', 'w') as f: f.write(GOM_str) else: fhandle.write(GOM_str) - @staticmethod - def _export_gmsh(colormap,fhandle=None): - """Write colormap to Gmsh compatible format.""" + def save_gmsh(self,fname=None): + """ + Write colormap to Gmsh compatible format. + + Parameters + ---------- + fname : file, str, or pathlib.Path, optional. + Filename to store results. If not given, the filename will + consist of the name of the colormap and extension '.msh'. + + """ + if fname is not None: + try: + fhandle = open(fname,'w') + except TypeError: + fhandle = fname + else: + fhandle = None # ToDo: test in gmsh gmsh_str = 'View.ColorTable = {\n' \ - +'\n'.join([f'{c[0]},{c[1]},{c[2]},' for c in colormap.colors[:,:3]*255]) \ + +'\n'.join([f'{c[0]},{c[1]},{c[2]},' for c in self.colors[:,:3]*255]) \ +'\n}\n' if fhandle is None: - with open(colormap.name.replace(' ','_')+'.msh', 'w') as f: + with open(self.name.replace(' ','_')+'.msh', 'w') as f: f.write(gmsh_str) else: fhandle.write(gmsh_str) diff --git a/python/damask/_geom.py b/python/damask/_geom.py index f5dc05f2f..6af265021 100644 --- a/python/damask/_geom.py +++ b/python/damask/_geom.py @@ -1,7 +1,5 @@ -import sys import copy -import multiprocessing -from io import StringIO +import multiprocessing as mp from functools import partial import numpy as np @@ -17,39 +15,54 @@ from . import grid_filters class Geom: """Geometry definition for grid solvers.""" - def __init__(self,microstructure,size,origin=[0.0,0.0,0.0],homogenization=1,comments=[]): + def __init__(self,material,size,origin=[0.0,0.0,0.0],comments=[]): """ - New geometry definition from array of microstructures and size. + New geometry definition from array of material, size, and origin. Parameters ---------- - microstructure : numpy.ndarray - Microstructure array (3D) + material : numpy.ndarray + Material index array (3D). size : list or numpy.ndarray - Physical size of the microstructure in meter. + Physical size of the geometry in meter. origin : list or numpy.ndarray, optional - Physical origin of the microstructure in meter. - homogenization : int, optional - Homogenization index. + Physical origin of the geometry in meter. comments : list of str, optional Comment lines. 
""" - self.set_microstructure(microstructure) - self.set_size(size) - self.set_origin(origin) - self.set_homogenization(homogenization) - self.set_comments(comments) + if len(material.shape) != 3: + raise ValueError(f'Invalid material shape {material.shape}.') + elif material.dtype not in np.sctypes['float'] + np.sctypes['int']: + raise TypeError(f'Invalid material data type {material.dtype}.') + else: + self.material = np.copy(material) + + if self.material.dtype in np.sctypes['float'] and \ + np.all(self.material == self.material.astype(int).astype(float)): + self.material = self.material.astype(int) + + if len(size) != 3 or any(np.array(size) <= 0): + raise ValueError(f'Invalid size {size}.') + else: + self.size = np.array(size) + + if len(origin) != 3: + raise ValueError(f'Invalid origin {origin}.') + else: + self.origin = np.array(origin) + + self.comments = [str(c) for c in comments] if isinstance(comments,list) else [str(comments)] def __repr__(self): """Basic information on geometry definition.""" return util.srepr([ - f'grid a b c: {util.srepr(self.get_grid ()," x ")}', - f'size x y z: {util.srepr(self.get_size ()," x ")}', - f'origin x y z: {util.srepr(self.get_origin()," ")}', - f'# materialpoints: {self.N_microstructure}', - f'max materialpoint: {np.nanmax(self.microstructure)}', + f'grid a b c: {util.srepr(self.grid, " x ")}', + f'size x y z: {util.srepr(self.size, " x ")}', + f'origin x y z: {util.srepr(self.origin," ")}', + f'# materials: {self.N_materials}', + f'max material: {np.nanmax(self.material)}', ]) @@ -63,43 +76,6 @@ class Geom: return self.__copy__() - def duplicate(self,microstructure=None,size=None,origin=None,comments=None,autosize=False): - """ - Create a duplicate having updated microstructure, size, and origin. - - Parameters - ---------- - microstructure : numpy.ndarray, optional - Microstructure array (3D). - size : list or numpy.ndarray, optional - Physical size of the microstructure in meter. - origin : list or numpy.ndarray, optional - Physical origin of the microstructure in meter. - comments : list of str, optional - Comment lines. - autosize : bool, optional - Ignore size parameter and rescale according to change of grid points. - - """ - if size is not None and autosize: - raise ValueError('Auto-sizing conflicts with explicit size parameter.') - - grid_old = self.get_grid() - dup = self.copy() - dup.set_microstructure(microstructure) - dup.set_origin(origin) - - if comments is not None: - dup.set_comments(comments) - - if size is not None: - dup.set_size(size) - elif autosize: - dup.set_size(dup.get_grid()/grid_old*self.get_size()) - - return dup - - def diff(self,other): """ Report property differences of self relative to other. 
@@ -111,180 +87,41 @@ class Geom: """ message = [] - if np.any(other.get_grid() != self.get_grid()): - message.append(util.delete(f'grid a b c: {util.srepr(other.get_grid()," x ")}')) - message.append(util.emph( f'grid a b c: {util.srepr( self.get_grid()," x ")}')) + if np.any(other.grid != self.grid): + message.append(util.delete(f'grid a b c: {util.srepr(other.grid," x ")}')) + message.append(util.emph( f'grid a b c: {util.srepr( self.grid," x ")}')) - if np.any(other.get_size() != self.get_size()): - message.append(util.delete(f'size x y z: {util.srepr(other.get_size()," x ")}')) - message.append(util.emph( f'size x y z: {util.srepr( self.get_size()," x ")}')) + if not np.allclose(other.size,self.size): + message.append(util.delete(f'size x y z: {util.srepr(other.size," x ")}')) + message.append(util.emph( f'size x y z: {util.srepr( self.size," x ")}')) - if np.any(other.get_origin() != self.get_origin()): - message.append(util.delete(f'origin x y z: {util.srepr(other.get_origin()," ")}')) - message.append(util.emph( f'origin x y z: {util.srepr( self.get_origin()," ")}')) + if not np.allclose(other.origin,self.origin): + message.append(util.delete(f'origin x y z: {util.srepr(other.origin," ")}')) + message.append(util.emph( f'origin x y z: {util.srepr( self.origin," ")}')) - if other.N_microstructure != self.N_microstructure: - message.append(util.delete(f'# materialpoints: {other.N_microstructure}')) - message.append(util.emph( f'# materialpoints: { self.N_microstructure}')) + if other.N_materials != self.N_materials: + message.append(util.delete(f'# materials: {other.N_materials}')) + message.append(util.emph( f'# materials: { self.N_materials}')) - if np.nanmax(other.microstructure) != np.nanmax(self.microstructure): - message.append(util.delete(f'max materialpoint: {np.nanmax(other.microstructure)}')) - message.append(util.emph( f'max materialpoint: {np.nanmax( self.microstructure)}')) + if np.nanmax(other.material) != np.nanmax(self.material): + message.append(util.delete(f'max material: {np.nanmax(other.material)}')) + message.append(util.emph( f'max material: {np.nanmax( self.material)}')) return util.return_message(message) - def set_comments(self,comments): - """ - Replace all existing comments. - - Parameters - ---------- - comments : list of str - All comments. - - """ - self.comments = [] - self.add_comments(comments) - - - def add_comments(self,comments): - """ - Append comments to existing comments. - - Parameters - ---------- - comments : list of str - New comments. - - """ - self.comments += [str(c) for c in comments] if isinstance(comments,list) else [str(comments)] - - - def set_microstructure(self,microstructure): - """ - Replace the existing microstructure representation. - - The complete microstructure is replaced (indcluding grid definition), - unless a masked array is provided in which case the grid dimensions - need to match and masked entries are not replaced. - - Parameters - ---------- - microstructure : numpy.ndarray or numpy.ma.core.MaskedArray of shape (:,:,:) - Microstructure indices. 
- - """ - if microstructure is not None: - if isinstance(microstructure,np.ma.core.MaskedArray): - self.microstructure = np.where(microstructure.mask, - self.microstructure,microstructure.data) - else: - self.microstructure = np.copy(microstructure) - - if self.microstructure.dtype in np.sctypes['float'] and \ - np.all(self.microstructure == self.microstructure.astype(int).astype(float)): - self.microstructure = self.microstructure.astype(int) - - if len(self.microstructure.shape) != 3: - raise ValueError(f'Invalid microstructure shape {microstructure.shape}') - elif self.microstructure.dtype not in np.sctypes['float'] + np.sctypes['int']: - raise TypeError(f'Invalid microstructure data type {microstructure.dtype}') - - - def set_size(self,size): - """ - Replace the existing size information. - - Parameters - ---------- - size : list or numpy.ndarray - Physical size of the microstructure in meter. - - """ - if size is not None: - if len(size) != 3 or any(np.array(size) <= 0): - raise ValueError(f'Invalid size {size}') - else: - self.size = np.array(size) - - - def set_origin(self,origin): - """ - Replace the existing origin information. - - Parameters - ---------- - origin : list or numpy.ndarray - Physical origin of the microstructure in meter. - - """ - if origin is not None: - if len(origin) != 3: - raise ValueError(f'Invalid origin {origin}') - else: - self.origin = np.array(origin) - - - def set_homogenization(self,homogenization): - """ - Replace the existing homogenization index. - - Parameters - ---------- - homogenization : int - Homogenization index. - - """ - if homogenization is not None: - if not isinstance(homogenization,int) or homogenization < 1: - raise TypeError(f'Invalid homogenization {homogenization}.') - else: - self.homogenization = homogenization - - @property def grid(self): - return self.get_grid() + return np.asarray(self.material.shape) @property - def N_microstructure(self): - return np.unique(self.microstructure).size - - - def get_microstructure(self): - """Return the microstructure representation.""" - return np.copy(self.microstructure) - - - def get_size(self): - """Return the physical size in meter.""" - return np.copy(self.size) - - - def get_origin(self): - """Return the origin in meter.""" - return np.copy(self.origin) - - - def get_grid(self): - """Return the grid discretization.""" - return np.asarray(self.microstructure.shape) - - - def get_homogenization(self): - """Return the homogenization index.""" - return self.homogenization - - - def get_comments(self): - """Return the comments.""" - return self.comments[:] + def N_materials(self): + return np.unique(self.material).size @staticmethod - def from_file(fname): + def load_ASCII(fname): """ Read a geom file. 
@@ -320,12 +157,10 @@ class Geom: size = np.array([float(dict(zip(items[1::2],items[2::2]))[i]) for i in ['x','y','z']]) elif key == 'origin': origin = np.array([float(dict(zip(items[1::2],items[2::2]))[i]) for i in ['x','y','z']]) - elif key == 'homogenization': - homogenization = int(items[1]) else: comments.append(line.strip()) - microstructure = np.empty(grid.prod()) # initialize as flat array + material = np.empty(grid.prod()) # initialize as flat array i = 0 for line in content[header_length:]: items = line.split('#')[0].split() @@ -337,20 +172,20 @@ class Geom: abs(int(items[2])-int(items[0]))+1,dtype=float) else: items = list(map(float,items)) else: items = list(map(float,items)) - microstructure[i:i+len(items)] = items + material[i:i+len(items)] = items i += len(items) if i != grid.prod(): raise TypeError(f'Invalid file: expected {grid.prod()} entries, found {i}') - if not np.any(np.mod(microstructure,1) != 0.0): # no float present - microstructure = microstructure.astype('int') + if not np.any(np.mod(material,1) != 0.0): # no float present + material = material.astype('int') - return Geom(microstructure.reshape(grid,order='F'),size,origin,homogenization,comments) + return Geom(material.reshape(grid,order='F'),size,origin,comments) @staticmethod - def from_vtr(fname): + def load(fname): """ Read a VTK rectilinear grid. @@ -361,13 +196,15 @@ class Geom: Valid extension is .vtr, it will be appended if not given. """ - v = VTK.from_file(fname if str(fname).endswith('.vtr') else str(fname)+'.vtr') + v = VTK.load(fname if str(fname).endswith('.vtr') else str(fname)+'.vtr') comments = v.get_comments() grid = np.array(v.vtk_data.GetDimensions())-1 bbox = np.array(v.vtk_data.GetBounds()).reshape(3,2).T - size = bbox[1] - bbox[0] - return Geom(v.get('materialpoint').reshape(grid,order='F'),size,bbox[0],comments=comments) + return Geom(material = v.get('material').reshape(grid,order='F'), + size = bbox[1] - bbox[0], + origin = bbox[0], + comments=comments) @staticmethod @@ -384,7 +221,7 @@ class Geom: grid : int numpy.ndarray of shape (3) Number of grid points in x,y,z direction. size : list or numpy.ndarray of shape (3) - Physical size of the microstructure in meter. + Physical size of the geometry in meter. seeds : numpy.ndarray of shape (:,3) Position of the seed points in meter. All points need to lay within the box. 
weights : numpy.ndarray of shape (seeds.shape[0]) @@ -404,20 +241,21 @@ class Geom: seeds_p = seeds coords = grid_filters.cell_coord0(grid,size).reshape(-1,3) - pool = multiprocessing.Pool(processes = int(environment.options['DAMASK_NUM_THREADS'])) + pool = mp.Pool(processes = int(environment.options['DAMASK_NUM_THREADS'])) result = pool.map_async(partial(Geom._find_closest_seed,seeds_p,weights_p), [coord for coord in coords]) pool.close() pool.join() - microstructure = np.array(result.get()) + material = np.array(result.get()) if periodic: - microstructure = microstructure.reshape(grid*3) - microstructure = microstructure[grid[0]:grid[0]*2,grid[1]:grid[1]*2,grid[2]:grid[2]*2]%seeds.shape[0] + material = material.reshape(grid*3) + material = material[grid[0]:grid[0]*2,grid[1]:grid[1]*2,grid[2]:grid[2]*2]%seeds.shape[0] else: - microstructure = microstructure.reshape(grid) + material = material.reshape(grid) - return Geom(microstructure+1,size,homogenization=1, - comments=util.execution_stamp('Geom','from_Laguerre_tessellation'), + return Geom(material = material+1, + size = size, + comments = util.execution_stamp('Geom','from_Laguerre_tessellation'), ) @@ -431,7 +269,7 @@ class Geom: grid : int numpy.ndarray of shape (3) Number of grid points in x,y,z direction. size : list or numpy.ndarray of shape (3) - Physical size of the microstructure in meter. + Physical size of the geometry in meter. seeds : numpy.ndarray of shape (:,3) Position of the seed points in meter. All points need to lay within the box. periodic : Boolean, optional @@ -440,137 +278,110 @@ class Geom: """ coords = grid_filters.cell_coord0(grid,size).reshape(-1,3) KDTree = spatial.cKDTree(seeds,boxsize=size) if periodic else spatial.cKDTree(seeds) - devNull,microstructure = KDTree.query(coords) + devNull,material = KDTree.query(coords) - return Geom(microstructure.reshape(grid)+1,size,homogenization=1, - comments=util.execution_stamp('Geom','from_Voronoi_tessellation'), + return Geom(material = material.reshape(grid)+1, + size = size, + comments = util.execution_stamp('Geom','from_Voronoi_tessellation'), ) - def to_file(self,fname,format='vtr',pack=None): + def save_ASCII(self,fname,compress=None): """ Writes a geom file. Parameters ---------- fname : str or file handle - Geometry file to write. - format : {'vtr', 'ASCII'}, optional - File format, defaults to 'vtr'. Available formats are: - - vtr: VTK rectilinear grid file, extension '.vtr'. - - ASCII: Plain text file, extension '.geom'. - pack : bool, optional - Compress ASCII geometry with 'x of y' and 'a to b'. + Geometry file to write with extension '.geom'. + compress : bool, optional + Compress geometry with 'x of y' and 'a to b'. """ - def _to_ASCII(geom,fname,pack=None): - """ - Writes a geom file. + header = [f'{len(self.comments)+4} header'] + self.comments \ + + ['grid a {} b {} c {}'.format(*self.grid), + 'size x {} y {} z {}'.format(*self.size), + 'origin x {} y {} z {}'.format(*self.origin), + 'homogenization 1', + ] - Parameters - ---------- - geom : Geom object - Geometry to write. - fname : str or file handle - Geometry file to write. - pack : bool, optional - Compress geometry with 'x of y' and 'a to b'. 
+ grid = self.grid - """ - header = [f'{len(geom.comments)+4} header'] + geom.comments - header.append('grid a {} b {} c {}'.format(*geom.get_grid())) - header.append('size x {} y {} z {}'.format(*geom.get_size())) - header.append('origin x {} y {} z {}'.format(*geom.get_origin())) - header.append(f'homogenization {geom.get_homogenization()}') - - grid = geom.get_grid() - - if pack is None: - plain = grid.prod()/geom.N_microstructure < 250 - else: - plain = not pack - - if plain: - format_string = '%g' if geom.microstructure.dtype in np.sctypes['float'] else \ - '%{}i'.format(1+int(np.floor(np.log10(np.nanmax(geom.microstructure))))) - np.savetxt(fname, - geom.microstructure.reshape([grid[0],np.prod(grid[1:])],order='F').T, - header='\n'.join(header), fmt=format_string, comments='') - else: - try: - f = open(fname,'w') - except TypeError: - f = fname - - compressType = None - former = start = -1 - reps = 0 - for current in geom.microstructure.flatten('F'): - if abs(current - former) == 1 and (start - current) == reps*(former - current): - compressType = 'to' - reps += 1 - elif current == former and start == former: - compressType = 'of' - reps += 1 - else: - if compressType is None: - f.write('\n'.join(header)+'\n') - elif compressType == '.': - f.write(f'{former}\n') - elif compressType == 'to': - f.write(f'{start} to {former}\n') - elif compressType == 'of': - f.write(f'{reps} of {former}\n') - - compressType = '.' - start = current - reps = 1 - - former = current - - if compressType == '.': - f.write(f'{former}\n') - elif compressType == 'to': - f.write(f'{start} to {former}\n') - elif compressType == 'of': - f.write(f'{reps} of {former}\n') - - - def _to_vtr(geom,fname=None): - """ - Generates vtk rectilinear grid. - - Parameters - ---------- - geom : Geom object - Geometry to write. - fname : str, optional - Filename to write. If no file is given, a string is returned. - Valid extension is .vtr, it will be appended if not given. 
- - """ - v = VTK.from_rectilinearGrid(geom.grid,geom.size,geom.origin) - v.add(geom.microstructure.flatten(order='F'),'materialpoint') - v.add_comments(geom.comments) - - if fname: - v.to_file(fname if str(fname).endswith('.vtr') else str(fname)+'.vtr') - else: - sys.stdout.write(v.__repr__()) - - if format.lower() == 'ascii': - return _to_ASCII(self,fname,pack) - elif format.lower() == 'vtr': - return _to_vtr(self,fname) + if compress is None: + plain = grid.prod()/self.N_materials < 250 else: - raise TypeError(f'Unknown format {format}.') + plain = not compress - def as_ASCII(self,pack=False): - """Format geometry as human-readable ASCII.""" - f = StringIO() - self.to_file(f,'ASCII',pack) - f.seek(0) - return ''.join(f.readlines()) + if plain: + format_string = '%g' if self.material.dtype in np.sctypes['float'] else \ + '%{}i'.format(1+int(np.floor(np.log10(np.nanmax(self.material))))) + np.savetxt(fname, + self.material.reshape([grid[0],np.prod(grid[1:])],order='F').T, + header='\n'.join(header), fmt=format_string, comments='') + else: + try: + f = open(fname,'w') + except TypeError: + f = fname + + compressType = None + former = start = -1 + reps = 0 + for current in self.material.flatten('F'): + if abs(current - former) == 1 and (start - current) == reps*(former - current): + compressType = 'to' + reps += 1 + elif current == former and start == former: + compressType = 'of' + reps += 1 + else: + if compressType is None: + f.write('\n'.join(header)+'\n') + elif compressType == '.': + f.write(f'{former}\n') + elif compressType == 'to': + f.write(f'{start} to {former}\n') + elif compressType == 'of': + f.write(f'{reps} of {former}\n') + + compressType = '.' + start = current + reps = 1 + + former = current + + if compressType == '.': + f.write(f'{former}\n') + elif compressType == 'to': + f.write(f'{start} to {former}\n') + elif compressType == 'of': + f.write(f'{reps} of {former}\n') + + + def save(self,fname,compress=True): + """ + Generates vtk rectilinear grid. + + Parameters + ---------- + fname : str, optional + Filename to write. If no file is given, a string is returned. + Valid extension is .vtr, it will be appended if not given. + compress : bool, optional + Compress with zlib algorithm. Defaults to True. + + """ + v = VTK.from_rectilinearGrid(self.grid,self.size,self.origin) + v.add(self.material.flatten(order='F'),'material') + v.add_comments(self.comments) + + v.save(fname if str(fname).endswith('.vtr') else str(fname)+'.vtr',parallel=False,compress=compress) + + + def show(self): + """Show on screen.""" + v = VTK.from_rectilinearGrid(self.grid,self.size,self.origin) + v.show() def add_primitive(self,dimension,center,exponent, @@ -593,11 +404,11 @@ class Geom: 0 gives octahedron (|x|^(2^0) + |y|^(2^0) + |z|^(2^0) < 1) 1 gives a sphere (|x|^(2^1) + |y|^(2^1) + |z|^(2^1) < 1) fill : int, optional - Fill value for primitive. Defaults to microstructure.max() + 1. + Fill value for primitive. Defaults to material.max() + 1. R : damask.Rotation, optional Rotation of primitive. Defaults to no rotation. inverse : Boolean, optional - Retain original microstructure within primitive and fill outside. + Retain original materials within primitive and fill outside. Defaults to False. periodic : Boolean, optional Repeat primitive over boundaries. Defaults to True. 
@@ -619,22 +430,23 @@ class Geom: if periodic: # translate back to center mask = np.roll(mask,((c-np.ones(3)*.5)*self.grid).astype(int),(0,1,2)) - fill_ = np.full_like(self.microstructure,np.nanmax(self.microstructure)+1 if fill is None else fill) - ms = np.ma.MaskedArray(fill_,np.logical_not(mask) if inverse else mask) + fill_ = np.full_like(self.material,np.nanmax(self.material)+1 if fill is None else fill) - return self.duplicate(ms, - comments=self.get_comments()+[util.execution_stamp('Geom','add_primitive')], - ) + return Geom(material = np.where(np.logical_not(mask) if inverse else mask, self.material,fill_), + size = self.size, + origin = self.origin, + comments = self.comments+[util.execution_stamp('Geom','add_primitive')], + ) def mirror(self,directions,reflect=False): """ - Mirror microstructure along given directions. + Mirror geometry along given directions. Parameters ---------- directions : iterable containing str - Direction(s) along which the microstructure is mirrored. + Direction(s) along which the geometry is mirrored. Valid entries are 'x', 'y', 'z'. reflect : bool, optional Reflect (include) outermost layers. Defaults to False. @@ -645,28 +457,30 @@ class Geom: raise ValueError(f'Invalid direction {set(directions).difference(valid)} specified.') limits = [None,None] if reflect else [-2,0] - ms = self.get_microstructure() + mat = self.material.copy() - if 'z' in directions: - ms = np.concatenate([ms,ms[:,:,limits[0]:limits[1]:-1]],2) - if 'y' in directions: - ms = np.concatenate([ms,ms[:,limits[0]:limits[1]:-1,:]],1) if 'x' in directions: - ms = np.concatenate([ms,ms[limits[0]:limits[1]:-1,:,:]],0) + mat = np.concatenate([mat,mat[limits[0]:limits[1]:-1,:,:]],0) + if 'y' in directions: + mat = np.concatenate([mat,mat[:,limits[0]:limits[1]:-1,:]],1) + if 'z' in directions: + mat = np.concatenate([mat,mat[:,:,limits[0]:limits[1]:-1]],2) - return self.duplicate(ms, - comments=self.get_comments()+[util.execution_stamp('Geom','mirror')], - autosize=True) + return Geom(material = mat, + size = self.size/self.grid*np.asarray(mat.shape), + origin = self.origin, + comments = self.comments+[util.execution_stamp('Geom','mirror')], + ) def flip(self,directions): """ - Flip microstructure along given directions. + Flip geometry along given directions. Parameters ---------- directions : iterable containing str - Direction(s) along which the microstructure is flipped. + Direction(s) along which the geometry is flipped. Valid entries are 'x', 'y', 'z'. """ @@ -674,16 +488,18 @@ class Geom: if not set(directions).issubset(valid): raise ValueError(f'Invalid direction {set(directions).difference(valid)} specified.') - ms = np.flip(self.microstructure, (valid.index(d) for d in directions if d in valid)) + mat = np.flip(self.material, (valid.index(d) for d in directions if d in valid)) - return self.duplicate(ms, - comments=self.get_comments()+[util.execution_stamp('Geom','flip')], - ) + return Geom(material = mat, + size = self.size, + origin = self.origin, + comments = self.comments+[util.execution_stamp('Geom','flip')], + ) def scale(self,grid,periodic=True): """ - Scale microstructure to new grid. + Scale geometry to new grid. Parameters ---------- @@ -693,21 +509,23 @@ class Geom: Assume geometry to be periodic. Defaults to True. 
""" - return self.duplicate(ndimage.interpolation.zoom( - self.microstructure, - grid/self.get_grid(), - output=self.microstructure.dtype, - order=0, - mode=('wrap' if periodic else 'nearest'), - prefilter=False - ), - comments=self.get_comments()+[util.execution_stamp('Geom','scale')], - ) + return Geom(material = ndimage.interpolation.zoom( + self.material, + grid/self.grid, + output=self.material.dtype, + order=0, + mode=('wrap' if periodic else 'nearest'), + prefilter=False + ), + size = self.size, + origin = self.origin, + comments = self.comments+[util.execution_stamp('Geom','scale')], + ) def clean(self,stencil=3,selection=None,periodic=True): """ - Smooth microstructure by selecting most frequent index within given stencil at each location. + Smooth geometry by selecting most frequent material index within given stencil at each location. Parameters ---------- @@ -727,83 +545,87 @@ class Geom: else: return me - return self.duplicate(ndimage.filters.generic_filter( - self.microstructure, - mostFrequent, - size=(stencil if selection is None else stencil//2*2+1,)*3, - mode=('wrap' if periodic else 'nearest'), - extra_keywords=dict(selection=selection), - ).astype(self.microstructure.dtype), - comments=self.get_comments()+[util.execution_stamp('Geom','clean')], - ) + return Geom(material = ndimage.filters.generic_filter( + self.material, + mostFrequent, + size=(stencil if selection is None else stencil//2*2+1,)*3, + mode=('wrap' if periodic else 'nearest'), + extra_keywords=dict(selection=selection), + ).astype(self.material.dtype), + size = self.size, + origin = self.origin, + comments = self.comments+[util.execution_stamp('Geom','clean')], + ) def renumber(self): - """Renumber sorted microstructure indices to 1,...,N.""" - renumbered = np.empty(self.get_grid(),dtype=self.microstructure.dtype) - for i, oldID in enumerate(np.unique(self.microstructure)): - renumbered = np.where(self.microstructure == oldID, i+1, renumbered) + """Renumber sorted material indices to 1,...,N.""" + renumbered = np.empty(self.grid,dtype=self.material.dtype) + for i, oldID in enumerate(np.unique(self.material)): + renumbered = np.where(self.material == oldID, i+1, renumbered) - return self.duplicate(renumbered, - comments=self.get_comments()+[util.execution_stamp('Geom','renumber')], - ) + return Geom(material = renumbered, + size = self.size, + origin = self.origin, + comments = self.comments+[util.execution_stamp('Geom','renumber')], + ) def rotate(self,R,fill=None): """ - Rotate microstructure (pad if required). + Rotate geometry (pad if required). Parameters ---------- R : damask.Rotation - Rotation to apply to the microstructure. + Rotation to apply to the geometry. fill : int or float, optional - Microstructure index to fill the corners. Defaults to microstructure.max() + 1. + Material index to fill the corners. Defaults to material.max() + 1. """ - if fill is None: fill = np.nanmax(self.microstructure) + 1 - dtype = float if np.isnan(fill) or int(fill) != fill or self.microstructure.dtype==np.float else int + if fill is None: fill = np.nanmax(self.material) + 1 + dtype = float if np.isnan(fill) or int(fill) != fill or self.material.dtype==np.float else int Eulers = R.as_Eulers(degrees=True) - microstructure_in = self.get_microstructure() + material_in = self.material.copy() # These rotations are always applied in the reference coordinate system, i.e. 
(z,x,z) not (z,x',z'') # see https://www.cs.utexas.edu/~theshark/courses/cs354/lectures/cs354-14.pdf for angle,axes in zip(Eulers[::-1], [(0,1),(1,2),(0,1)]): - microstructure_out = ndimage.rotate(microstructure_in,angle,axes,order=0, + material_out = ndimage.rotate(material_in,angle,axes,order=0, prefilter=False,output=dtype,cval=fill) - if np.prod(microstructure_in.shape) == np.prod(microstructure_out.shape): + if np.prod(material_in.shape) == np.prod(material_out.shape): # avoid scipy interpolation errors for rotations close to multiples of 90° - microstructure_in = np.rot90(microstructure_in,k=np.rint(angle/90.).astype(int),axes=axes) + material_in = np.rot90(material_in,k=np.rint(angle/90.).astype(int),axes=axes) else: - microstructure_in = microstructure_out + material_in = material_out - origin = self.origin-(np.asarray(microstructure_in.shape)-self.grid)*.5 * self.size/self.grid + origin = self.origin-(np.asarray(material_in.shape)-self.grid)*.5 * self.size/self.grid - return self.duplicate(microstructure_in, - origin=origin, - comments=self.get_comments()+[util.execution_stamp('Geom','rotate')], - autosize=True, - ) + return Geom(material = material_in, + size = self.size/self.grid*np.asarray(material_in.shape), + origin = origin, + comments = self.comments+[util.execution_stamp('Geom','rotate')], + ) def canvas(self,grid=None,offset=None,fill=None): """ - Crop or enlarge/pad microstructure. + Crop or enlarge/pad geometry. Parameters ---------- grid : numpy.ndarray of shape (3) Number of grid points in x,y,z direction. offset : numpy.ndarray of shape (3) - Offset (measured in grid points) from old to new microstructure[0,0,0]. + Offset (measured in grid points) from old to new geometry [0,0,0]. fill : int or float, optional - Microstructure index to fill the background. Defaults to microstructure.max() + 1. + Material index to fill the background. Defaults to material.max() + 1. """ if offset is None: offset = 0 - if fill is None: fill = np.nanmax(self.microstructure) + 1 - dtype = float if int(fill) != fill or self.microstructure.dtype in np.sctypes['float'] else int + if fill is None: fill = np.nanmax(self.material) + 1 + dtype = float if int(fill) != fill or self.material.dtype in np.sctypes['float'] else int canvas = np.full(self.grid if grid is None else grid,fill,dtype) @@ -812,39 +634,41 @@ class Geom: ll = np.clip(-offset, 0,np.minimum( grid,self.grid-offset)) ur = np.clip(-offset+self.grid,0,np.minimum( grid,self.grid-offset)) - canvas[ll[0]:ur[0],ll[1]:ur[1],ll[2]:ur[2]] = self.microstructure[LL[0]:UR[0],LL[1]:UR[1],LL[2]:UR[2]] + canvas[ll[0]:ur[0],ll[1]:ur[1],ll[2]:ur[2]] = self.material[LL[0]:UR[0],LL[1]:UR[1],LL[2]:UR[2]] - return self.duplicate(canvas, - origin=self.origin+offset*self.size/self.grid, - comments=self.get_comments()+[util.execution_stamp('Geom','canvas')], - autosize=True, - ) + return Geom(material = canvas, + size = self.size/self.grid*np.asarray(canvas.shape), + origin = self.origin+offset*self.size/self.grid, + comments = self.comments+[util.execution_stamp('Geom','canvas')], + ) - def substitute(self,from_microstructure,to_microstructure): + def substitute(self,from_material,to_material): """ - Substitute microstructure indices. + Substitute material indices. Parameters ---------- - from_microstructure : iterable of ints - Microstructure indices to be substituted. - to_microstructure : iterable of ints - New microstructure indices. + from_material : iterable of ints + Material indices to be substituted. 
+ to_material : iterable of ints + New material indices. """ - substituted = self.get_microstructure() - for from_ms,to_ms in zip(from_microstructure,to_microstructure): - substituted[self.microstructure==from_ms] = to_ms + substituted = self.material.copy() + for from_ms,to_ms in zip(from_material,to_material): + substituted[self.material==from_ms] = to_ms - return self.duplicate(substituted, - comments=self.get_comments()+[util.execution_stamp('Geom','substitute')], - ) + return Geom(material = substituted, + size = self.size, + origin = self.origin, + comments = self.comments+[util.execution_stamp('Geom','substitute')], + ) def vicinity_offset(self,vicinity=1,offset=None,trigger=[],periodic=True): """ - Offset microstructure index of points in the vicinity of xxx. + Offset material index of points in the vicinity of xxx. Different from themselves (or listed as triggers) within a given (cubic) vicinity, i.e. within the region close to a grain/phase boundary. @@ -853,14 +677,14 @@ class Geom: Parameters ---------- vicinity : int, optional - Voxel distance checked for presence of other microstructure. + Voxel distance checked for presence of other materials. Defaults to 1. offset : int, optional - Offset (positive or negative) to tag microstructure indices, - defaults to microstructure.max() + 1. + Offset (positive or negative) to tag material indices, + defaults to material.max() + 1. trigger : list of ints, optional - List of microstructure indices triggering a change. - Defaults to [], meaning that different neigboors trigger a change. + List of material indices that trigger a change. + Defaults to [], meaning that any different neighbor triggers a change. periodic : Boolean, optional Assume geometry to be periodic. Defaults to True. @@ -876,14 +700,15 @@ class Geom: trigger = list(trigger) return np.any(np.in1d(stencil,np.array(trigger))) - offset_ = np.nanmax(self.microstructure) if offset is None else offset - mask = ndimage.filters.generic_filter(self.microstructure, + offset_ = np.nanmax(self.material) if offset is None else offset + mask = ndimage.filters.generic_filter(self.material, tainted_neighborhood, size=1+2*vicinity, mode='wrap' if periodic else 'nearest', extra_keywords={'trigger':trigger}) - microstructure = np.ma.MaskedArray(self.microstructure + offset_, np.logical_not(mask)) - return self.duplicate(microstructure, - comments=self.get_comments()+[util.execution_stamp('Geom','vicinity_offset')], - ) + return Geom(material = np.where(mask, self.material + offset_,self.material), + size = self.size, + origin = self.origin, + comments = self.comments+[util.execution_stamp('Geom','vicinity_offset')], + ) diff --git a/python/damask/_material.py b/python/damask/_material.py new file mode 100644 index 000000000..106d1582e --- /dev/null +++ b/python/damask/_material.py @@ -0,0 +1,193 @@ +from io import StringIO +import copy + +import yaml +import numpy as np + +from . import Lattice +from . 
import Rotation + +class NiceDumper(yaml.SafeDumper): + """Make YAML readable for humans.""" + + def write_line_break(self, data=None): + super().write_line_break(data) + + if len(self.indents) == 1: + super().write_line_break() + + def increase_indent(self, flow=False, indentless=False): + return super().increase_indent(flow, False) + + +class Material(dict): + """Material configuration.""" + + def __repr__(self): + """Show as in file.""" + output = StringIO() + self.save(output) + output.seek(0) + return ''.join(output.readlines()) + + @staticmethod + def load(fname): + """Load from yaml file.""" + try: + fhandle = open(fname) + except TypeError: + fhandle = fname + return Material(yaml.safe_load(fhandle)) + + def save(self,fname='material.yaml'): + """ + Save to yaml file. + + Parameters + ---------- + fname : file, str, or pathlib.Path + Filename or file for writing. + + """ + try: + fhandle = open(fname,'w') + except TypeError: + fhandle = fname + fhandle.write(yaml.dump(dict(self),width=256,default_flow_style=None,Dumper=NiceDumper)) + + + @property + def is_complete(self): + """Check for completeness.""" + ok = True + for top_level in ['homogenization','phase','microstructure']: + # ToDo: With python 3.8 as prerequisite we can shorten with := + ok &= top_level in self + if top_level not in self: print(f'{top_level} entry missing') + + if ok: + ok &= len(self['microstructure']) > 0 + if len(self['microstructure']) < 1: print('Incomplete microstructure definition') + + if ok: + homogenization = set() + phase = set() + for i,v in enumerate(self['microstructure']): + if 'homogenization' in v: + homogenization.add(v['homogenization']) + else: + print(f'No homogenization specified in microstructure {i}') + ok = False + + if 'constituents' in v: + for ii,vv in enumerate(v['constituents']): + if 'orientation' not in vv: + print(f'No orientation specified in constituent {ii} of microstructure {i}') + ok = False + if 'phase' in vv: + phase.add(vv['phase']) + else: + print(f'No phase specified in constituent {ii} of microstructure {i}') + ok = False + + for k,v in self['phase'].items(): + if 'lattice' not in v: + print(f'No lattice specified in phase {k}') + ok = False + + #for k,v in self['homogenization'].items(): + # if 'N_constituents' not in v: + # print(f'No. of constituents not specified in homogenization {k}') + # ok = False + + if phase - set(self['phase']): + print(f'Phase(s) {phase-set(self["phase"])} missing') + ok = False + if homogenization - set(self['homogenization']): + print(f'Homogenization(s) {homogenization-set(self["homogenization"])} missing') + ok = False + + return ok + + + @property + def is_valid(self): + """Check for valid file layout.""" + ok = True + + if 'phase' in self: + for k,v in self['phase'].items(): + if 'lattice' in v: + try: + Lattice(v['lattice']) + except KeyError: + s = v['lattice'] + print(f"Invalid lattice: '{s}' in phase '{k}'") + ok = False + + if 'microstructure' in self: + for i,v in enumerate(self['microstructure']): + if 'constituents' in v: + f = 0.0 + for c in v['constituents']: + f+= float(c['fraction']) + if 'orientation' in c: + try: + Rotation.from_quaternion(c['orientation']) + except ValueError: + o = c['orientation'] + print(f"Invalid orientation: '{o}' in microstructure '{i}'") + ok = False + if not np.isclose(f,1.0): + print(f"Invalid total fraction '{f}' in microstructure '{i}'") + ok = False + + return ok + + + def microstructure_rename_phase(self,mapping,ID=None,constituent=None): + """ + Change phase name in microstructure.
+ + Parameters + ---------- + mapping : dict + Mapping from old name to new name. + ID : list of ints, optional + Limit renaming to selected microstructure IDs. + constituent : list of ints, optional + Limit renaming to selected constituents. + + """ + dup = copy.deepcopy(self) + for i,m in enumerate(dup['microstructure']): + if ID and i not in ID: continue + for ii,c in enumerate(m['constituents']): + if constituent is not None and ii not in constituent: continue + try: + c['phase'] = mapping[c['phase']] + except KeyError: + continue + return dup + + + def microstructure_rename_homogenization(self,mapping,ID=None): + """ + Change homogenization name in microstructure. + + Parameters + ---------- + mapping : dict + Mapping from old name to new name. + ID : list of ints, optional + Limit renaming to selected homogenization IDs. + + """ + dup = copy.deepcopy(self) + for i,m in enumerate(dup['microstructure']): + if ID and i not in ID: continue + try: + m['homogenization'] = mapping[m['homogenization']] + except KeyError: + continue + return dup diff --git a/python/damask/_result.py b/python/damask/_result.py index 723a9f3eb..5e8a9a9d0 100644 --- a/python/damask/_result.py +++ b/python/damask/_result.py @@ -1100,7 +1100,7 @@ class Result: pool.join() - def write_XDMF(self): + def save_XDMF(self): """ Write XDMF file to directly visualize data in DADF5 file. @@ -1196,7 +1196,7 @@ class Result: f.write(xml.dom.minidom.parseString(ET.tostring(xdmf).decode()).toprettyxml()) - def to_vtk(self,labels=[],mode='cell'): + def save_vtk(self,labels=[],mode='cell'): """ Export to vtk cell/point data. @@ -1268,4 +1268,4 @@ class Result: u = self.read_dataset(self.get_dataset_location('u_n' if mode.lower() == 'cell' else 'u_p')) v.add(u,'u') - v.to_file(f'{self.fname.stem}_inc{inc[3:].zfill(N_digits)}') + v.save(f'{self.fname.stem}_inc{inc[3:].zfill(N_digits)}') diff --git a/python/damask/_table.py b/python/damask/_table.py index b4fd2975a..431cf1886 100644 --- a/python/damask/_table.py +++ b/python/damask/_table.py @@ -27,8 +27,11 @@ class Table: self.comments = [] if comments_ is None else [c for c in comments_] self.data = pd.DataFrame(data=data) self.shapes = { k:(v,) if isinstance(v,(np.int,int)) else v for k,v in shapes.items() } - self._label_condensed() + self._label_uniform() + def __repr__(self): + """Brief overview.""" + return util.srepr(self.comments)+'\n'+self.data.__repr__() def __copy__(self): """Copy Table.""" @@ -39,7 +42,7 @@ class Table: return self.__copy__() - def _label_flat(self): + def _label_discrete(self): """Label data individually, e.g. v v v ==> 1_v 2_v 3_v.""" labels = [] for label,shape in self.shapes.items(): @@ -48,8 +51,8 @@ class Table: self.data.columns = labels - def _label_condensed(self): - """Label data condensed, e.g. 1_v 2_v 3_v ==> v v v.""" + def _label_uniform(self): + """Label data uniformly, e.g. 1_v 2_v 3_v ==> v v v.""" labels = [] for label,shape in self.shapes.items(): labels += [label] * int(np.prod(shape)) @@ -64,12 +67,15 @@ class Table: @staticmethod - def from_ASCII(fname): + def load(fname): """ - Create table from ASCII file. + Load ASCII table file. - The first line can indicate the number of subsequent header lines as 'n header', - alternatively first line is the header and comments are marked by '#' ('new style'). + In legacy style, the first line indicates the number of + subsequent header lines as "N header", with the last header line being + interpreted as column labels.
+ Alternatively, initial comments are marked by '#', with the first non-comment line + containing the column labels. Vector data column labels are indicated by '1_v, 2_v, ..., n_v'. Tensor data column labels are indicated by '3x3:1_T, 3x3:2_T, ..., 3x3:9_T'. @@ -119,9 +125,9 @@ class Table: return Table(data,shapes,comments) @staticmethod - def from_ang(fname): + def load_ang(fname): """ - Create table from TSL ang file. + Load ang file. A valid TSL ang file needs to contains the following columns: * Euler angles (Bunge notation) in radians, 3 floats, label 'eu'. @@ -289,9 +295,9 @@ class Table: """ dup = self.copy() - dup._label_flat() + dup._label_discrete() dup.data.sort_values(labels,axis=0,inplace=True,ascending=ascending) - dup._label_condensed() + dup._label_uniform() dup.comments.append(f'sorted {"ascending" if ascending else "descending"} by {labels}') return dup @@ -338,59 +344,38 @@ class Table: return dup - def to_file(self,fname,format='ASCII',new_style=False): + def save(self,fname,legacy=False): """ - Store as plain text file. + Save as plain text file. Parameters ---------- fname : file, str, or pathlib.Path Filename or file for writing. - format : {ASCII'}, optional - File format, defaults to 'ASCII'. Available formats are: - - ASCII: Plain text file, extension '.txt'. - new_style : Boolean, optional - Write table in new style, indicating header lines by comment sign ('#') only. + legacy : Boolean, optional + Write table in legacy style, indicating header lines by "N header" + in contrast to using comment sign ('#') at beginning of lines. """ - def _to_ASCII(table,fname,new_style=False): - """ - Store as plain text file. + seen = set() + labels = [] + for l in [x for x in self.data.columns if not (x in seen or seen.add(x))]: + if self.shapes[l] == (1,): + labels.append(f'{l}') + elif len(self.shapes[l]) == 1: + labels += [f'{i+1}_{l}' \ + for i in range(self.shapes[l][0])] + else: + labels += [f'{util.srepr(self.shapes[l],"x")}:{i+1}_{l}' \ + for i in range(np.prod(self.shapes[l]))] - Parameters - ---------- - table : Table object - Table to write. - fname : file, str, or pathlib.Path - Filename or file for writing. - new_style : Boolean, optional - Write table in new style, indicating header lines by comment sign ('#') only. 
+ header = ([f'{len(self.comments)+1} header'] + self.comments) if legacy else \ + [f'# {comment}' for comment in self.comments] - """ - seen = set() - labels = [] - for l in [x for x in table.data.columns if not (x in seen or seen.add(x))]: - if table.shapes[l] == (1,): - labels.append(f'{l}') - elif len(table.shapes[l]) == 1: - labels += [f'{i+1}_{l}' \ - for i in range(table.shapes[l][0])] - else: - labels += [f'{util.srepr(table.shapes[l],"x")}:{i+1}_{l}' \ - for i in range(np.prod(table.shapes[l]))] + try: + fhandle = open(fname,'w') + except TypeError: + fhandle = fname - header = [f'# {comment}' for comment in table.comments] if new_style else \ - [f'{len(table.comments)+1} header'] + table.comments - - try: - f = open(fname,'w') - except TypeError: - f = fname - - for line in header + [' '.join(labels)]: f.write(line+'\n') - table.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False) - - if format.lower() == 'ascii': - return _to_ASCII(self,fname,new_style) - else: - raise TypeError(f'Unknown format {format}.') + for line in header + [' '.join(labels)]: fhandle.write(line+'\n') + self.data.to_csv(fhandle,sep=' ',na_rep='nan',index=False,header=False) diff --git a/python/damask/_test.py b/python/damask/_test.py index 78616fabf..5cadc9dfe 100644 --- a/python/damask/_test.py +++ b/python/damask/_test.py @@ -228,7 +228,7 @@ class Test: def copy_Base2Current(self,sourceDir,sourcefiles=[],targetfiles=[]): - source=os.path.normpath(os.path.join(self.dirBase,'../../..',sourceDir)) + source = os.path.normpath(os.path.join(self.dirBase,'../../..',sourceDir)) if len(targetfiles) == 0: targetfiles = sourcefiles for i,f in enumerate(sourcefiles): try: @@ -287,30 +287,30 @@ class Test: import numpy as np logging.info('\n '.join(['comparing',File1,File2])) - table = damask.Table.from_ASCII(File1) - len1=len(table.comments)+2 - table = damask.Table.from_ASCII(File2) - len2=len(table.comments)+2 + table = damask.Table.load(File1) + len1 = len(table.comments)+2 + table = damask.Table.load(File2) + len2 = len(table.comments)+2 refArray = np.nan_to_num(np.genfromtxt(File1,missing_values='n/a',skip_header = len1,autostrip=True)) curArray = np.nan_to_num(np.genfromtxt(File2,missing_values='n/a',skip_header = len2,autostrip=True)) if len(curArray) == len(refArray): refArrayNonZero = refArray[refArray.nonzero()] - curArray = curArray[refArray.nonzero()] - max_err=np.max(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.)) - max_loc=np.argmax(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.)) + curArray = curArray[refArray.nonzero()] + max_err = np. 
max(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.)) + max_loc = np.argmax(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.)) refArrayNonZero = refArrayNonZero[curArray.nonzero()] - curArray = curArray[curArray.nonzero()] + curArray = curArray[curArray.nonzero()] print(f' ********\n * maximum relative error {max_err} between {refArrayNonZero[max_loc]} and {curArray[max_loc]}\n ********') return max_err else: - raise Exception('mismatch in array size to compare') + raise Exception(f'mismatch in array sizes ({len(refArray)} and {len(curArray)}) to compare') def compare_ArrayRefCur(self,ref,cur=''): - if cur =='': cur = ref + if cur == '': cur = ref refName = self.fileInReference(ref) curName = self.fileInCurrent(cur) return self.compare_Array(refName,curName) @@ -331,7 +331,7 @@ class Test: logging.info('\n '.join(['comparing ASCII Tables',file0,file1])) if normHeadings == '': normHeadings = headings0 -# check if comparison is possible and determine lenght of columns +# check if comparison is possible and determine length of columns if len(headings0) == len(headings1) == len(normHeadings): dataLength = len(headings0) length = [1 for i in range(dataLength)] @@ -399,10 +399,8 @@ class Test: if any(norm[i]) == 0.0 or absTol[i]: norm[i] = [1.0 for j in range(line0-len(skipLines))] absTol[i] = True - if perLine: - logging.warning(f"At least one norm of \"{headings0[i]['label']}\" in first table is 0.0, using absolute tolerance") - else: - logging.warning(f"Maximum norm of \"{headings0[i]['label']}\" in first table is 0.0, using absolute tolerance") + logging.warning(f'''{"At least one" if perLine else "Maximum"} norm of + "{headings0[i]['label']}" in first table is 0.0, using absolute tolerance''') line1 = 0 while table1.data_read(): # read next data line of ASCII table @@ -418,20 +416,18 @@ class Test: logging.info(' ********') for i in range(dataLength): - if absTol[i]: - logging.info(f" * maximum absolute error {maxError[i]} between {headings0[i]['label']} and {headings1[i]['label']}") - else: - logging.info(f" * maximum relative error {maxError[i]} between {headings0[i]['label']} and {headings1[i]['label']}") + logging.info(f''' * maximum {'absolute' if absTol[i] else 'relative'} error {maxError[i]} + between {headings0[i]['label']} and {headings1[i]['label']}''') logging.info(' ********') return maxError def compare_TablesStatistically(self, - files = [None,None], # list of file names - columns = [None], # list of list of column labels (per file) - meanTol = 1.0e-4, - stdTol = 1.0e-6, - preFilter = 1.0e-9): + files = [None,None], # list of file names + columns = [None], # list of list of column labels (per file) + meanTol = 1.0e-4, + stdTol = 1.0e-6, + preFilter = 1.0e-9): """ Calculate statistics of tables. 
@@ -440,9 +436,9 @@ class Test: if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested files = [str(files)] - tables = [damask.Table.from_ASCII(filename) for filename in files] + tables = [damask.Table.load(filename) for filename in files] for table in tables: - table._label_flat() + table._label_discrete() columns += [columns[0]]*(len(files)-len(columns)) # extend to same length as files columns = columns[:len(files)] # truncate to same length as files @@ -462,7 +458,7 @@ class Test: data = [] for table,labels in zip(tables,columns): - table._label_condensed() + table._label_uniform() data.append(np.hstack(list(table.get(label) for label in labels))) @@ -471,12 +467,11 @@ class Test: normBy = (np.abs(data[i]) + np.abs(data[i-1]))*0.5 normedDelta = np.where(normBy>preFilter,delta/normBy,0.0) mean = np.amax(np.abs(np.mean(normedDelta,0))) - std = np.amax(np.std(normedDelta,0)) + std = np.amax(np.std(normedDelta,0)) logging.info(f'mean: {mean:f}') logging.info(f'std: {std:f}') - return (mean + + + AQAAAACAAABJAAAATgAAAA==eF4FwUEKgCAUBNCO4rIWX8ZJsbxA5/iUFqQVBJ2/9zZt+p52yXeza816mW+0sBCtz6HCGGSPE1wJjMX0BCGYhTQuJLrkKfDA0P0d3xK6 + + - + AQAAAACAAAAABQAAZwAAAA==eF7t0rcOgmAAhVEgNmyo2AuoWN//BR04EwsJcfzvcvabL47qxcFOJg177HPAIUdMOeaEU844Z8YFl1wx55obbrnjngceeeKZFxYseeWNd1Z88MkX3/zwy+Z/wf8YOqzX1uEPlgwHCA== diff --git a/python/tests/reference/Geom/clean_2_1+2+3_True.vtr b/python/tests/reference/Geom/clean_2_1+2+3_True.vtr index eb2d1b64a..eafbde5a4 100644 --- a/python/tests/reference/Geom/clean_2_1+2+3_True.vtr +++ b/python/tests/reference/Geom/clean_2_1+2+3_True.vtr @@ -1,11 +1,16 @@ + + + AQAAAACAAABJAAAATwAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMoM9Yz0DPQTcwpyEjUNTI31U03tzAwTDM1Mk9T0DAyMDLQNbDUNTJSMDS1MjK0MgFyTQwMNBkAHc8SuA== + + - + AQAAAACAAAAABQAAagAAAA==eF7t0rkOglAARFExLrgCKuKuqLj8/w9acCoSY7B+05x+cqNOvSj4l92GPfY54JAxRxxzwilnnDNhyowLLrlizjULbrjljnseeOSJZ15Y8sob76z44JMvvtn8L9jObz2GDuv96vADk5QHBg== diff --git a/python/tests/reference/Geom/clean_2_1_False.vtr b/python/tests/reference/Geom/clean_2_1_False.vtr index e77743025..c04d48be6 100644 --- a/python/tests/reference/Geom/clean_2_1_False.vtr +++ b/python/tests/reference/Geom/clean_2_1_False.vtr @@ -1,11 +1,16 @@ + + + AQAAAACAAABJAAAATwAAAA==eF4FwdEJgDAMBUBH6ad+JLzElmoXcI6grYKtCoLze7dZs/fkJd+N15rtct/IYJDV5zDSGGiPE6QEjcX1CgVhJlUnIakkLwQPDN0PHdcSuQ== + + - + AQAAAACAAAAABQAAZAAAAA==eF7t0scRglAAQEEBAyZUMCuomPtv0ANbgMNw/O+yDbyo1xQFWxkzYZ8DDjliyjEnnHLGOTMuuOSKOQuuueGWO+554JEnnlmy4oVX3ljzzgeffPHND7+Mg50aPmz698MfmvQHCg== diff --git a/python/tests/reference/Geom/clean_2_1_True.vtr b/python/tests/reference/Geom/clean_2_1_True.vtr index d8a3d4169..19c277c7f 100644 --- a/python/tests/reference/Geom/clean_2_1_True.vtr +++ b/python/tests/reference/Geom/clean_2_1_True.vtr @@ -1,11 +1,16 @@ + + + AQAAAACAAABJAAAATwAAAA==eF4FwVEKgCAQBcCO4md97PJcE9MLdI6ltCCtIOj8zuza9Lt4zU/jrWa9ze8YDNL6nkoSPB1hgS1eQjGjQECIJGKsT2KTi4QZmIYOHg4SwA== + + - + AQAAAACAAAAABQAAYwAAAA==eF7t0scBgkAAAEHBgBEwgDmBsf8GfTANCN/bzzSwUa8pCrYyZp8DDjliwjEnnHLGORdMmTHnkiuuuWHBklvuuOeBR5545oVX3nhnxZoPPvnimx9+GQc7GT5sqvjvhz+ZtAcJ diff --git a/python/tests/reference/Geom/clean_2_None_False.vtr b/python/tests/reference/Geom/clean_2_None_False.vtr index 068186206..58e8cb8fb 100644 --- a/python/tests/reference/Geom/clean_2_None_False.vtr +++ b/python/tests/reference/Geom/clean_2_None_False.vtr @@ -1,11 +1,16 @@ + + + AQAAAACAAABJAAAATwAAAA==eF4FwdEJgDAMBUBH6ad+JLzElmoXcI6grYKtCoLze7dZs/fkJd+N15rtct/IYJDV5zDSGGiPE6QEjcX1CgVhJlUnIakkLwQPDN0PHdcSuQ== + + - + 
AQAAAACAAAAABQAAGwAAAA==eF5jZIAAxlF6lB4AmmmUpogeDUfKaAD7jwDw diff --git a/python/tests/reference/Geom/clean_2_None_True.vtr b/python/tests/reference/Geom/clean_2_None_True.vtr index e72fa97e1..37bb3b08b 100644 --- a/python/tests/reference/Geom/clean_2_None_True.vtr +++ b/python/tests/reference/Geom/clean_2_None_True.vtr @@ -1,11 +1,16 @@ + + + AQAAAACAAABJAAAATwAAAA==eF4FwVEKgCAQBcCO4md97PJcE9MLdI6ltCCtIOj8zuza9Lt4zU/jrWa9ze8YDNL6nkoSPB1hgS1eQjGjQECIJGKsT2KTi4QZmIYOHg4SwA== + + - + AQAAAACAAAAABQAAGQAAAA==eF5jZIAAxlF6lB4AmmmUHqUHkAYA/M8A8Q== diff --git a/python/tests/reference/Geom/clean_3_1+2+3_False.vtr b/python/tests/reference/Geom/clean_3_1+2+3_False.vtr index 5400fcdb6..955c58a2a 100644 --- a/python/tests/reference/Geom/clean_3_1+2+3_False.vtr +++ b/python/tests/reference/Geom/clean_3_1+2+3_False.vtr @@ -1,11 +1,16 @@ + + + AQAAAACAAABJAAAATgAAAA==eF4FwUEKgCAUBNCO4rIWX8ZJsbxA5/iUFqQVBJ2/9zZt+p52yXeza816mW+0sBCtz6HCGGSPE1wJjMX0BCGYhTQuJLrkKfDA0P0d3xK6 + + - + AQAAAACAAAAABQAAZwAAAA==eF7t0rcOgmAAhVEgNmyo2AuoWN//BR04EwsJcfzvcvabL47qxcFOJg177HPAIUdMOeaEU844Z8YFl1wx55obbrnjngceeeKZFxYseeWNd1Z88MkX3/zwy+Z/wf8YOqzX1uEPlgwHCA== diff --git a/python/tests/reference/Geom/clean_3_1+2+3_True.vtr b/python/tests/reference/Geom/clean_3_1+2+3_True.vtr index eb2d1b64a..eafbde5a4 100644 --- a/python/tests/reference/Geom/clean_3_1+2+3_True.vtr +++ b/python/tests/reference/Geom/clean_3_1+2+3_True.vtr @@ -1,11 +1,16 @@ + + + AQAAAACAAABJAAAATwAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMoM9Yz0DPQTcwpyEjUNTI31U03tzAwTDM1Mk9T0DAyMDLQNbDUNTJSMDS1MjK0MgFyTQwMNBkAHc8SuA== + + - + AQAAAACAAAAABQAAagAAAA==eF7t0rkOglAARFExLrgCKuKuqLj8/w9acCoSY7B+05x+cqNOvSj4l92GPfY54JAxRxxzwilnnDNhyowLLrlizjULbrjljnseeOSJZ15Y8sob76z44JMvvtn8L9jObz2GDuv96vADk5QHBg== diff --git a/python/tests/reference/Geom/clean_3_1_False.vtr b/python/tests/reference/Geom/clean_3_1_False.vtr index e77743025..c04d48be6 100644 --- a/python/tests/reference/Geom/clean_3_1_False.vtr +++ b/python/tests/reference/Geom/clean_3_1_False.vtr @@ -1,11 +1,16 @@ + + + AQAAAACAAABJAAAATwAAAA==eF4FwdEJgDAMBUBH6ad+JLzElmoXcI6grYKtCoLze7dZs/fkJd+N15rtct/IYJDV5zDSGGiPE6QEjcX1CgVhJlUnIakkLwQPDN0PHdcSuQ== + + - + AQAAAACAAAAABQAAZAAAAA==eF7t0scRglAAQEEBAyZUMCuomPtv0ANbgMNw/O+yDbyo1xQFWxkzYZ8DDjliyjEnnHLGOTMuuOSKOQuuueGWO+554JEnnlmy4oVX3ljzzgeffPHND7+Mg50aPmz698MfmvQHCg== diff --git a/python/tests/reference/Geom/clean_3_1_True.vtr b/python/tests/reference/Geom/clean_3_1_True.vtr index d8a3d4169..2fb4afd9c 100644 --- a/python/tests/reference/Geom/clean_3_1_True.vtr +++ b/python/tests/reference/Geom/clean_3_1_True.vtr @@ -1,11 +1,16 @@ + + + AQAAAACAAABJAAAATwAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMoM9Yz0DPQTcwpyEjUNTI31U03tzAwTDM1Mk9T0DAyMDLQNbDUNTJSMDS1MjK0MgFyTQwMNBkAHc8SuA== + + - + AQAAAACAAAAABQAAYwAAAA==eF7t0scBgkAAAEHBgBEwgDmBsf8GfTANCN/bzzSwUa8pCrYyZp8DDjliwjEnnHLGORdMmTHnkiuuuWHBklvuuOeBR5545oVX3nhnxZoPPvnimx9+GQc7GT5sqvjvhz+ZtAcJ diff --git a/python/tests/reference/Geom/clean_3_None_False.vtr b/python/tests/reference/Geom/clean_3_None_False.vtr index f7cd54cc0..d63601aa6 100644 --- a/python/tests/reference/Geom/clean_3_None_False.vtr +++ b/python/tests/reference/Geom/clean_3_None_False.vtr @@ -1,11 +1,16 @@ + + + AQAAAACAAABJAAAATwAAAA==eF4FwdEJgDAMBUBH6ad+JLzElmoXcI6grYKtCoLze7dZs/fkJd+N15rtct/IYJDV5zDSGGiPE6QEjcX1CgVhJlUnIakkLwQPDN0PHdcSuQ== + + - + AQAAAACAAAAABQAAIgAAAA==eF5jZIAAxlGaLJoJjSakntr6hzqN7v9RepSmJw0AC04A9Q== diff --git a/python/tests/reference/Geom/clean_3_None_True.vtr b/python/tests/reference/Geom/clean_3_None_True.vtr index 2ebca1695..ffdb40218 100644 --- 
a/python/tests/reference/Geom/clean_3_None_True.vtr +++ b/python/tests/reference/Geom/clean_3_None_True.vtr @@ -1,11 +1,16 @@ + + + AQAAAACAAABJAAAATwAAAA==eF4FwVEKgCAQBcCO4md97PJcE9MLdI6ltCCtIOj8zuza9Lt4zU/jrWa9ze8YDNL6nkoSPB1hgS1eQjGjQECIJGKsT2KTi4QZmIYOHg4SwA== + + - + AQAAAACAAAAABQAALwAAAA==eF5jZIAAxlGaLJoJjSakHpc+cvUTUkdrmlL3j9KU0dROF5TqH2iaVPcDAALOANU= diff --git a/python/tests/reference/Geom/clean_4_1+2+3_False.vtr b/python/tests/reference/Geom/clean_4_1+2+3_False.vtr index 20f83428f..dfb12dae0 100644 --- a/python/tests/reference/Geom/clean_4_1+2+3_False.vtr +++ b/python/tests/reference/Geom/clean_4_1+2+3_False.vtr @@ -1,11 +1,16 @@ + + + AQAAAACAAABJAAAATgAAAA==eF4FwUEKgCAUBNCO4rIWX8ZJsbxA5/iUFqQVBJ2/9zZt+p52yXeza816mW+0sBCtz6HCGGSPE1wJjMX0BCGYhTQuJLrkKfDA0P0d3xK6 + + - + AQAAAACAAAAABQAAcQAAAA==eF7t0rkOglAUBFAxKu6igvsKrv//gxYcm9fQGEPBNKe6yc1kolaZqPEndthljzH7HHDIEceccMoZE8654JIpM6645oZb7rjngUeeeOaFV+YseOOdDz754pthf+3Aqr7rdv9vw3+/NjssU7XDD0/8BuQ= diff --git a/python/tests/reference/Geom/clean_4_1+2+3_True.vtr b/python/tests/reference/Geom/clean_4_1+2+3_True.vtr index ae1490bce..5c228bb32 100644 --- a/python/tests/reference/Geom/clean_4_1+2+3_True.vtr +++ b/python/tests/reference/Geom/clean_4_1+2+3_True.vtr @@ -1,11 +1,16 @@ + + + AQAAAACAAABJAAAATwAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMoM9Yz0DPQTcwpyEjUNTI31U03tzAwTDM1Mk9T0DAyMDLQNbDUNTJSMDS1MjK0MgFyTQwMNBkAHc8SuA== + + - + AQAAAACAAAAABQAAYQAAAA==eF7t0scVglAAAEHgqZgBA2ZExdR/gx6YCpDj38s0sEnUlgR7ccAhR0w55oRTzjjngktmzFlwxTU33LLkjnseeOSJZ15Y8cqaN975YMMnX3zzwy/j4F+GD9u6fvgD+gwHCA== diff --git a/python/tests/reference/Geom/clean_4_1_False.vtr b/python/tests/reference/Geom/clean_4_1_False.vtr index e77743025..1d4fe29b9 100644 --- a/python/tests/reference/Geom/clean_4_1_False.vtr +++ b/python/tests/reference/Geom/clean_4_1_False.vtr @@ -1,11 +1,16 @@ + + + AQAAAACAAABJAAAATgAAAA==eF4FwUEKgCAUBNCO4rIWX8ZJsbxA5/iUFqQVBJ2/9zZt+p52yXeza816mW+0sBCtz6HCGGSPE1wJjMX0BCGYhTQuJLrkKfDA0P0d3xK6 + + - + AQAAAACAAAAABQAAZAAAAA==eF7t0scRglAAQEEBAyZUMCuomPtv0ANbgMNw/O+yDbyo1xQFWxkzYZ8DDjliyjEnnHLGOTMuuOSKOQuuueGWO+554JEnnlmy4oVX3ljzzgeffPHND7+Mg50aPmz698MfmvQHCg== diff --git a/python/tests/reference/Geom/clean_4_1_True.vtr b/python/tests/reference/Geom/clean_4_1_True.vtr index d042cf7b4..8e8b5b8ef 100644 --- a/python/tests/reference/Geom/clean_4_1_True.vtr +++ b/python/tests/reference/Geom/clean_4_1_True.vtr @@ -1,11 +1,16 @@ + + + AQAAAACAAABJAAAATwAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMoM9Yz0DPQTcwpyEjUNTI31U03tzAwTDM1Mk9T0DAyMDLQNbDUNTJSMDS1MjK0MgFyTQwMNBkAHc8SuA== + + - + AQAAAACAAAAABQAAZAAAAA==eF7t0rcSglAARFEHE0bAgBkE8///oAWnF8b2bXP6nRv1mkXBv+xzwCFHHDPmhFPOOOeCSyZMmXHFNTfcMueOex545IlnXliw5JUVa95454NPvvjmh79+DXYzdNisbYdfSqMHMg== diff --git a/python/tests/reference/Geom/clean_4_None_False.vtr b/python/tests/reference/Geom/clean_4_None_False.vtr index 686f5a190..91863290c 100644 --- a/python/tests/reference/Geom/clean_4_None_False.vtr +++ b/python/tests/reference/Geom/clean_4_None_False.vtr @@ -1,11 +1,16 @@ + + + AQAAAACAAABJAAAATwAAAA==eF4FwdEJgDAMBUBH6ad+JLzElmoXcI6grYKtCoLze7dZs/fkJd+N15rtct/IYJDV5zDSGGiPE6QEjcX1CgVhJlUnIakkLwQPDN0PHdcSuQ== + + - + AQAAAACAAAAABQAAIAAAAA==eF5jZIAAxlF6lB4AmokAPdj1DzRNyP2jNH4aAMufANU= diff --git a/python/tests/reference/Geom/clean_4_None_True.vtr b/python/tests/reference/Geom/clean_4_None_True.vtr index 66109b418..8e3375229 100644 --- a/python/tests/reference/Geom/clean_4_None_True.vtr +++ b/python/tests/reference/Geom/clean_4_None_True.vtr @@ -1,11 +1,16 @@ + + + 
AQAAAACAAABJAAAATwAAAA==eF4FwVEKgCAQBcCO4md97PJcE9MLdI6ltCCtIOj8zuza9Lt4zU/jrWa9ze8YDNL6nkoSPB1hgS1eQjGjQECIJGKsT2KTi4QZmIYOHg4SwA== + + - + AQAAAACAAAAABQAAMAAAAA==eF5jYoAAJhw0IwEalz566aeUptT+oa6fUppS+4e6fkppSu0f6voppSm1HwBAngDh diff --git a/python/tests/reference/Material/material.yaml b/python/tests/reference/Material/material.yaml new file mode 100644 index 000000000..1d3cf08a8 --- /dev/null +++ b/python/tests/reference/Material/material.yaml @@ -0,0 +1,42 @@ +homogenization: + SX: + mech: {type: none} + Taylor: + mech: {type: isostrain, N_constituents: 2} + +microstructure: + - constituents: + - fraction: 1.0 + orientation: [1.0, 0.0, 0.0, 0.0] + phase: Aluminum + homogenization: SX + - constituents: + - fraction: 1.0 + orientation: [0.7936696712125002, -0.28765777461664166, -0.3436487135089419, 0.4113964260949434] + phase: Aluminum + homogenization: SX + - constituents: + - fraction: 1.0 + orientation: [0.3986143167493579, -0.7014883552495493, 0.2154871765709027, 0.5500781677772945] + phase: Aluminum + homogenization: SX + - homogenization: Taylor + constituents: + - fraction: .5 + orientation: [0.28645844315788244, -0.022571491243423537, -0.467933059311115, -0.8357456192708106] + phase: Aluminum + - fraction: .5 + orientation: [0.3986143167493579, -0.7014883552495493, 0.2154871765709027, 0.5500781677772945] + phase: Steel + +phase: + Aluminum: + elasticity: {C_11: 106.75e9, C_12: 60.41e9, C_44: 28.34e9, type: hooke} + generic: + output: [F, P, Fe, Fp, Lp] + lattice: fcc + Steel: + elasticity: {C_11: 233.3e9, C_12: 135.5e9, C_44: 118.0e9, type: hooke} + generic: + output: [F, P, Fe, Fp, Lp] + lattice: bcc diff --git a/python/tests/reference/Rotation/Visualize_hybridIA_sampling.ipynb b/python/tests/reference/Rotation/Visualize_hybridIA_sampling.ipynb new file mode 100644 index 000000000..2f6b8cb22 --- /dev/null +++ b/python/tests/reference/Rotation/Visualize_hybridIA_sampling.ipynb @@ -0,0 +1,276 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "import damask\n", + "\n", + "from pathlib import Path" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "orientations,rODF = damask.Rotation.from_ODF('hybridIA_ODF.txt',\n", + " 2**14,\n", + " degrees=True,\n", + " reconstruct=True,\n", + " fractions=True,\n", + " seed=0)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "VTK = damask.VTK.from_rectilinearGrid([36,36,36],[90,90,90])\n", + "VTK.add(damask.Table.from_ASCII('hybridIA_ODF.txt').get('intensity'),'intensity')\n", + "VTK.add(rODF.flatten(order='F'),'rODF')\n", + "VTK.to_file('hybridIA_ODF.vtr')" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Help on class VTK in module damask._vtk:\n", + "\n", + "class VTK(builtins.object)\n", + " | VTK(geom)\n", + " | \n", + " | Spatial visualization (and potentially manipulation).\n", + " | \n", + " | High-level interface to VTK.\n", + " | \n", + " | Methods defined here:\n", + " | \n", + " | __init__(self, geom)\n", + " | Set geometry and topology.\n", + " | \n", + " | Parameters\n", + " | ----------\n", + " | geom : subclass of vtk.vtkDataSet\n", + " | Description of geometry and topology. 
Valid types are vtk.vtkRectilinearGrid,\n", + " | vtk.vtkUnstructuredGrid, or vtk.vtkPolyData.\n", + " | \n", + " | __repr__(self)\n", + " | ASCII representation of the VTK data.\n", + " | \n", + " | add(self, data, label=None)\n", + " | Add data to either cells or points.\n", + " | \n", + " | Parameters\n", + " | ----------\n", + " | data : numpy.ndarray\n", + " | Data to add. First dimension need to match either\n", + " | number of cells or number of points\n", + " | label : str\n", + " | Data label.\n", + " | \n", + " | add_comments(self, comments)\n", + " | Add Comments.\n", + " | \n", + " | Parameters\n", + " | ----------\n", + " | comments : str or list of str\n", + " | Comments to add.\n", + " | \n", + " | get(self, label)\n", + " | Get either cell or point data.\n", + " | \n", + " | Cell data takes precedence over point data, i.e. this\n", + " | function assumes that labels are unique among cell and\n", + " | point data.\n", + " | \n", + " | Parameters\n", + " | ----------\n", + " | label : str\n", + " | Data label.\n", + " | \n", + " | get_comments(self)\n", + " | Return the comments.\n", + " | \n", + " | set_comments(self, comments)\n", + " | Set Comments.\n", + " | \n", + " | Parameters\n", + " | ----------\n", + " | comments : str or list of str\n", + " | Comments.\n", + " | \n", + " | show(self)\n", + " | Render.\n", + " | \n", + " | See http://compilatrix.com/article/vtk-1 for further ideas.\n", + " | \n", + " | write(self, fname, parallel=True)\n", + " | Write to file.\n", + " | \n", + " | Parameters\n", + " | ----------\n", + " | fname : str or pathlib.Path\n", + " | Filename for writing.\n", + " | parallel : boolean, optional\n", + " | Write data in parallel background process. Defaults to True.\n", + " | \n", + " | ----------------------------------------------------------------------\n", + " | Static methods defined here:\n", + " | \n", + " | from_file(fname, dataset_type=None)\n", + " | Create VTK from file.\n", + " | \n", + " | Parameters\n", + " | ----------\n", + " | fname : str or pathlib.Path\n", + " | Filename for reading. Valid extensions are .vtr, .vtu, .vtp, and .vtk.\n", + " | dataset_type : str, optional\n", + " | Name of the vtk.vtkDataSet subclass when opening an .vtk file. 
Valid types are vtkRectilinearGrid,\n", + " | vtkUnstructuredGrid, and vtkPolyData.\n", + " | \n", + " | from_polyData(points)\n", + " | Create VTK of type vtk.polyData.\n", + " | \n", + " | This is the common type for point-wise data.\n", + " | \n", + " | Parameters\n", + " | ----------\n", + " | points : numpy.ndarray of shape (:,3)\n", + " | Spatial position of the points.\n", + " | \n", + " | from_rectilinearGrid(grid, size, origin=array([0., 0., 0.]))\n", + " | Create VTK of type vtk.vtkRectilinearGrid.\n", + " | \n", + " | This is the common type for results from the grid solver.\n", + " | \n", + " | Parameters\n", + " | ----------\n", + " | grid : numpy.ndarray of shape (3) of np.dtype = int\n", + " | Number of cells.\n", + " | size : numpy.ndarray of shape (3)\n", + " | Physical length.\n", + " | origin : numpy.ndarray of shape (3), optional\n", + " | Spatial origin.\n", + " | \n", + " | from_unstructuredGrid(nodes, connectivity, cell_type)\n", + " | Create VTK of type vtk.vtkUnstructuredGrid.\n", + " | \n", + " | This is the common type for results from FEM solvers.\n", + " | \n", + " | Parameters\n", + " | ----------\n", + " | nodes : numpy.ndarray of shape (:,3)\n", + " | Spatial position of the nodes.\n", + " | connectivity : numpy.ndarray of np.dtype = int\n", + " | Cell connectivity (0-based), first dimension determines #Cells, second dimension determines #Nodes/Cell.\n", + " | cell_type : str\n", + " | Name of the vtk.vtkCell subclass. Tested for TRIANGLE, QUAD, TETRA, and HEXAHEDRON.\n", + " | \n", + " | ----------------------------------------------------------------------\n", + " | Data descriptors defined here:\n", + " | \n", + " | __dict__\n", + " | dictionary for instance variables (if defined)\n", + " | \n", + " | __weakref__\n", + " | list of weak references to the object (if defined)\n", + "\n" + ] + } + ], + "source": [ + "help(damask.VTK)" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [], + "source": [ + "a,b=np.radians(([90,90],[45,45]))" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([1.57079633, 1.57079633])" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "a" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([0.78539816, 0.78539816])" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "b" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.5" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/python/tests/test_Colormap.py b/python/tests/test_Colormap.py index 22c208551..7f12469f8 100644 --- a/python/tests/test_Colormap.py +++ b/python/tests/test_Colormap.py @@ -75,41 +75,36 @@ class TestColormap: assert np.allclose(Colormap._xyz2msh(xyz),msh,atol=1.e-6,rtol=0) - @pytest.mark.parametrize('format',['ASCII','paraview','GOM','Gmsh']) + 
@pytest.mark.parametrize('format',['ASCII','paraview','GOM','gmsh']) @pytest.mark.parametrize('model',['rgb','hsv','hsl','xyz','lab','msh']) def test_from_range(self,model,format,tmpdir): N = np.random.randint(2,256) - c = Colormap.from_range(np.random.rand(3),np.random.rand(3),model=model,N=N) - c.to_file(tmpdir/'color_out',format=format) + c = Colormap.from_range(np.random.rand(3),np.random.rand(3),model=model,N=N) # noqa + eval(f'c.save_{format}(tmpdir/"color_out")') - @pytest.mark.parametrize('format',['ASCII','paraview','GOM','Gmsh']) + @pytest.mark.parametrize('format',['ASCII','paraview','GOM','gmsh']) @pytest.mark.parametrize('name',['strain','gnuplot','Greys','PRGn','viridis']) def test_from_predefined(self,name,format,tmpdir): N = np.random.randint(2,256) - c = Colormap.from_predefined(name,N) + c = Colormap.from_predefined(name,N) # noqa os.chdir(tmpdir) - c.to_file(format=format) + eval(f'c.save_{format}()') @pytest.mark.parametrize('format,name',[('ASCII','test.txt'), ('paraview','test.json'), ('GOM','test.legend'), - ('Gmsh','test.msh') + ('gmsh','test.msh') ]) def test_write_filehandle(self,format,name,tmpdir): - c = Colormap.from_predefined('Dark2') + c = Colormap.from_predefined('Dark2') # noqa fname = tmpdir/name - with open(fname,'w') as f: - c.to_file(f,format=format) + with open(fname,'w') as f: # noqa + eval(f'c.save_{format}(f)') for i in range(10): if fname.exists(): return time.sleep(.5) assert False - def test_write_invalid_format(self): - c = Colormap.from_predefined('Dark2') - with pytest.raises(ValueError): - c.to_file(format='invalid') - @pytest.mark.parametrize('model',['rgb','hsv','hsl','lab','invalid']) def test_invalid_color(self,model): with pytest.raises(ValueError): @@ -119,13 +114,13 @@ class TestColormap: c_1 = Colormap.from_predefined('stress') c_2 = c_1.reversed() assert (not np.allclose(c_1.colors,c_2.colors)) and \ - np.allclose(c_1.colors,c_2.reversed().colors) + np.allclose(c_1.colors,c_2.reversed().colors) def test_invert(self): c_1 = Colormap.from_predefined('strain') c_2 = ~c_1 - assert (not np.allclose(c_1.colors,c_2.colors)) and \ - np.allclose(c_1.colors,(~c_2).colors) + assert (not np.allclose(c_1.colors, c_2.colors)) and \ + np.allclose(c_1.colors,(~c_2).colors) def test_add(self): c = Colormap.from_predefined('jet') @@ -149,16 +144,16 @@ class TestColormap: @pytest.mark.parametrize('format,ext',[('ASCII','.txt'), ('paraview','.json'), ('GOM','.legend'), - ('Gmsh','.msh') + ('gmsh','.msh') ]) def test_compare_reference(self,format,ext,tmpdir,reference_dir,update): name = 'binary' - c = Colormap.from_predefined(name) + c = Colormap.from_predefined(name) # noqa if update: os.chdir(reference_dir) - c.to_file(format=format) + eval(f'c.save_{format}()') else: os.chdir(tmpdir) - c.to_file(format=format) + eval(f'c.save_{format}()') time.sleep(.5) assert filecmp.cmp(tmpdir/(name+ext),reference_dir/(name+ext)) diff --git a/python/tests/test_Geom.py b/python/tests/test_Geom.py index 870cc7469..ac4a8eac3 100644 --- a/python/tests/test_Geom.py +++ b/python/tests/test_Geom.py @@ -11,9 +11,9 @@ from damask import util def geom_equal(a,b): - return np.all(a.get_microstructure() == b.get_microstructure()) and \ - np.all(a.get_grid() == b.get_grid()) and \ - np.allclose(a.get_size(), b.get_size()) and \ + return np.all(a.material == b.material) and \ + np.all(a.grid == b.grid) and \ + np.allclose(a.size, b.size) and \ str(a.diff(b)) == str(b.diff(a)) @pytest.fixture @@ -33,115 +33,89 @@ def reference_dir(reference_dir_base): class TestGeom: - 
@pytest.mark.parametrize('flavor',['plain','explicit']) - def test_duplicate(self,default,flavor): - if flavor == 'plain': - modified = default.duplicate() - elif flavor == 'explicit': - modified = default.duplicate( - default.get_microstructure(), - default.get_size(), - default.get_origin() - ) - print(modified) - assert geom_equal(default,modified) - def test_diff_equal(self,default): assert str(default.diff(default)) == '' + def test_diff_not_equal(self,default): - new = Geom(default.microstructure[1:,1:,1:]+1,default.size*.9,np.ones(3)-default.origin,comments=['modified']) + new = Geom(default.material[1:,1:,1:]+1,default.size*.9,np.ones(3)-default.origin,comments=['modified']) assert str(default.diff(new)) != '' - @pytest.mark.parametrize('masked',[True,False]) - def test_set_microstructure(self,default,masked): - old = default.get_microstructure() - new = np.random.randint(200,size=default.grid) - default.set_microstructure(np.ma.MaskedArray(new,np.full_like(new,masked))) - assert np.all(default.microstructure==(old if masked else new)) - - def test_write_read_str(self,default,tmpdir): - default.to_file(str(tmpdir/'default.geom'),format='ASCII') - new = Geom.from_file(str(tmpdir/'default.geom')) + default.save_ASCII(str(tmpdir/'default.geom')) + new = Geom.load_ASCII(str(tmpdir/'default.geom')) assert geom_equal(default,new) + def test_write_read_file(self,default,tmpdir): with open(tmpdir/'default.geom','w') as f: - default.to_file(f,format='ASCII',pack=True) + default.save_ASCII(f,compress=True) with open(tmpdir/'default.geom') as f: - new = Geom.from_file(f) + new = Geom.load_ASCII(f) assert geom_equal(default,new) - def test_write_as_ASCII(self,default,tmpdir): - with open(tmpdir/'str.geom','w') as f: - f.write(default.as_ASCII()) - with open(tmpdir/'str.geom') as f: - new = Geom.from_file(f) - assert geom_equal(default,new) def test_read_write_vtr(self,default,tmpdir): - default.to_file(tmpdir/'default',format='vtr') + default.save(tmpdir/'default') for _ in range(10): time.sleep(.2) if os.path.exists(tmpdir/'default.vtr'): break - new = Geom.from_vtr(tmpdir/'default.vtr') + new = Geom.load(tmpdir/'default.vtr') assert geom_equal(new,default) + def test_invalid_geom(self,tmpdir): with open('invalid_file','w') as f: f.write('this is not a valid header') with open('invalid_file','r') as f: with pytest.raises(TypeError): - Geom.from_file(f) + Geom.load_ASCII(f) + def test_invalid_vtr(self,tmpdir): v = VTK.from_rectilinearGrid(np.random.randint(5,10,3)*2,np.random.random(3) + 1.0) - v.to_file(tmpdir/'no_materialpoint.vtr') + v.save(tmpdir/'no_materialpoint.vtr') for _ in range(10): time.sleep(.2) if os.path.exists(tmpdir/'no_materialpoint.vtr'): break with pytest.raises(ValueError): - Geom.from_vtr(tmpdir/'no_materialpoint.vtr') + Geom.load(tmpdir/'no_materialpoint.vtr') - @pytest.mark.parametrize('pack',[True,False]) - def test_pack(self,default,tmpdir,pack): - default.to_file(tmpdir/'default.geom',format='ASCII',pack=pack) - new = Geom.from_file(tmpdir/'default.geom') + @pytest.mark.parametrize('compress',[True,False]) + def test_compress(self,default,tmpdir,compress): + default.save_ASCII(tmpdir/'default.geom',compress=compress) + new = Geom.load_ASCII(tmpdir/'default.geom') assert geom_equal(new,default) - def test_invalid_combination(self,default): - with pytest.raises(ValueError): - default.duplicate(default.microstructure[1:,1:,1:],size=np.ones(3), autosize=True) def test_invalid_size(self,default): with pytest.raises(ValueError): - 
default.duplicate(default.microstructure[1:,1:,1:],size=np.ones(2)) + Geom(default.material[1:,1:,1:], + size=np.ones(2)) + def test_invalid_origin(self,default): with pytest.raises(ValueError): - default.duplicate(default.microstructure[1:,1:,1:],origin=np.ones(4)) + Geom(default.material[1:,1:,1:], + size=np.ones(3), + origin=np.ones(4)) - def test_invalid_microstructure_size(self,default): - microstructure = np.ones((3,3)) + + def test_invalid_materials_shape(self,default): + material = np.ones((3,3)) with pytest.raises(ValueError): - default.duplicate(microstructure) + Geom(material, + size=np.ones(3)) - def test_invalid_microstructure_type(self,default): - microstructure = np.random.randint(1,300,(3,4,5))==1 - with pytest.raises(TypeError): - default.duplicate(microstructure) - def test_invalid_homogenization(self,default): + def test_invalid_materials_type(self,default): + material = np.random.randint(1,300,(3,4,5))==1 with pytest.raises(TypeError): - default.set_homogenization(homogenization=0) + Geom(material) - def test_invalid_write_format(self,default): - with pytest.raises(TypeError): - default.to_file(format='invalid') @pytest.mark.parametrize('directions,reflect',[ (['x'], False), @@ -154,10 +128,11 @@ class TestGeom: modified = default.mirror(directions,reflect) tag = f'directions={"-".join(directions)}_reflect={reflect}' reference = reference_dir/f'mirror_{tag}.geom' - if update: modified.to_file(reference) - assert geom_equal(Geom.from_file(reference), + if update: modified.save_ASCII(reference) + assert geom_equal(Geom.load_ASCII(reference), modified) + @pytest.mark.parametrize('directions',[(1,2,'y'),('a','b','x'),[1]]) def test_mirror_invalid(self,default,directions): with pytest.raises(ValueError): @@ -175,17 +150,20 @@ class TestGeom: modified = default.flip(directions) tag = f'directions={"-".join(directions)}' reference = reference_dir/f'flip_{tag}.geom' - if update: modified.to_file(reference) - assert geom_equal(Geom.from_file(reference), + if update: modified.save_ASCII(reference) + assert geom_equal(Geom.load_ASCII(reference), modified) + def test_flip_invariant(self,default): assert geom_equal(default,default.flip([])) + @pytest.mark.parametrize('direction',[['x'],['x','y']]) def test_flip_double(self,default,direction): assert geom_equal(default,default.flip(direction).flip(direction)) + @pytest.mark.parametrize('directions',[(1,2,'y'),('a','b','x'),[1]]) def test_flip_invalid(self,default,directions): with pytest.raises(ValueError): @@ -199,14 +177,15 @@ class TestGeom: current = default.clean(stencil,selection,periodic) reference = reference_dir/f'clean_{stencil}_{"+".join(map(str,[None] if selection is None else selection))}_{periodic}' if update and stencil > 1: - current.to_file(reference,format='vtr') + current.save(reference) for _ in range(10): time.sleep(.2) if os.path.exists(reference.with_suffix('.vtr')): break - assert geom_equal(Geom.from_vtr(reference) if stencil > 1 else default, + assert geom_equal(Geom.load(reference) if stencil > 1 else default, current ) + @pytest.mark.parametrize('grid',[ (10,11,10), [10,13,10], @@ -220,26 +199,33 @@ class TestGeom: modified = default.scale(grid) tag = f'grid={util.srepr(grid,"-")}' reference = reference_dir/f'scale_{tag}.geom' - if update: modified.to_file(reference) - assert geom_equal(Geom.from_file(reference), + if update: modified.save_ASCII(reference) + assert geom_equal(Geom.load_ASCII(reference), modified) + def test_renumber(self,default): - microstructure = default.get_microstructure() - for m in 
np.unique(microstructure): - microstructure[microstructure==m] = microstructure.max() + np.random.randint(1,30) - modified = default.duplicate(microstructure) + material = default.material.copy() + for m in np.unique(material): + material[material==m] = material.max() + np.random.randint(1,30) + modified = Geom(material, + default.size, + default.origin) assert not geom_equal(modified,default) assert geom_equal(default, modified.renumber()) + def test_substitute(self,default): offset = np.random.randint(1,500) - modified = default.duplicate(default.get_microstructure() + offset) + modified = Geom(default.material + offset, + default.size, + default.origin) assert not geom_equal(modified,default) assert geom_equal(default, - modified.substitute(np.arange(default.microstructure.max())+1+offset, - np.arange(default.microstructure.max())+1)) + modified.substitute(np.arange(default.material.max())+1+offset, + np.arange(default.material.max())+1)) + @pytest.mark.parametrize('axis_angle',[np.array([1,0,0,86.7]), np.array([0,1,0,90.4]), np.array([0,0,1,90]), np.array([1,0,0,175]),np.array([0,-1,0,178]),np.array([0,0,1,180])]) @@ -249,21 +235,24 @@ class TestGeom: modified.rotate(Rotation.from_axis_angle(axis_angle,degrees=True)) assert geom_equal(default,modified) + @pytest.mark.parametrize('Eulers',[[32.0,68.0,21.0], [0.0,32.0,240.0]]) def test_rotate(self,default,update,reference_dir,Eulers): modified = default.rotate(Rotation.from_Eulers(Eulers,degrees=True)) tag = f'Eulers={util.srepr(Eulers,"-")}' reference = reference_dir/f'rotate_{tag}.geom' - if update: modified.to_file(reference) - assert geom_equal(Geom.from_file(reference), + if update: modified.save_ASCII(reference) + assert geom_equal(Geom.load_ASCII(reference), modified) + def test_canvas(self,default): grid = default.grid grid_add = np.random.randint(0,30,(3)) modified = default.canvas(grid + grid_add) - assert np.all(modified.microstructure[:grid[0],:grid[1],:grid[2]] == default.microstructure) + assert np.all(modified.material[:grid[0],:grid[1],:grid[2]] == default.material) + @pytest.mark.parametrize('center1,center2',[(np.random.random(3)*.5,np.random.random()*8), (np.random.randint(4,8,(3)),np.random.randint(9,12,(3)))]) @@ -276,13 +265,14 @@ class TestGeom: np.random.rand()*4, np.random.randint(20)]) def test_add_primitive_shift(self,center1,center2,diameter,exponent): - """Same volume fraction for periodic microstructures and different center.""" + """Same volume fraction for periodic geometries and different center.""" o = np.random.random(3)-.5 g = np.random.randint(8,32,(3)) s = np.random.random(3)+.5 G_1 = Geom(np.ones(g,'i'),s,o).add_primitive(diameter,center1,exponent) G_2 = Geom(np.ones(g,'i'),s,o).add_primitive(diameter,center2,exponent) - assert np.count_nonzero(G_1.microstructure!=2) == np.count_nonzero(G_2.microstructure!=2) + assert np.count_nonzero(G_1.material!=2) == np.count_nonzero(G_2.material!=2) + @pytest.mark.parametrize('center',[np.random.randint(4,10,(3)), np.random.randint(2,10), @@ -299,6 +289,7 @@ class TestGeom: G_2 = Geom(np.ones(g,'i'),[1.,1.,1.]).add_primitive(.3,center,1,fill,Rotation.from_Eulers(eu),inverse,periodic=periodic) assert geom_equal(G_1,G_2) + @pytest.mark.parametrize('trigger',[[1],[]]) def test_vicinity_offset(self,trigger): offset = np.random.randint(2,4) @@ -317,13 +308,15 @@ class TestGeom: geom = Geom(m,np.random.rand(3)).vicinity_offset(vicinity,offset,trigger=trigger) - assert np.all(m2==geom.microstructure) + assert np.all(m2==geom.material) + 
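For orientation, a minimal usage sketch of the renamed damask.Geom interface exercised by the surrounding hunks (material-based constructor, material/grid/size properties, save/load and save_ASCII/load_ASCII). The names are taken from the tests in this diff and should be read as assumptions about the library, not as its documented API:

# Sketch assuming the renamed damask.Geom API shown in the hunks above.
import numpy as np
import damask

material = np.ones((8, 8, 8), dtype=int)       # single grain everywhere
material[:, 4:, :] = 2                         # second grain in the upper half

geom = damask.Geom(material, size=np.ones(3))  # grid follows from material.shape

geom.save_ASCII('bicrystal.geom')              # legacy ASCII geom file
geom.save('bicrystal')                         # VTK rectilinear grid (.vtr)

restored = damask.Geom.load_ASCII('bicrystal.geom')
assert np.all(restored.material == geom.material)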
@pytest.mark.parametrize('periodic',[True,False]) def test_vicinity_offset_invariant(self,default,periodic): - old = default.get_microstructure() - default.vicinity_offset(trigger=[old.max()+1,old.min()-1]) - assert np.all(old==default.microstructure) + offset = default.vicinity_offset(trigger=[default.material.max()+1, + default.material.min()-1]) + assert np.all(offset.material==default.material) + @pytest.mark.parametrize('periodic',[True,False]) def test_tessellation_approaches(self,periodic): @@ -335,6 +328,7 @@ class TestGeom: Laguerre = Geom.from_Laguerre_tessellation(grid,size,seeds,np.ones(N_seeds),periodic) assert geom_equal(Laguerre,Voronoi) + def test_Laguerre_weights(self): grid = np.random.randint(10,20,3) size = np.random.random(3) + 1.0 @@ -344,17 +338,18 @@ class TestGeom: ms = np.random.randint(1, N_seeds+1) weights[ms-1] = np.random.random() Laguerre = Geom.from_Laguerre_tessellation(grid,size,seeds,weights,np.random.random()>0.5) - assert np.all(Laguerre.microstructure == ms) + assert np.all(Laguerre.material == ms) + @pytest.mark.parametrize('approach',['Laguerre','Voronoi']) def test_tessellate_bicrystal(self,approach): grid = np.random.randint(5,10,3)*2 size = grid.astype(np.float) seeds = np.vstack((size*np.array([0.5,0.25,0.5]),size*np.array([0.5,0.75,0.5]))) - microstructure = np.ones(grid) - microstructure[:,grid[1]//2:,:] = 2 + material = np.ones(grid) + material[:,grid[1]//2:,:] = 2 if approach == 'Laguerre': geom = Geom.from_Laguerre_tessellation(grid,size,seeds,np.ones(2),np.random.random()>0.5) elif approach == 'Voronoi': geom = Geom.from_Voronoi_tessellation(grid,size,seeds, np.random.random()>0.5) - assert np.all(geom.microstructure == microstructure) + assert np.all(geom.material == material) diff --git a/python/tests/test_Material.py b/python/tests/test_Material.py new file mode 100644 index 000000000..567dfe646 --- /dev/null +++ b/python/tests/test_Material.py @@ -0,0 +1,61 @@ +import os + +import pytest + +from damask import Material + +@pytest.fixture +def reference_dir(reference_dir_base): + """Directory containing reference results.""" + return reference_dir_base/'Material' + + +class TestMaterial: + + @pytest.mark.parametrize('fname',[None,'test.yaml']) + def test_load_save(self,reference_dir,tmp_path,fname): + reference = Material.load(reference_dir/'material.yaml') + os.chdir(tmp_path) + if fname is None: + reference.save() + new = Material.load('material.yaml') + else: + reference.save(fname) + new = Material.load(fname) + assert reference == new + + def test_valid_complete(self,reference_dir): + material_config = Material.load(reference_dir/'material.yaml') + assert material_config.is_valid and material_config.is_complete + + def test_invalid_lattice(self,reference_dir): + material_config = Material.load(reference_dir/'material.yaml') + material_config['phase']['Aluminum']['lattice']='fxc' + assert not material_config.is_valid + + def test_invalid_orientation(self,reference_dir): + material_config = Material.load(reference_dir/'material.yaml') + material_config['microstructure'][0]['constituents'][0]['orientation']=[0,0,0,0] + assert not material_config.is_valid + + def test_invalid_fraction(self,reference_dir): + material_config = Material.load(reference_dir/'material.yaml') + material_config['microstructure'][0]['constituents'][0]['fraction']=.9 + assert not material_config.is_valid + + + @pytest.mark.parametrize('item',['homogenization','phase','microstructure']) + def test_incomplete_missing(self,reference_dir,item): + material_config = 
Material.load(reference_dir/'material.yaml') + del material_config[item] + assert not material_config.is_complete + + def test_incomplete_wrong_phase(self,reference_dir): + material_config = Material.load(reference_dir/'material.yaml') + new = material_config.microstructure_rename_phase({'Steel':'FeNbC'}) + assert not new.is_complete + + def test_incomplete_wrong_homogenization(self,reference_dir): + material_config = Material.load(reference_dir/'material.yaml') + new = material_config.microstructure_rename_homogenization({'Taylor':'isostrain'}) + assert not new.is_complete diff --git a/python/tests/test_Orientation.py b/python/tests/test_Orientation.py index 9a23dc0ed..669f73e91 100644 --- a/python/tests/test_Orientation.py +++ b/python/tests/test_Orientation.py @@ -106,8 +106,8 @@ class TestOrientation: coords = np.array([(1,i+1) for i,x in enumerate(eu)]) table = Table(eu,{'Eulers':(3,)}) table = table.add('pos',coords) - table.to_ASCII(reference) - assert np.allclose(eu,Table.from_ASCII(reference).get('Eulers')) + table.save(reference) + assert np.allclose(eu,Table.load(reference).get('Eulers')) @pytest.mark.parametrize('lattice',Lattice.lattices) def test_disorientation360(self,lattice): @@ -129,4 +129,3 @@ class TestOrientation: eqs = [r for r in R_1.equivalent] R_2 = Orientation.from_average(eqs) assert np.allclose(R_1.rotation.quaternion,R_2.rotation.quaternion) - diff --git a/python/tests/test_Result.py b/python/tests/test_Result.py index 6000f50f9..68b72badf 100644 --- a/python/tests/test_Result.py +++ b/python/tests/test_Result.py @@ -339,8 +339,8 @@ class TestResult: @pytest.mark.parametrize('output',['F',[],['F','P']]) def test_vtk(self,tmp_path,default,output): os.chdir(tmp_path) - default.to_vtk(output) + default.save_vtk(output) def test_XDMF(self,tmp_path,single_phase): os.chdir(tmp_path) - single_phase.write_XDMF() + single_phase.save_XDMF() diff --git a/python/tests/test_Rotation.py b/python/tests/test_Rotation.py index 5435895a2..66cabfbd4 100644 --- a/python/tests/test_Rotation.py +++ b/python/tests/test_Rotation.py @@ -461,7 +461,7 @@ def mul(me, other): if other.shape == (3,): A = me.quaternion[0]**2.0 - np.dot(me.quaternion[1:],me.quaternion[1:]) B = 2.0 * np.dot(me.quaternion[1:],other) - C = 2.0 * _P*me.quaternion[0] + C = 2.0 * _P * me.quaternion[0] return A*other + B*me.quaternion[1:] + C * np.cross(me.quaternion[1:],other) @@ -496,9 +496,8 @@ class TestRotation: o = backward(forward(m)) ok = np.allclose(m,o,atol=atol) if np.isclose(rot.as_quaternion()[0],0.0,atol=atol): - ok = ok or np.allclose(m*-1.,o,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.isclose(np.linalg.norm(o),1.0) + ok |= np.allclose(m*-1.,o,atol=atol) + assert ok and np.isclose(np.linalg.norm(o),1.0), f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('forward,backward',[(Rotation._om2qu,Rotation._qu2om), (Rotation._om2eu,Rotation._eu2om), @@ -512,8 +511,7 @@ class TestRotation: m = rot.as_matrix() o = backward(forward(m)) ok = np.allclose(m,o,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.isclose(np.linalg.det(o),1.0) + assert ok and np.isclose(np.linalg.det(o),1.0), f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('forward,backward',[(Rotation._eu2qu,Rotation._qu2eu), (Rotation._eu2om,Rotation._om2eu), @@ -531,9 +529,9 @@ class TestRotation: ok = ok or np.allclose(np.where(np.isclose(m,u),m-u,m),np.where(np.isclose(o,u),o-u,o),atol=atol) if np.isclose(m[1],0.0,atol=atol) or np.isclose(m[1],np.pi,atol=atol): sum_phi = 
np.unwrap([m[0]+m[2],o[0]+o[2]]) - ok = ok or np.isclose(sum_phi[0],sum_phi[1],atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and (np.zeros(3)-1.e-9 <= o).all() and (o <= np.array([np.pi*2.,np.pi,np.pi*2.])+1.e-9).all() + ok |= np.isclose(sum_phi[0],sum_phi[1],atol=atol) + assert ok and (np.zeros(3)-1.e-9 <= o).all() \ + and (o <= np.array([np.pi*2.,np.pi,np.pi*2.])+1.e-9).all(), f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('forward,backward',[(Rotation._ax2qu,Rotation._qu2ax), (Rotation._ax2om,Rotation._om2ax), @@ -548,9 +546,8 @@ class TestRotation: o = backward(forward(m)) ok = np.allclose(m,o,atol=atol) if np.isclose(m[3],np.pi,atol=atol): - ok = ok or np.allclose(m*np.array([-1.,-1.,-1.,1.]),o,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) and o[3]<=np.pi+1.e-9 + ok |= np.allclose(m*np.array([-1.,-1.,-1.,1.]),o,atol=atol) + assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) and o[3]<=np.pi+1.e-9, f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('forward,backward',[(Rotation._ro2qu,Rotation._qu2ro), #(Rotation._ro2om,Rotation._om2ro), @@ -566,8 +563,7 @@ class TestRotation: o = backward(forward(m)) ok = np.allclose(np.clip(m,None,cutoff),np.clip(o,None,cutoff),atol=atol) ok = ok or np.isclose(m[3],0.0,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) + assert ok and np.isclose(np.linalg.norm(o[:3]),1.0), f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('forward,backward',[(Rotation._ho2qu,Rotation._qu2ho), (Rotation._ho2om,Rotation._om2ho), @@ -581,8 +577,7 @@ class TestRotation: m = rot.as_homochoric() o = backward(forward(m)) ok = np.allclose(m,o,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.linalg.norm(o) < _R1 + 1.e-9 + assert ok and np.linalg.norm(o) < _R1 + 1.e-9, f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('forward,backward',[(Rotation._cu2qu,Rotation._qu2cu), (Rotation._cu2om,Rotation._om2cu), @@ -598,8 +593,7 @@ class TestRotation: ok = np.allclose(m,o,atol=atol) if np.count_nonzero(np.isclose(np.abs(o),np.pi**(2./3.)*.5)): ok = ok or np.allclose(m*-1.,o,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.max(np.abs(o)) < np.pi**(2./3.) * 0.5 + 1.e-9 + assert ok and np.max(np.abs(o)) < np.pi**(2./3.) 
* 0.5 + 1.e-9, f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('vectorized, single',[(Rotation._qu2om,qu2om), (Rotation._qu2eu,qu2eu), @@ -612,8 +606,7 @@ class TestRotation: vectorized(qu.reshape(qu.shape[0]//2,-1,4)) co = vectorized(qu) for q,c in zip(qu,co): - print(q,c) - assert np.allclose(single(q),c) and np.allclose(single(q),vectorized(q)) + assert np.allclose(single(q),c) and np.allclose(single(q),vectorized(q)), f'{q},{c}' @pytest.mark.parametrize('vectorized, single',[(Rotation._om2qu,om2qu), @@ -625,8 +618,7 @@ class TestRotation: vectorized(om.reshape(om.shape[0]//2,-1,3,3)) co = vectorized(om) for o,c in zip(om,co): - print(o,c) - assert np.allclose(single(o),c) and np.allclose(single(o),vectorized(o)) + assert np.allclose(single(o),c) and np.allclose(single(o),vectorized(o)), f'{o},{c}' @pytest.mark.parametrize('vectorized, single',[(Rotation._eu2qu,eu2qu), (Rotation._eu2om,eu2om), @@ -638,8 +630,7 @@ class TestRotation: vectorized(eu.reshape(eu.shape[0]//2,-1,3)) co = vectorized(eu) for e,c in zip(eu,co): - print(e,c) - assert np.allclose(single(e),c) and np.allclose(single(e),vectorized(e)) + assert np.allclose(single(e),c) and np.allclose(single(e),vectorized(e)), f'{e},{c}' @pytest.mark.parametrize('vectorized, single',[(Rotation._ax2qu,ax2qu), (Rotation._ax2om,ax2om), @@ -651,8 +642,7 @@ class TestRotation: vectorized(ax.reshape(ax.shape[0]//2,-1,4)) co = vectorized(ax) for a,c in zip(ax,co): - print(a,c) - assert np.allclose(single(a),c) and np.allclose(single(a),vectorized(a)) + assert np.allclose(single(a),c) and np.allclose(single(a),vectorized(a)), f'{a},{c}' @pytest.mark.parametrize('vectorized, single',[(Rotation._ro2ax,ro2ax), @@ -663,8 +653,7 @@ class TestRotation: vectorized(ro.reshape(ro.shape[0]//2,-1,4)) co = vectorized(ro) for r,c in zip(ro,co): - print(r,c) - assert np.allclose(single(r),c) and np.allclose(single(r),vectorized(r)) + assert np.allclose(single(r),c) and np.allclose(single(r),vectorized(r)), f'{r},{c}' @pytest.mark.parametrize('vectorized, single',[(Rotation._ho2ax,ho2ax), (Rotation._ho2cu,ho2cu)]) @@ -674,8 +663,7 @@ class TestRotation: vectorized(ho.reshape(ho.shape[0]//2,-1,3)) co = vectorized(ho) for h,c in zip(ho,co): - print(h,c) - assert np.allclose(single(h),c) and np.allclose(single(h),vectorized(h)) + assert np.allclose(single(h),c) and np.allclose(single(h),vectorized(h)), f'{h},{c}' @pytest.mark.parametrize('vectorized, single',[(Rotation._cu2ho,cu2ho)]) def test_cubochoric_vectorization(self,set_of_rotations,vectorized,single): @@ -684,8 +672,7 @@ class TestRotation: vectorized(cu.reshape(cu.shape[0]//2,-1,3)) co = vectorized(cu) for u,c in zip(cu,co): - print(u,c) - assert np.allclose(single(u),c) and np.allclose(single(u),vectorized(u)) + assert np.allclose(single(u),c) and np.allclose(single(u),vectorized(u)), f'{u},{c}' @pytest.mark.parametrize('func',[Rotation.from_axis_angle]) def test_normalization_vectorization(self,func): @@ -703,9 +690,8 @@ class TestRotation: o = Rotation.from_Eulers(rot.as_Eulers(degrees),degrees).as_quaternion() ok = np.allclose(m,o,atol=atol) if np.isclose(rot.as_quaternion()[0],0.0,atol=atol): - ok = ok or np.allclose(m*-1.,o,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.isclose(np.linalg.norm(o),1.0) + ok |= np.allclose(m*-1.,o,atol=atol) + assert ok and np.isclose(np.linalg.norm(o),1.0), f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('P',[1,-1]) @pytest.mark.parametrize('normalize',[True,False]) @@ -717,12 +703,12 @@ class TestRotation: o = 
Rotation.from_axis_angle(rot.as_axis_angle(degrees)*c,degrees,normalize,P).as_Eulers() u = np.array([np.pi*2,np.pi,np.pi*2]) ok = np.allclose(m,o,atol=atol) - ok = ok or np.allclose(np.where(np.isclose(m,u),m-u,m),np.where(np.isclose(o,u),o-u,o),atol=atol) + ok |= np.allclose(np.where(np.isclose(m,u),m-u,m),np.where(np.isclose(o,u),o-u,o),atol=atol) if np.isclose(m[1],0.0,atol=atol) or np.isclose(m[1],np.pi,atol=atol): sum_phi = np.unwrap([m[0]+m[2],o[0]+o[2]]) - ok = ok or np.isclose(sum_phi[0],sum_phi[1],atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and (np.zeros(3)-1.e-9 <= o).all() and (o <= np.array([np.pi*2.,np.pi,np.pi*2.])+1.e-9).all() + ok |= np.isclose(sum_phi[0],sum_phi[1],atol=atol) + assert ok and (np.zeros(3)-1.e-9 <= o).all() \ + and (o <= np.array([np.pi*2.,np.pi,np.pi*2.])+1.e-9).all(), f'{m},{o},{rot.as_quaternion()}' def test_matrix(self,set_of_rotations): for rot in set_of_rotations: @@ -731,8 +717,8 @@ class TestRotation: ok = np.allclose(m,o,atol=atol) if np.isclose(m[3],np.pi,atol=atol): ok = ok or np.allclose(m*np.array([-1.,-1.,-1.,1.]),o,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) and o[3]<=np.pi+1.e-9 + assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) \ + and o[3]<=np.pi+1.e-9, f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('P',[1,-1]) @pytest.mark.parametrize('normalize',[True,False]) @@ -742,8 +728,7 @@ class TestRotation: m = rot.as_matrix() o = Rotation.from_Rodrigues(rot.as_Rodrigues()*c,normalize,P).as_matrix() ok = np.allclose(m,o,atol=atol) - print(m,o) - assert ok and np.isclose(np.linalg.det(o),1.0) + assert ok and np.isclose(np.linalg.det(o),1.0), f'{m},{o}' @pytest.mark.parametrize('P',[1,-1]) def test_homochoric(self,set_of_rotations,P): @@ -753,8 +738,7 @@ class TestRotation: o = Rotation.from_homochoric(rot.as_homochoric()*P*-1,P).as_Rodrigues() ok = np.allclose(np.clip(m,None,cutoff),np.clip(o,None,cutoff),atol=atol) ok = ok or np.isclose(m[3],0.0,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) + assert ok and np.isclose(np.linalg.norm(o[:3]),1.0), f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('P',[1,-1]) def test_cubochoric(self,set_of_rotations,P): @@ -762,8 +746,7 @@ class TestRotation: m = rot.as_homochoric() o = Rotation.from_cubochoric(rot.as_cubochoric()*P*-1,P).as_homochoric() ok = np.allclose(m,o,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and np.linalg.norm(o) < (3.*np.pi/4.)**(1./3.) + 1.e-9 + assert ok and np.linalg.norm(o) < (3.*np.pi/4.)**(1./3.) 
+ 1.e-9, f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('P',[1,-1]) @pytest.mark.parametrize('accept_homomorph',[True,False]) @@ -774,9 +757,8 @@ class TestRotation: o = Rotation.from_quaternion(rot.as_quaternion()*c,accept_homomorph,P).as_cubochoric() ok = np.allclose(m,o,atol=atol) if np.count_nonzero(np.isclose(np.abs(o),np.pi**(2./3.)*.5)): - ok = ok or np.allclose(m*-1.,o,atol=atol) - print(m,o,rot.as_quaternion()) - assert ok and o.max() < np.pi**(2./3.)*0.5+1.e-9 + ok |= np.allclose(m*-1.,o,atol=atol) + assert ok and o.max() < np.pi**(2./3.)*0.5+1.e-9, f'{m},{o},{rot.as_quaternion()}' @pytest.mark.parametrize('reciprocal',[True,False]) def test_basis(self,set_of_rotations,reciprocal): @@ -858,8 +840,7 @@ class TestRotation: for rot in set_of_rotations: v = rot.broadcast_to((5,)) @ data for i in range(data.shape[0]): - print(i-data[i]) - assert np.allclose(mul(rot,data[i]),v[i]) + assert np.allclose(mul(rot,data[i]),v[i]), f'{i-data[i]}' @pytest.mark.parametrize('data',[np.random.rand(3), @@ -926,34 +907,39 @@ class TestRotation: @pytest.mark.parametrize('sigma',[5,10,15,20]) @pytest.mark.parametrize('N',[1000,10000,100000]) def test_spherical_component(self,N,sigma): - c = Rotation.from_random() - o = Rotation.from_spherical_component(c,sigma,N) - _, angles = c.misorientation(o).as_axis_angle(pair=True,degrees=True) - angles[::2] *= -1 # flip angle for every second to symmetrize distribution + p = [] + for run in range(5): + c = Rotation.from_random() + o = Rotation.from_spherical_component(c,sigma,N) + _, angles = c.misorientation(o).as_axis_angle(pair=True,degrees=True) + angles[::2] *= -1 # flip angle for every second to symmetrize distribution + + p.append(stats.normaltest(angles)[1]) - p = stats.normaltest(angles)[1] sigma_out = np.std(angles) - print(f'\np: {p}, sigma ratio {sigma/sigma_out}') - assert (.9 < sigma/sigma_out < 1.1) and p > 0.001 + p = np.average(p) + assert (.9 < sigma/sigma_out < 1.1) and p > 1e-2, f'{sigma/sigma_out},{p}' @pytest.mark.parametrize('sigma',[5,10,15,20]) @pytest.mark.parametrize('N',[1000,10000,100000]) def test_from_fiber_component(self,N,sigma): - """https://en.wikipedia.org/wiki/Full_width_at_half_maximum.""" - alpha = np.random.random(2)*np.pi - beta = np.random.random(2)*np.pi + p = [] + for run in range(5): + alpha = np.random.random()*2*np.pi,np.arccos(np.random.random()) + beta = np.random.random()*2*np.pi,np.arccos(np.random.random()) - f_in_C = np.array([np.sin(alpha[0])*np.cos(alpha[1]), np.sin(alpha[0])*np.sin(alpha[1]), np.cos(alpha[0])]) - f_in_S = np.array([np.sin(beta[0] )*np.cos(beta[1] ), np.sin(beta[0] )*np.sin(beta[1] ), np.cos(beta[0] )]) - ax = np.append(np.cross(f_in_C,f_in_S), - np.arccos(np.dot(f_in_C,f_in_S))) - n = Rotation.from_axis_angle(ax if ax[3] > 0.0 else ax*-1.0 ,normalize=True) # rotation to align fiber axis in crystal and sample system + f_in_C = np.array([np.sin(alpha[0])*np.cos(alpha[1]), np.sin(alpha[0])*np.sin(alpha[1]), np.cos(alpha[0])]) + f_in_S = np.array([np.sin(beta[0] )*np.cos(beta[1] ), np.sin(beta[0] )*np.sin(beta[1] ), np.cos(beta[0] )]) + ax = np.append(np.cross(f_in_C,f_in_S), - np.arccos(np.dot(f_in_C,f_in_S))) + n = Rotation.from_axis_angle(ax if ax[3] > 0.0 else ax*-1.0 ,normalize=True) # rotation to align fiber axis in crystal and sample system - o = Rotation.from_fiber_component(alpha,beta,np.radians(sigma),N,False) - angles = np.arccos(np.clip(np.dot(o@np.broadcast_to(f_in_S,(N,3)),n@f_in_S),-1,1)) - dist = np.array(angles) * (np.random.randint(0,2,N)*2-1) + o = 
Rotation.from_fiber_component(alpha,beta,np.radians(sigma),N,False) + angles = np.arccos(np.clip(np.dot(o@np.broadcast_to(f_in_S,(N,3)),n@f_in_S),-1,1)) + dist = np.array(angles) * (np.random.randint(0,2,N)*2-1) + + p.append(stats.normaltest(dist)[1]) - p = stats.normaltest(dist)[1] sigma_out = np.degrees(np.std(dist)) - print(f'\np: {p}, sigma ratio {sigma/sigma_out}') - assert (.9 < sigma/sigma_out < 1.1) and p > 0.001 + p = np.average(p) + assert (.9 < sigma/sigma_out < 1.1) and p > 1e-2, f'{sigma/sigma_out},{p}' diff --git a/python/tests/test_Table.py b/python/tests/test_Table.py index 1763e27ef..7a86c7fed 100644 --- a/python/tests/test_Table.py +++ b/python/tests/test_Table.py @@ -35,50 +35,50 @@ class TestTable: @pytest.mark.parametrize('mode',['str','path']) def test_write_read(self,default,tmpdir,mode): - default.to_file(tmpdir/'default.txt') + default.save(tmpdir/'default.txt') if mode == 'path': - new = Table.from_ASCII(tmpdir/'default.txt') + new = Table.load(tmpdir/'default.txt') elif mode == 'str': - new = Table.from_ASCII(str(tmpdir/'default.txt')) + new = Table.load(str(tmpdir/'default.txt')) assert all(default.data==new.data) and default.shapes == new.shapes def test_write_read_file(self,default,tmpdir): with open(tmpdir/'default.txt','w') as f: - default.to_file(f) + default.save(f) with open(tmpdir/'default.txt') as f: - new = Table.from_ASCII(f) + new = Table.load(f) assert all(default.data==new.data) and default.shapes == new.shapes - def test_write_read_new_style(self,default,tmpdir): - with open(tmpdir/'new_style.txt','w') as f: - default.to_file(f,new_style=True) - with open(tmpdir/'new_style.txt') as f: - new = Table.from_ASCII(f) + def test_write_read_legacy_style(self,default,tmpdir): + with open(tmpdir/'legacy.txt','w') as f: + default.save(f,legacy=True) + with open(tmpdir/'legacy.txt') as f: + new = Table.load(f) assert all(default.data==new.data) and default.shapes == new.shapes def test_write_invalid_format(self,default,tmpdir): with pytest.raises(TypeError): - default.to_file(tmpdir/'shouldnotbethere.txt',format='invalid') + default.save(tmpdir/'shouldnotbethere.txt',format='invalid') @pytest.mark.parametrize('mode',['str','path']) def test_read_ang(self,reference_dir,mode): if mode == 'path': - new = Table.from_ang(reference_dir/'simple.ang') + new = Table.load_ang(reference_dir/'simple.ang') elif mode == 'str': - new = Table.from_ang(str(reference_dir/'simple.ang')) + new = Table.load_ang(str(reference_dir/'simple.ang')) assert new.data.shape == (4,10) and \ new.labels == ['eu', 'pos', 'IQ', 'CI', 'ID', 'intensity', 'fit'] def test_read_ang_file(self,reference_dir): f = open(reference_dir/'simple.ang') - new = Table.from_ang(f) + new = Table.load_ang(f) assert new.data.shape == (4,10) and \ new.labels == ['eu', 'pos', 'IQ', 'CI', 'ID', 'intensity', 'fit'] @pytest.mark.parametrize('fname',['datatype-mix.txt','whitespace-mix.txt']) def test_read_strange(self,reference_dir,fname): with open(reference_dir/fname) as f: - Table.from_ASCII(f) + Table.load(f) def test_set(self,default): d = default.set('F',np.zeros((5,3,3)),'set to zero').get('F') @@ -166,7 +166,7 @@ class TestTable: x = np.random.random((5,12)) t = Table(x,{'F':(3,3),'v':(3,)},['random test data']) unsort = t.get('4_F') - sort = t.sort_by('4_F').get('4_F') + sort = t.sort_by('4_F').get('4_F') assert np.all(np.sort(unsort,0)==sort) def test_sort_revert(self): @@ -179,6 +179,6 @@ class TestTable: t = Table(np.array([[0,1,],[2,1,]]), {'v':(2,)}, ['test data'])\ - .add('s',np.array(['b','a']))\ - 
.sort_by('s') + .add('s',np.array(['b','a']))\ + .sort_by('s') assert np.all(t.get('1_v') == np.array([2,0]).reshape(2,1)) diff --git a/python/tests/test_VTK.py b/python/tests/test_VTK.py index f6be4da25..81c9eb772 100644 --- a/python/tests/test_VTK.py +++ b/python/tests/test_VTK.py @@ -32,22 +32,22 @@ class TestVTK: origin = np.random.random(3) v = VTK.from_rectilinearGrid(grid,size,origin) string = v.__repr__() - v.to_file(tmp_path/'rectilinearGrid',False) - vtr = VTK.from_file(tmp_path/'rectilinearGrid.vtr') + v.save(tmp_path/'rectilinearGrid',False) + vtr = VTK.load(tmp_path/'rectilinearGrid.vtr') with open(tmp_path/'rectilinearGrid.vtk','w') as f: f.write(string) - vtk = VTK.from_file(tmp_path/'rectilinearGrid.vtk','VTK_rectilinearGrid') + vtk = VTK.load(tmp_path/'rectilinearGrid.vtk','VTK_rectilinearGrid') assert(string == vtr.__repr__() == vtk.__repr__()) def test_polyData(self,tmp_path): points = np.random.rand(100,3) v = VTK.from_polyData(points) string = v.__repr__() - v.to_file(tmp_path/'polyData',False) - vtp = VTK.from_file(tmp_path/'polyData.vtp') + v.save(tmp_path/'polyData',False) + vtp = VTK.load(tmp_path/'polyData.vtp') with open(tmp_path/'polyData.vtk','w') as f: f.write(string) - vtk = VTK.from_file(tmp_path/'polyData.vtk','polyData') + vtk = VTK.load(tmp_path/'polyData.vtk','polyData') assert(string == vtp.__repr__() == vtk.__repr__()) @pytest.mark.parametrize('cell_type,n',[ @@ -62,11 +62,11 @@ class TestVTK: connectivity = np.random.choice(np.arange(n),n,False).reshape(-1,n) v = VTK.from_unstructuredGrid(nodes,connectivity,cell_type) string = v.__repr__() - v.to_file(tmp_path/'unstructuredGrid',False) - vtu = VTK.from_file(tmp_path/'unstructuredGrid.vtu') + v.save(tmp_path/'unstructuredGrid',False) + vtu = VTK.load(tmp_path/'unstructuredGrid.vtu') with open(tmp_path/'unstructuredGrid.vtk','w') as f: f.write(string) - vtk = VTK.from_file(tmp_path/'unstructuredGrid.vtk','unstructuredgrid') + vtk = VTK.load(tmp_path/'unstructuredGrid.vtk','unstructuredgrid') assert(string == vtu.__repr__() == vtk.__repr__()) @@ -75,8 +75,8 @@ class TestVTK: v = VTK.from_polyData(points) fname_s = tmp_path/'single.vtp' fname_p = tmp_path/'parallel.vtp' - v.to_file(fname_s,False) - v.to_file(fname_p,True) + v.save(fname_s,False) + v.save(fname_p,True) for i in range(10): if os.path.isfile(fname_p) and filecmp.cmp(fname_s,fname_p): assert(True) @@ -90,11 +90,11 @@ class TestVTK: ('this_file_does_not_exist.vtx', None)]) def test_invalid_dataset_type(self,name,dataset_type): with pytest.raises(TypeError): - VTK.from_file(name,dataset_type) + VTK.load(name,dataset_type) def test_invalid_extension_write(self,default): with pytest.raises(ValueError): - default.to_file('default.txt') + default.save('default.txt') def test_invalid_get(self,default): with pytest.raises(ValueError): @@ -115,8 +115,8 @@ class TestVTK: def test_comments(self,tmp_path,default): default.add_comments(['this is a comment']) - default.to_file(tmp_path/'with_comments',parallel=False) - new = VTK.from_file(tmp_path/'with_comments.vtr') + default.save(tmp_path/'with_comments',parallel=False) + new = VTK.load(tmp_path/'with_comments.vtr') assert new.get_comments() == ['this is a comment'] def test_compare_reference_polyData(self,update,reference_dir,tmp_path): @@ -124,9 +124,9 @@ class TestVTK: polyData = VTK.from_polyData(points) polyData.add(points,'coordinates') if update: - polyData.to_file(reference_dir/'polyData') + polyData.save(reference_dir/'polyData') else: - reference = VTK.from_file(reference_dir/'polyData.vtp') + 
reference = VTK.load(reference_dir/'polyData.vtp') assert polyData.__repr__() == reference.__repr__() and \ np.allclose(polyData.get('coordinates'),points) @@ -139,8 +139,8 @@ class TestVTK: rectilinearGrid.add(c,'cell') rectilinearGrid.add(n,'node') if update: - rectilinearGrid.to_file(reference_dir/'rectilinearGrid') + rectilinearGrid.save(reference_dir/'rectilinearGrid') else: - reference = VTK.from_file(reference_dir/'rectilinearGrid.vtr') + reference = VTK.load(reference_dir/'rectilinearGrid.vtr') assert rectilinearGrid.__repr__() == reference.__repr__() and \ np.allclose(rectilinearGrid.get('cell'),c) diff --git a/python/tests/test_util.py b/python/tests/test_util.py index c786e0de1..053045741 100644 --- a/python/tests/test_util.py +++ b/python/tests/test_util.py @@ -18,8 +18,8 @@ class TestUtil: @pytest.mark.parametrize('input,output', [ - ([2,0],[1,0]), - ([0.5,0.5],[1,1]), + ([0,-2],[0,-1]), + ([-0.5,0.5],[-1,1]), ([1./2.,1./3.],[3,2]), ([2./3.,1./2.,1./3.],[4,3,2]), ]) @@ -30,4 +30,4 @@ class TestUtil: def test_lackofprecision(self): with pytest.raises(ValueError): - util.scale_to_coprime(np.array([1/3333,1,1])) + util.scale_to_coprime(np.array([1/333.333,1,1])) diff --git a/src/CPFEM.f90 b/src/CPFEM.f90 index d0fc6413e..05255e2a1 100644 --- a/src/CPFEM.f90 +++ b/src/CPFEM.f90 @@ -106,7 +106,7 @@ subroutine CPFEM_init num_commercialFEM, & debug_CPFEM - print'(/,a)', ' <<<+- CPFEM init -+>>>'; flush(6) + print'(/,a)', ' <<<+- CPFEM init -+>>>'; flush(IO_STDOUT) allocate(CPFEM_cs( 6,discretization_nIP,discretization_nElem), source= 0.0_pReal) allocate(CPFEM_dcsdE( 6,6,discretization_nIP,discretization_nElem), source= 0.0_pReal) @@ -132,7 +132,7 @@ subroutine CPFEM_init print'(a32,1x,6(i8,1x))', 'CPFEM_cs: ', shape(CPFEM_cs) print'(a32,1x,6(i8,1x))', 'CPFEM_dcsdE: ', shape(CPFEM_dcsdE) print'(a32,1x,6(i8,1x),/)', 'CPFEM_dcsdE_knownGood: ', shape(CPFEM_dcsdE_knownGood) - flush(6) + flush(IO_STDOUT) endif end subroutine CPFEM_init @@ -250,7 +250,7 @@ subroutine CPFEM_general(mode, ffn, ffn1, temperature_inp, dt, elFE, ip, cauchyS '<< CPFEM >> stress/MPa at elFE ip ', elFE, ip, CPFEM_cs(1:6,ip,elCP)*1.0e-6_pReal print'(a,i8,1x,i2,/,6(12x,6(f10.3,1x)/))', & '<< CPFEM >> Jacobian/GPa at elFE ip ', elFE, ip, transpose(CPFEM_dcsdE(1:6,1:6,ip,elCP))*1.0e-9_pReal - flush(6) + flush(IO_STDOUT) endif endif diff --git a/src/CPFEM2.f90 b/src/CPFEM2.f90 index c87f361c2..fed43ba78 100644 --- a/src/CPFEM2.f90 +++ b/src/CPFEM2.f90 @@ -76,7 +76,7 @@ end subroutine CPFEM_initAll !-------------------------------------------------------------------------------------------------- subroutine CPFEM_init - print'(/,a)', ' <<<+- CPFEM init -+>>>'; flush(6) + print'(/,a)', ' <<<+- CPFEM init -+>>>'; flush(IO_STDOUT) if (interface_restartInc > 0) call crystallite_restartRead diff --git a/src/IO.f90 b/src/IO.f90 index 9137372d3..f9e708ead 100644 --- a/src/IO.f90 +++ b/src/IO.f90 @@ -7,8 +7,8 @@ !-------------------------------------------------------------------------------------------------- module IO use, intrinsic :: ISO_fortran_env, only: & - OUTPUT_UNIT, & - ERROR_UNIT + IO_STDOUT => OUTPUT_UNIT, & + IO_STDERR => ERROR_UNIT use prec @@ -20,7 +20,7 @@ module IO character, parameter, public :: & IO_EOL = new_line('DAMASK'), & !< end of line character IO_COMMENT = '#' - character(len=*), parameter, private :: & + character(len=*), parameter :: & IO_DIVIDER = '───────────────────'//& '───────────────────'//& '───────────────────'//& @@ -42,7 +42,7 @@ module IO IO_stringAsBool, & IO_error, & IO_warning, & - 
OUTPUT_UNIT + IO_STDOUT contains @@ -52,7 +52,7 @@ contains !-------------------------------------------------------------------------------------------------- subroutine IO_init - print'(/,a)', ' <<<+- IO init -+>>>'; flush(6) + print'(/,a)', ' <<<+- IO init -+>>>'; flush(IO_STDOUT) call selfTest @@ -543,29 +543,29 @@ subroutine IO_error(error_ID,el,ip,g,instance,ext_msg) end select !$OMP CRITICAL (write2out) - write(ERROR_UNIT,'(/,a)') ' ┌'//IO_DIVIDER//'┐' - write(ERROR_UNIT,'(a,24x,a,40x,a)') ' │','error', '│' - write(ERROR_UNIT,'(a,24x,i3,42x,a)') ' │',error_ID, '│' - write(ERROR_UNIT,'(a)') ' ├'//IO_DIVIDER//'┤' + write(IO_STDERR,'(/,a)') ' ┌'//IO_DIVIDER//'┐' + write(IO_STDERR,'(a,24x,a,40x,a)') ' │','error', '│' + write(IO_STDERR,'(a,24x,i3,42x,a)') ' │',error_ID, '│' + write(IO_STDERR,'(a)') ' ├'//IO_DIVIDER//'┤' write(formatString,'(a,i6.6,a,i6.6,a)') '(1x,a4,a',max(1,len_trim(msg)),',',& max(1,72-len_trim(msg)-4),'x,a)' - write(ERROR_UNIT,formatString) '│ ',trim(msg), '│' + write(IO_STDERR,formatString) '│ ',trim(msg), '│' if (present(ext_msg)) then write(formatString,'(a,i6.6,a,i6.6,a)') '(1x,a4,a',max(1,len_trim(ext_msg)),',',& max(1,72-len_trim(ext_msg)-4),'x,a)' - write(ERROR_UNIT,formatString) '│ ',trim(ext_msg), '│' + write(IO_STDERR,formatString) '│ ',trim(ext_msg), '│' endif if (present(el)) & - write(ERROR_UNIT,'(a19,1x,i9,44x,a3)') ' │ at element ',el, '│' + write(IO_STDERR,'(a19,1x,i9,44x,a3)') ' │ at element ',el, '│' if (present(ip)) & - write(ERROR_UNIT,'(a19,1x,i9,44x,a3)') ' │ at IP ',ip, '│' + write(IO_STDERR,'(a19,1x,i9,44x,a3)') ' │ at IP ',ip, '│' if (present(g)) & - write(ERROR_UNIT,'(a19,1x,i9,44x,a3)') ' │ at constituent',g, '│' + write(IO_STDERR,'(a19,1x,i9,44x,a3)') ' │ at constituent',g, '│' if (present(instance)) & - write(ERROR_UNIT,'(a19,1x,i9,44x,a3)') ' │ at instance ',instance, '│' - write(ERROR_UNIT,'(a,69x,a)') ' │', '│' - write(ERROR_UNIT,'(a)') ' └'//IO_DIVIDER//'┘' - flush(ERROR_UNIT) + write(IO_STDERR,'(a19,1x,i9,44x,a3)') ' │ at instance ',instance, '│' + write(IO_STDERR,'(a,69x,a)') ' │', '│' + write(IO_STDERR,'(a)') ' └'//IO_DIVIDER//'┘' + flush(IO_STDERR) call quit(9000+error_ID) !$OMP END CRITICAL (write2out) @@ -628,27 +628,27 @@ subroutine IO_warning(warning_ID,el,ip,g,ext_msg) end select !$OMP CRITICAL (write2out) - write(ERROR_UNIT,'(/,a)') ' ┌'//IO_DIVIDER//'┐' - write(ERROR_UNIT,'(a,24x,a,38x,a)') ' │','warning', '│' - write(ERROR_UNIT,'(a,24x,i3,42x,a)') ' │',warning_ID, '│' - write(ERROR_UNIT,'(a)') ' ├'//IO_DIVIDER//'┤' + write(IO_STDERR,'(/,a)') ' ┌'//IO_DIVIDER//'┐' + write(IO_STDERR,'(a,24x,a,38x,a)') ' │','warning', '│' + write(IO_STDERR,'(a,24x,i3,42x,a)') ' │',warning_ID, '│' + write(IO_STDERR,'(a)') ' ├'//IO_DIVIDER//'┤' write(formatString,'(a,i6.6,a,i6.6,a)') '(1x,a4,a',max(1,len_trim(msg)),',',& max(1,72-len_trim(msg)-4),'x,a)' - write(ERROR_UNIT,formatString) '│ ',trim(msg), '│' + write(IO_STDERR,formatString) '│ ',trim(msg), '│' if (present(ext_msg)) then write(formatString,'(a,i6.6,a,i6.6,a)') '(1x,a4,a',max(1,len_trim(ext_msg)),',',& max(1,72-len_trim(ext_msg)-4),'x,a)' - write(ERROR_UNIT,formatString) '│ ',trim(ext_msg), '│' + write(IO_STDERR,formatString) '│ ',trim(ext_msg), '│' endif if (present(el)) & - write(ERROR_UNIT,'(a19,1x,i9,44x,a3)') ' │ at element ',el, '│' + write(IO_STDERR,'(a19,1x,i9,44x,a3)') ' │ at element ',el, '│' if (present(ip)) & - write(ERROR_UNIT,'(a19,1x,i9,44x,a3)') ' │ at IP ',ip, '│' + write(IO_STDERR,'(a19,1x,i9,44x,a3)') ' │ at IP ',ip, '│' if (present(g)) & - 
write(ERROR_UNIT,'(a19,1x,i9,44x,a3)') ' │ at constituent',g, '│' - write(ERROR_UNIT,'(a,69x,a)') ' │', '│' - write(ERROR_UNIT,'(a)') ' └'//IO_DIVIDER//'┘' - flush(ERROR_UNIT) + write(IO_STDERR,'(a19,1x,i9,44x,a3)') ' │ at constituent',g, '│' + write(IO_STDERR,'(a,69x,a)') ' │', '│' + write(IO_STDERR,'(a)') ' └'//IO_DIVIDER//'┘' + flush(IO_STDERR) !$OMP END CRITICAL (write2out) end subroutine IO_warning diff --git a/src/base64.f90 b/src/base64.f90 index 3a59b7049..2f91334b7 100644 --- a/src/base64.f90 +++ b/src/base64.f90 @@ -27,7 +27,7 @@ contains !-------------------------------------------------------------------------------------------------- subroutine base64_init - print'(/,a)', ' <<<+- base64 init -+>>>'; flush(6) + print'(/,a)', ' <<<+- base64 init -+>>>'; flush(IO_STDOUT) call selfTest diff --git a/src/config.f90 b/src/config.f90 index 50d4e96e8..b10edf013 100644 --- a/src/config.f90 +++ b/src/config.f90 @@ -35,7 +35,7 @@ contains !-------------------------------------------------------------------------------------------------- subroutine config_init - print'(/,a)', ' <<<+- config init -+>>>'; flush(6) + print'(/,a)', ' <<<+- config init -+>>>'; flush(IO_STDOUT) call parse_material call parse_numerics @@ -59,7 +59,7 @@ subroutine parse_material inquire(file=fname,exist=fileExists) if(.not. fileExists) call IO_error(100,ext_msg=fname) endif - print*, 'reading '//fname; flush(6) + print*, 'reading '//fname; flush(IO_STDOUT) config_material => YAML_parse_file(fname) end subroutine parse_material @@ -75,7 +75,7 @@ subroutine parse_numerics config_numerics => emptyDict inquire(file='numerics.yaml', exist=fexist) if (fexist) then - print*, 'reading numerics.yaml'; flush(6) + print*, 'reading numerics.yaml'; flush(IO_STDOUT) config_numerics => YAML_parse_file('numerics.yaml') endif @@ -92,7 +92,7 @@ subroutine parse_debug config_debug => emptyDict inquire(file='debug.yaml', exist=fexist) fileExists: if (fexist) then - print*, 'reading debug.yaml'; flush(6) + print*, 'reading debug.yaml'; flush(IO_STDOUT) config_debug => YAML_parse_file('debug.yaml') endif fileExists diff --git a/src/constitutive.f90 b/src/constitutive.f90 index 3b342e3e6..62846359d 100644 --- a/src/constitutive.f90 +++ b/src/constitutive.f90 @@ -446,7 +446,7 @@ subroutine constitutive_init call damage_init call thermal_init - print'(/,a)', ' <<<+- constitutive init -+>>>'; flush(6) + print'(/,a)', ' <<<+- constitutive init -+>>>'; flush(IO_STDOUT) constitutive_source_maxSizeDotState = 0 PhaseLoop2:do p = 1,phases%length diff --git a/src/constitutive_plastic_disloTungsten.f90 b/src/constitutive_plastic_disloTungsten.f90 index 2bf4fd48e..98a9ce2f6 100644 --- a/src/constitutive_plastic_disloTungsten.f90 +++ b/src/constitutive_plastic_disloTungsten.f90 @@ -100,7 +100,7 @@ module function plastic_disloTungsten_init() result(myPlasticity) myPlasticity = plastic_active('disloTungsten') Ninstance = count(myPlasticity) - print'(a,i2)', ' # instances: ',Ninstance; flush(6) + print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT) if(Ninstance == 0) return print*, 'Cereceda et al., International Journal of Plasticity 78:242–256, 2016' diff --git a/src/constitutive_plastic_dislotwin.f90 b/src/constitutive_plastic_dislotwin.f90 index a25815899..4890316b8 100644 --- a/src/constitutive_plastic_dislotwin.f90 +++ b/src/constitutive_plastic_dislotwin.f90 @@ -147,7 +147,7 @@ module function plastic_dislotwin_init() result(myPlasticity) myPlasticity = plastic_active('dislotwin') Ninstance = count(myPlasticity) - print'(a,i2)', ' # 
instances: ',Ninstance; flush(6) + print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT) if(Ninstance == 0) return print*, 'Ma and Roters, Acta Materialia 52(12):3603–3612, 2004' diff --git a/src/constitutive_plastic_isotropic.f90 b/src/constitutive_plastic_isotropic.f90 index c78d497d6..477938145 100644 --- a/src/constitutive_plastic_isotropic.f90 +++ b/src/constitutive_plastic_isotropic.f90 @@ -71,7 +71,7 @@ module function plastic_isotropic_init() result(myPlasticity) myPlasticity = plastic_active('isotropic') Ninstance = count(myPlasticity) - print'(a,i2)', ' # instances: ',Ninstance; flush(6) + print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT) if(Ninstance == 0) return print*, 'Maiti and Eisenlohr, Scripta Materialia 145:37–40, 2018' diff --git a/src/constitutive_plastic_kinehardening.f90 b/src/constitutive_plastic_kinehardening.f90 index fe17b090e..55e482db6 100644 --- a/src/constitutive_plastic_kinehardening.f90 +++ b/src/constitutive_plastic_kinehardening.f90 @@ -83,7 +83,7 @@ module function plastic_kinehardening_init() result(myPlasticity) myPlasticity = plastic_active('kinehardening') Ninstance = count(myPlasticity) - print'(a,i2)', ' # instances: ',Ninstance; flush(6) + print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT) if(Ninstance == 0) return allocate(param(Ninstance)) diff --git a/src/constitutive_plastic_none.f90 b/src/constitutive_plastic_none.f90 index d62a798cc..ab5f32d3f 100644 --- a/src/constitutive_plastic_none.f90 +++ b/src/constitutive_plastic_none.f90 @@ -35,7 +35,7 @@ module function plastic_none_init() result(myPlasticity) enddo Ninstance = count(myPlasticity) - print'(a,i2)', ' # instances: ',Ninstance; flush(6) + print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT) if(Ninstance == 0) return do p = 1, phases%length diff --git a/src/constitutive_plastic_nonlocal.f90 b/src/constitutive_plastic_nonlocal.f90 index 81e1887c6..21523161b 100644 --- a/src/constitutive_plastic_nonlocal.f90 +++ b/src/constitutive_plastic_nonlocal.f90 @@ -189,7 +189,7 @@ module function plastic_nonlocal_init() result(myPlasticity) myPlasticity = plastic_active('nonlocal') Ninstance = count(myPlasticity) - print'(a,i2)', ' # instances: ',Ninstance; flush(6) + print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT) if(Ninstance == 0) then call geometry_plastic_nonlocal_disable return diff --git a/src/constitutive_plastic_phenopowerlaw.f90 b/src/constitutive_plastic_phenopowerlaw.f90 index b30a4d9df..8cf63a54d 100644 --- a/src/constitutive_plastic_phenopowerlaw.f90 +++ b/src/constitutive_plastic_phenopowerlaw.f90 @@ -92,7 +92,7 @@ module function plastic_phenopowerlaw_init() result(myPlasticity) myPlasticity = plastic_active('phenopowerlaw') Ninstance = count(myPlasticity) - print'(a,i2)', ' # instances: ',Ninstance; flush(6) + print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT) if(Ninstance == 0) return allocate(param(Ninstance)) diff --git a/src/crystallite.f90 b/src/crystallite.f90 index 2ee85024b..392dba277 100644 --- a/src/crystallite.f90 +++ b/src/crystallite.f90 @@ -294,7 +294,7 @@ subroutine crystallite_init print'(a42,1x,i10)', ' # of elements: ', eMax print'(a42,1x,i10)', ' # of integration points/element: ', iMax print'(a42,1x,i10)', 'max # of constituents/integration point: ', cMax - flush(6) + flush(IO_STDOUT) endif #endif @@ -1561,7 +1561,7 @@ subroutine crystallite_restartWrite integer(HID_T) :: fileHandle, groupHandle character(len=pStringLen) :: fileName, datasetName - print*, ' writing field and constitutive data required for 
restart to file';flush(6) + print*, ' writing field and constitutive data required for restart to file';flush(IO_STDOUT) write(fileName,'(a,i0,a)') trim(getSolverJobName())//'_',worldrank,'.hdf5' fileHandle = HDF5_openFile(fileName,'a') diff --git a/src/damage_local.f90 b/src/damage_local.f90 index 3588010b2..af2532184 100644 --- a/src/damage_local.f90 +++ b/src/damage_local.f90 @@ -49,7 +49,7 @@ subroutine damage_local_init homog, & homogDamage - print'(/,a)', ' <<<+- damage_local init -+>>>'; flush(6) + print'(/,a)', ' <<<+- damage_local init -+>>>'; flush(IO_STDOUT) !---------------------------------------------------------------------------------------------- ! read numerics parameter and do sanity check diff --git a/src/element.f90 b/src/element.f90 index 3ccc2fb78..722a7fd96 100644 --- a/src/element.f90 +++ b/src/element.f90 @@ -922,7 +922,7 @@ subroutine tElement_init(self,elemType) self%nIPneighbors = size(self%IPneighbor,1) - print'(/,a)', ' <<<+- element_init -+>>>'; flush(6) + print'(/,a)', ' <<<+- element_init -+>>>'; flush(IO_STDOUT) print*, 'element type: ',self%elemType print*, ' geom type: ',self%geomType diff --git a/src/grid/DAMASK_grid.f90 b/src/grid/DAMASK_grid.f90 index d33d136f7..f0589c3d7 100644 --- a/src/grid/DAMASK_grid.f90 +++ b/src/grid/DAMASK_grid.f90 @@ -24,7 +24,7 @@ program DAMASK_grid use grid_damage_spectral use grid_thermal_spectral use results - + implicit none !-------------------------------------------------------------------------------------------------- @@ -88,7 +88,7 @@ program DAMASK_grid mech_updateCoords procedure(grid_mech_spectral_basic_restartWrite), pointer :: & mech_restartWrite - + external :: & quit class (tNode), pointer :: & @@ -97,18 +97,18 @@ program DAMASK_grid !-------------------------------------------------------------------------------------------------- ! init DAMASK (all modules) - - call CPFEM_initAll - print'(/,a)', ' <<<+- DAMASK_spectral init -+>>>'; flush(6) - + + call CPFEM_initAll + print'(/,a)', ' <<<+- DAMASK_spectral init -+>>>'; flush(IO_STDOUT) + print*, 'Shanthraj et al., Handbook of Mechanics of Materials, 2019' print*, 'https://doi.org/10.1007/978-981-10-6855-3_80' !-------------------------------------------------------------------------------------------------- ! initialize field solver information nActiveFields = 1 - if(any(thermal_type == THERMAL_conduction_ID)) nActiveFields = nActiveFields + 1 - if(any(damage_type == DAMAGE_nonlocal_ID )) nActiveFields = nActiveFields + 1 + if (any(thermal_type == THERMAL_conduction_ID )) nActiveFields = nActiveFields + 1 + if (any(damage_type == DAMAGE_nonlocal_ID )) nActiveFields = nActiveFields + 1 allocate(solres(nActiveFields)) allocate(newLoadCase%ID(nActiveFields)) @@ -123,16 +123,16 @@ program DAMASK_grid !-------------------------------------------------------------------------------------------------- ! 
assign mechanics solver depending on selected type - + debug_grid => config_debug%get('grid',defaultVal=emptyList) - select case (trim(num_grid%get_asString('solver', defaultVal = 'Basic'))) + select case (trim(num_grid%get_asString('solver', defaultVal = 'Basic'))) case ('Basic') mech_init => grid_mech_spectral_basic_init mech_forward => grid_mech_spectral_basic_forward mech_solution => grid_mech_spectral_basic_solution mech_updateCoords => grid_mech_spectral_basic_updateCoords mech_restartWrite => grid_mech_spectral_basic_restartWrite - + case ('Polarisation') if(debug_grid%contains('basic')) & call IO_warning(42, ext_msg='debug Divergence') @@ -141,7 +141,7 @@ program DAMASK_grid mech_solution => grid_mech_spectral_polarisation_solution mech_updateCoords => grid_mech_spectral_polarisation_updateCoords mech_restartWrite => grid_mech_spectral_polarisation_restartWrite - + case ('FEM') if(debug_grid%contains('basic')) & call IO_warning(42, ext_msg='debug Divergence') @@ -150,24 +150,24 @@ program DAMASK_grid mech_solution => grid_mech_FEM_solution mech_updateCoords => grid_mech_FEM_updateCoords mech_restartWrite => grid_mech_FEM_restartWrite - + case default call IO_error(error_ID = 891, ext_msg = trim(num_grid%get_asString('solver'))) - + end select - + !-------------------------------------------------------------------------------------------------- -! reading information from load case file and to sanity checks +! reading information from load case file and to sanity checks fileContent = IO_readlines(trim(interface_loadFile)) if(size(fileContent) == 0) call IO_error(307,ext_msg='No load case specified') - + allocate (loadCases(0)) ! array of load cases do currentLoadCase = 1, size(fileContent) line = fileContent(currentLoadCase) if (IO_isBlank(line)) cycle chunkPos = IO_stringPos(line) - + do i = 1, chunkPos(1) ! reading compulsory parameters for loadcase select case (IO_lc(IO_stringValue(line,chunkPos,i))) case('l','fdot','dotf','f') @@ -180,7 +180,7 @@ program DAMASK_grid enddo if ((N_def /= N_n) .or. (N_n /= N_t) .or. N_n < 1) & ! sanity check call IO_error(error_ID=837,el=currentLoadCase,ext_msg = trim(interface_loadFile)) ! error message for incomplete loadcase - + newLoadCase%stress%myType='p' field = 1 newLoadCase%ID(field) = FIELD_MECH_ID ! mechanical active by default @@ -192,7 +192,7 @@ program DAMASK_grid field = field + 1 newLoadCase%ID(field) = FIELD_DAMAGE_ID endif damageActive - + call newLoadCase%rot%fromEulers(real([0.0,0.0,0.0],pReal)) readIn: do i = 1, chunkPos(1) select case (IO_lc(IO_stringValue(line,chunkPos,i))) @@ -211,7 +211,7 @@ program DAMASK_grid if (temp_maskVector(j)) temp_valueVector(j) = IO_floatValue(line,chunkPos,i+j) ! read value where applicable enddo newLoadCase%deformation%mask = transpose(reshape(temp_maskVector,[ 3,3])) ! mask in 3x3 notation - newLoadCase%deformation%values = math_9to33(temp_valueVector) ! values in 3x3 notation + newLoadCase%deformation%values = math_9to33(temp_valueVector) ! values in 3x3 notation case('p','stress', 's') temp_valueVector = 0.0_pReal do j = 1, 9 @@ -219,7 +219,7 @@ program DAMASK_grid if (temp_maskVector(j)) temp_valueVector(j) = IO_floatValue(line,chunkPos,i+j) ! read value where applicable enddo newLoadCase%stress%mask = transpose(reshape(temp_maskVector,[ 3,3])) - newLoadCase%stress%values = math_9to33(temp_valueVector) + newLoadCase%stress%values = math_9to33(temp_valueVector) case('t','time','delta') ! increment time newLoadCase%time = IO_floatValue(line,chunkPos,i+1) case('n','incs','increments') ! 
number of increments @@ -256,9 +256,9 @@ program DAMASK_grid call newLoadCase%rot%fromMatrix(math_9to33(temp_valueVector)) end select enddo readIn - + newLoadCase%followFormerTrajectory = merge(.true.,.false.,currentLoadCase > 1) ! by default, guess from previous load case - + reportAndCheck: if (worldrank == 0) then write (loadcase_string, '(i0)' ) currentLoadCase print'(/,a,i0)', ' load case: ', currentLoadCase @@ -277,29 +277,29 @@ program DAMASK_grid endif do i = 1, 3; do j = 1, 3 if(newLoadCase%deformation%mask(i,j)) then - write(6,'(2x,f12.7)',advance='no') newLoadCase%deformation%values(i,j) + write(IO_STDOUT,'(2x,f12.7)',advance='no') newLoadCase%deformation%values(i,j) else - write(6,'(2x,12a)',advance='no') ' * ' + write(IO_STDOUT,'(2x,12a)',advance='no') ' * ' endif - enddo; write(6,'(/)',advance='no') + enddo; write(IO_STDOUT,'(/)',advance='no') enddo if (any(newLoadCase%stress%mask .eqv. newLoadCase%deformation%mask)) errorID = 831 ! exclusive or masking only if (any(newLoadCase%stress%mask .and. transpose(newLoadCase%stress%mask) .and. (math_I3<1))) & errorID = 838 ! no rotation is allowed by stress BC print*, ' stress / GPa:' do i = 1, 3; do j = 1, 3 - if(newLoadCase%stress%mask(i,j)) then - write(6,'(2x,f12.7)',advance='no') newLoadCase%stress%values(i,j)*1e-9_pReal + if(newLoadCase%stress%mask(i,j)) then + write(IO_STDOUT,'(2x,f12.7)',advance='no') newLoadCase%stress%values(i,j)*1e-9_pReal else - write(6,'(2x,12a)',advance='no') ' * ' + write(IO_STDOUT,'(2x,12a)',advance='no') ' * ' endif - enddo; write(6,'(/)',advance='no') + enddo; write(IO_STDOUT,'(/)',advance='no') enddo if (any(abs(matmul(newLoadCase%rot%asMatrix(), & transpose(newLoadCase%rot%asMatrix()))-math_I3) > & reshape(spread(tol_math_check,1,9),[ 3,3]))) errorID = 846 ! given rotation matrix contains strain if (any(dNeq(newLoadCase%rot%asMatrix(), math_I3))) & - write(6,'(2x,a,/,3(3(3x,f12.7,1x)/))',advance='no') 'rotation of loadframe:',& + write(IO_STDOUT,'(2x,a,/,3(3(3x,f12.7,1x)/))',advance='no') 'rotation of loadframe:',& transpose(newLoadCase%rot%asMatrix()) if (newLoadCase%time < 0.0_pReal) errorID = 834 ! negative time increment print'(a,f0.3)', ' time: ', newLoadCase%time @@ -322,13 +322,13 @@ program DAMASK_grid select case (loadCases(1)%ID(field)) case(FIELD_MECH_ID) call mech_init - + case(FIELD_THERMAL_ID) call grid_thermal_spectral_init - + case(FIELD_DAMAGE_ID) call grid_damage_spectral_init - + end select enddo @@ -339,22 +339,22 @@ program DAMASK_grid open(newunit=statUnit,file=trim(getSolverJobName())//'.sta',form='FORMATTED',status='REPLACE') write(statUnit,'(a)') 'Increment Time CutbackLevel Converged IterationsNeeded' ! statistics file if (debug_grid%contains('basic')) print'(/,a)', ' header of statistics file written out' - flush(6) + flush(IO_STDOUT) else writeHeader open(newunit=statUnit,file=trim(getSolverJobName())//& '.sta',form='FORMATTED', position='APPEND', status='OLD') endif writeHeader endif - + writeUndeformed: if (interface_restartInc < 1) then print'(/,a)', ' ... writing initial configuration to file ........................' call CPFEM_results(0,0.0_pReal) endif writeUndeformed - + loadCaseLooping: do currentLoadCase = 1, size(loadCases) time0 = time ! load case start time guess = loadCases(currentLoadCase)%followFormerTrajectory ! change of load case? 
homogeneous guess for the first inc - + incLooping: do inc = 1, loadCases(currentLoadCase)%incs totalIncsCounter = totalIncsCounter + 1 @@ -379,13 +379,13 @@ program DAMASK_grid endif endif timeinc = timeinc * real(subStepFactor,pReal)**real(-cutBackLevel,pReal) ! depending on cut back level, decrease time step - + skipping: if (totalIncsCounter <= interface_restartInc) then ! not yet at restart inc? time = time + timeinc ! just advance time, skip already performed calculation guess = .true. ! QUESTION:why forced guessing instead of inheriting loadcase preference else skipping stepFraction = 0 ! fraction scaled by stepFactor**cutLevel - + subStepLooping: do while (stepFraction < subStepFactor**cutBackLevel) remainingLoadCaseTime = loadCases(currentLoadCase)%time+time0 - time time = time + timeinc ! forward target time @@ -402,7 +402,7 @@ program DAMASK_grid write(incInfo,'(4(a,i0))') & 'Increment ',totalIncsCounter,'/',sum(loadCases%incs),& '-', stepFraction,'/',subStepFactor**cutBackLevel - flush(6) + flush(IO_STDOUT) !-------------------------------------------------------------------------------------------------- ! forward fields @@ -414,7 +414,7 @@ program DAMASK_grid deformation_BC = loadCases(currentLoadCase)%deformation, & stress_BC = loadCases(currentLoadCase)%stress, & rotation_BC = loadCases(currentLoadCase)%rot) - + case(FIELD_THERMAL_ID); call grid_thermal_spectral_forward(cutBack) case(FIELD_DAMAGE_ID); call grid_damage_spectral_forward(cutBack) end select @@ -435,9 +435,9 @@ program DAMASK_grid case(FIELD_DAMAGE_ID) solres(field) = grid_damage_spectral_solution(timeinc,timeIncOld) end select - + if (.not. solres(field)%converged) exit ! no solution found - + enddo stagIter = stagIter + 1 stagIterate = stagIter < stagItMax & @@ -471,20 +471,20 @@ program DAMASK_grid if (worldrank == 0) close(statUnit) call quit(0) ! quit endif - + enddo subStepLooping - + cutBackLevel = max(0, cutBackLevel - 1) ! try half number of subincs next inc - + if (all(solres(:)%converged)) then print'(/,a,i0,a)', ' increment ', totalIncsCounter, ' converged' else print'(/,a,i0,a)', ' increment ', totalIncsCounter, ' NOT converged' - endif; flush(6) - + endif; flush(IO_STDOUT) + if (mod(inc,loadCases(currentLoadCase)%outputFrequency) == 0) then ! at output frequency print'(1/,a)', ' ... writing results to file ......................................' - flush(6) + flush(IO_STDOUT) call CPFEM_results(totalIncsCounter,time) endif if (mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0) then @@ -492,17 +492,17 @@ program DAMASK_grid call CPFEM_restartWrite endif endif skipping - + enddo incLooping - + enddo loadCaseLooping - - + + !-------------------------------------------------------------------------------------------------- ! report summary of whole calculation print'(/,a)', ' ###########################################################################' if (worldrank == 0) close(statUnit) - + call quit(0) ! 
no complains ;) - + end program DAMASK_grid diff --git a/src/grid/discretization_grid.f90 b/src/grid/discretization_grid.f90 index 576aaea60..b226c3d0e 100644 --- a/src/grid/discretization_grid.f90 +++ b/src/grid/discretization_grid.f90 @@ -56,7 +56,7 @@ subroutine discretization_grid_init(restart) myGrid !< domain grid of this process integer, dimension(:), allocatable :: & - microstructureAt + materialAt integer :: & j, & @@ -65,12 +65,12 @@ subroutine discretization_grid_init(restart) integer(C_INTPTR_T) :: & devNull, z, z_offset - print'(/,a)', ' <<<+- discretization_grid init -+>>>'; flush(6) + print'(/,a)', ' <<<+- discretization_grid init -+>>>'; flush(IO_STDOUT) if(index(interface_geomFile,'.vtr') /= 0) then - call readVTR(grid,geomSize,origin,microstructureAt) + call readVTR(grid,geomSize,origin,materialAt) else - call readGeom(grid,geomSize,origin,microstructureAt) + call readGeom(grid,geomSize,origin,materialAt) endif print'(/,a,3(i12 ))', ' grid a b c: ', grid @@ -102,10 +102,9 @@ subroutine discretization_grid_init(restart) !-------------------------------------------------------------------------------------------------- ! general discretization - microstructureAt = microstructureAt(product(grid(1:2))*grid3Offset+1: & - product(grid(1:2))*(grid3Offset+grid3)) ! reallocate/shrink in case of MPI + materialAt = materialAt(product(grid(1:2))*grid3Offset+1:product(grid(1:2))*(grid3Offset+grid3)) ! reallocate/shrink in case of MPI - call discretization_init(microstructureAt, & + call discretization_init(materialAt, & IPcoordinates0(myGrid,mySize,grid3Offset), & Nodes0(myGrid,mySize,grid3Offset),& merge((grid(1)+1) * (grid(2)+1) * (grid3+1),& ! write bottom layer @@ -147,7 +146,7 @@ end subroutine discretization_grid_init !> @details important variables have an implicit "save" attribute. Therefore, this function is ! supposed to be called only once! !-------------------------------------------------------------------------------------------------- -subroutine readGeom(grid,geomSize,origin,microstructure) +subroutine readGeom(grid,geomSize,origin,material) integer, dimension(3), intent(out) :: & grid ! grid (across all processes!) @@ -155,7 +154,7 @@ subroutine readGeom(grid,geomSize,origin,microstructure) geomSize, & ! size (across all processes!) origin ! origin (across all processes!) integer, dimension(:), intent(out), allocatable :: & - microstructure + material character(len=:), allocatable :: rawData character(len=65536) :: line @@ -167,7 +166,7 @@ subroutine readGeom(grid,geomSize,origin,microstructure) startPos, endPos, & myStat, & l, & !< line counter - c, & !< counter for # microstructures in line + c, & !< counter for # materials in line o, & !< order of "to" packing e, & !< "element", i.e. spectral collocation point i, j @@ -266,7 +265,7 @@ subroutine readGeom(grid,geomSize,origin,microstructure) if(any(geomSize < 0.0_pReal)) & call IO_error(error_ID = 842, ext_msg='size (readGeom)') - allocate(microstructure(product(grid)), source = -1) ! too large in case of MPI (shrink later, not very elegant) + allocate(material(product(grid)), source = -1) ! too large in case of MPI (shrink later, not very elegant) !-------------------------------------------------------------------------------------------------- ! 
read and interpret content @@ -281,18 +280,18 @@ subroutine readGeom(grid,geomSize,origin,microstructure) noCompression: if (chunkPos(1) /= 3) then c = chunkPos(1) - microstructure(e:e+c-1) = [(IO_intValue(line,chunkPos,i+1), i=0, c-1)] + material(e:e+c-1) = [(IO_intValue(line,chunkPos,i+1), i=0, c-1)] else noCompression compression: if (IO_lc(IO_stringValue(line,chunkPos,2)) == 'of') then c = IO_intValue(line,chunkPos,1) - microstructure(e:e+c-1) = [(IO_intValue(line,chunkPos,3),i = 1,IO_intValue(line,chunkPos,1))] + material(e:e+c-1) = [(IO_intValue(line,chunkPos,3),i = 1,IO_intValue(line,chunkPos,1))] else if (IO_lc(IO_stringValue(line,chunkPos,2)) == 'to') then compression c = abs(IO_intValue(line,chunkPos,3) - IO_intValue(line,chunkPos,1)) + 1 o = merge(+1, -1, IO_intValue(line,chunkPos,3) > IO_intValue(line,chunkPos,1)) - microstructure(e:e+c-1) = [(i, i = IO_intValue(line,chunkPos,1),IO_intValue(line,chunkPos,3),o)] + material(e:e+c-1) = [(i, i = IO_intValue(line,chunkPos,1),IO_intValue(line,chunkPos,3),o)] else compression c = chunkPos(1) - microstructure(e:e+c-1) = [(IO_intValue(line,chunkPos,i+1), i=0, c-1)] + material(e:e+c-1) = [(IO_intValue(line,chunkPos,i+1), i=0, c-1)] endif compression endif noCompression @@ -308,7 +307,7 @@ end subroutine readGeom !> @brief Parse vtk rectilinear grid (.vtr) !> @details https://vtk.org/Wiki/VTK_XML_Formats !-------------------------------------------------------------------------------------------------- -subroutine readVTR(grid,geomSize,origin,microstructure) +subroutine readVTR(grid,geomSize,origin,material) integer, dimension(3), intent(out) :: & grid ! grid (across all processes!) @@ -316,7 +315,7 @@ subroutine readVTR(grid,geomSize,origin,microstructure) geomSize, & ! size (across all processes!) origin ! origin (across all processes!) integer, dimension(:), intent(out), allocatable :: & - microstructure + material character(len=:), allocatable :: fileContent, dataType, headerType logical :: inFile,inGrid,gotCoordinates,gotCellData,compressed @@ -364,11 +363,9 @@ subroutine readVTR(grid,geomSize,origin,microstructure) else if(index(fileContent(startPos:endPos),'<CellData>',kind=pI64) /= 0_pI64) then gotCellData = .true. - startPos = endPos + 2_pI64 do while (index(fileContent(startPos:endPos),'</CellData>',kind=pI64) == 0_pI64) - endPos = startPos + index(fileContent(startPos:),IO_EOL,kind=pI64) - 2_pI64 if(index(fileContent(startPos:endPos),'<Coordinates>',kind=pI64) /= 0_pI64) then gotCoordinates = .true. @@ -415,10 +413,10 @@ subroutine readVTR(grid,geomSize,origin,microstructure) end do - if(.not. allocated(microstructure)) call IO_error(error_ID = 844, ext_msg='materialpoint not found') - if(size(microstructure) /= product(grid)) call IO_error(error_ID = 844, ext_msg='size(materialpoint)') - if(any(geomSize<=0)) call IO_error(error_ID = 844, ext_msg='size') - if(any(grid<1)) call IO_error(error_ID = 844, ext_msg='grid') + if(.not. 
allocated(material)) call IO_error(error_ID = 844, ext_msg='material data not found') + if(size(material) /= product(grid)) call IO_error(error_ID = 844, ext_msg='size(material)') + if(any(geomSize<=0)) call IO_error(error_ID = 844, ext_msg='size') + if(any(grid<1)) call IO_error(error_ID = 844, ext_msg='grid') contains diff --git a/src/grid/grid_damage_spectral.f90 b/src/grid/grid_damage_spectral.f90 index 2a0754c66..27dc5c1bd 100644 --- a/src/grid/grid_damage_spectral.f90 +++ b/src/grid/grid_damage_spectral.f90 @@ -205,10 +205,10 @@ function grid_damage_spectral_solution(timeinc,timeinc_old) result(solution) call VecMax(solution_vec,devNull,phi_max,ierr); CHKERRQ(ierr) if (solution%converged) & print'(/,a)', ' ... nonlocal damage converged .....................................' - write(6,'(/,a,f8.6,2x,f8.6,2x,e11.4,/)',advance='no') ' Minimum|Maximum|Delta Damage = ',& + write(IO_STDOUT,'(/,a,f8.6,2x,f8.6,2x,e11.4,/)',advance='no') ' Minimum|Maximum|Delta Damage = ',& phi_min, phi_max, stagNorm print'(/,a)', ' ===========================================================================' - flush(6) + flush(IO_STDOUT) end function grid_damage_spectral_solution diff --git a/src/grid/grid_mech_FEM.f90 b/src/grid/grid_mech_FEM.f90 index a6f713adc..0fd7315a4 100644 --- a/src/grid/grid_mech_FEM.f90 +++ b/src/grid/grid_mech_FEM.f90 @@ -116,7 +116,7 @@ subroutine grid_mech_FEM_init num_grid, & debug_grid - print'(/,a)', ' <<<+- grid_mech_FEM init -+>>>'; flush(6) + print'(/,a)', ' <<<+- grid_mech_FEM init -+>>>'; flush(IO_STDOUT) !------------------------------------------------------------------------------------------------- ! debugging options @@ -408,7 +408,7 @@ subroutine grid_mech_FEM_restartWrite call DMDAVecGetArrayF90(mech_grid,solution_current,u_current,ierr); CHKERRQ(ierr) call DMDAVecGetArrayF90(mech_grid,solution_lastInc,u_lastInc,ierr); CHKERRQ(ierr) - print*, 'writing solver data required for restart to file'; flush(6) + print*, 'writing solver data required for restart to file'; flush(IO_STDOUT) write(fileName,'(a,a,i0,a)') trim(getSolverJobName()),'_',worldrank,'.hdf5' fileHandle = HDF5_openFile(fileName,'w') @@ -476,7 +476,7 @@ subroutine converged(snes_local,PETScIter,devNull1,devNull2,fnorm,reason,dummy,i print'(a,f12.2,a,es8.2,a,es9.2,a)', ' error stress BC = ', & err_BC/BCTol, ' (',err_BC, ' Pa, tol = ',BCTol,')' print'(/,a)', ' ===========================================================================' - flush(6) + flush(IO_STDOUT) end subroutine converged @@ -510,11 +510,11 @@ subroutine formResidual(da_local,x_local, & totalIter = totalIter + 1 print'(1x,a,3(a,i0))', trim(incInfo), ' @ Iteration ', num%itmin, '≤',totalIter+1, '≤', num%itmax if (debugRotation) & - write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') & + write(IO_STDOUT,'(/,a,/,3(3(f12.7,1x)/))',advance='no') & ' deformation gradient aim (lab) =', transpose(params%rotation_BC%rotate(F_aim,active=.true.)) - write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') & + write(IO_STDOUT,'(/,a,/,3(3(f12.7,1x)/))',advance='no') & ' deformation gradient aim =', transpose(F_aim) - flush(6) + flush(IO_STDOUT) endif newIteration !-------------------------------------------------------------------------------------------------- diff --git a/src/grid/grid_mech_spectral_basic.f90 b/src/grid/grid_mech_spectral_basic.f90 index 4ed41cb7b..8072f49b9 100644 --- a/src/grid/grid_mech_spectral_basic.f90 +++ b/src/grid/grid_mech_spectral_basic.f90 @@ -106,7 +106,7 @@ subroutine grid_mech_spectral_basic_init num_grid, & debug_grid - 
print'(/,a)', ' <<<+- grid_mech_spectral_basic init -+>>>'; flush(6) + print'(/,a)', ' <<<+- grid_mech_spectral_basic init -+>>>'; flush(IO_STDOUT) print*, 'Eisenlohr et al., International Journal of Plasticity 46:37–53, 2013' print*, 'https://doi.org/10.1016/j.ijplas.2012.09.012'//IO_EOL @@ -118,18 +118,18 @@ subroutine grid_mech_spectral_basic_init ! debugging options debug_grid => config_debug%get('grid', defaultVal=emptyList) debugRotation = debug_grid%contains('rotation') - + !------------------------------------------------------------------------------------------------- ! read numerical parameters and do sanity checks num_grid => config_numerics%get('grid',defaultVal=emptyDict) - num%update_gamma = num_grid%get_asBool ('update_gamma', defaultVal=.false.) - num%eps_div_atol = num_grid%get_asFloat('eps_div_atol', defaultVal=1.0e-4_pReal) - num%eps_div_rtol = num_grid%get_asFloat('eps_div_rtol', defaultVal=5.0e-4_pReal) - num%eps_stress_atol = num_grid%get_asFloat('eps_stress_atol',defaultVal=1.0e3_pReal) + num%update_gamma = num_grid%get_asBool ('update_gamma', defaultVal=.false.) + num%eps_div_atol = num_grid%get_asFloat ('eps_div_atol', defaultVal=1.0e-4_pReal) + num%eps_div_rtol = num_grid%get_asFloat ('eps_div_rtol', defaultVal=5.0e-4_pReal) + num%eps_stress_atol = num_grid%get_asFloat ('eps_stress_atol',defaultVal=1.0e3_pReal) num%eps_stress_rtol = num_grid%get_asFloat('eps_stress_rtol',defaultVal=1.0e-3_pReal) - num%itmin = num_grid%get_asInt ('itmin', defaultVal=1) - num%itmax = num_grid%get_asInt ('itmax', defaultVal=250) + num%itmin = num_grid%get_asInt ('itmin',defaultVal=1) + num%itmax = num_grid%get_asInt ('itmax',defaultVal=250) if (num%eps_div_atol <= 0.0_pReal) call IO_error(301,ext_msg='eps_div_atol') if (num%eps_div_rtol < 0.0_pReal) call IO_error(301,ext_msg='eps_div_rtol') @@ -137,7 +137,7 @@ subroutine grid_mech_spectral_basic_init if (num%eps_stress_rtol < 0.0_pReal) call IO_error(301,ext_msg='eps_stress_rtol') if (num%itmax <= 1) call IO_error(301,ext_msg='itmax') if (num%itmin > num%itmax .or. num%itmin < 1) call IO_error(301,ext_msg='itmin') - + !-------------------------------------------------------------------------------------------------- ! set default and user defined options for PETSc call PETScOptionsInsertString(PETSC_NULL_OPTIONS,'-mech_snes_type ngmres',ierr) @@ -154,7 +154,7 @@ subroutine grid_mech_spectral_basic_init ! 
initialize solver specific parts of PETSc call SNESCreate(PETSC_COMM_WORLD,snes,ierr); CHKERRQ(ierr) call SNESSetOptionsPrefix(snes,'mech_',ierr);CHKERRQ(ierr) - localK = 0 + localK = 0 localK(worldrank) = grid3 call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,PETSC_COMM_WORLD,ierr) call DMDACreate3d(PETSC_COMM_WORLD, & @@ -370,7 +370,7 @@ subroutine grid_mech_spectral_basic_restartWrite call DMDAVecGetArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) - print*, 'writing solver data required for restart to file'; flush(6) + print*, 'writing solver data required for restart to file'; flush(IO_STDOUT) write(fileName,'(a,a,i0,a)') trim(getSolverJobName()),'_',worldrank,'.hdf5' fileHandle = HDF5_openFile(fileName,'w') @@ -436,7 +436,7 @@ subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dumm print'(a,f12.2,a,es8.2,a,es9.2,a)', ' error stress BC = ', & err_BC/BCTol, ' (',err_BC, ' Pa, tol = ',BCTol,')' print'(/,a)', ' ===========================================================================' - flush(6) + flush(IO_STDOUT) end subroutine converged @@ -471,11 +471,11 @@ subroutine formResidual(in, F, & totalIter = totalIter + 1 print'(1x,a,3(a,i0))', trim(incInfo), ' @ Iteration ', num%itmin, '≤',totalIter, '≤', num%itmax if (debugRotation) & - write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') & + write(IO_STDOUT,'(/,a,/,3(3(f12.7,1x)/))',advance='no') & ' deformation gradient aim (lab) =', transpose(params%rotation_BC%rotate(F_aim,active=.true.)) - write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') & + write(IO_STDOUT,'(/,a,/,3(3(f12.7,1x)/))',advance='no') & ' deformation gradient aim =', transpose(F_aim) - flush(6) + flush(IO_STDOUT) endif newIteration !-------------------------------------------------------------------------------------------------- @@ -502,7 +502,7 @@ subroutine formResidual(in, F, & !-------------------------------------------------------------------------------------------------- ! constructing residual - residuum = tensorField_real(1:3,1:3,1:grid(1),1:grid(2),1:grid3) ! Gamma*P gives correction towards div(P) = 0, so needs to be zero, too + residuum = tensorField_real(1:3,1:3,1:grid(1),1:grid(2),1:grid3) ! 
Gamma*P gives correction towards div(P) = 0, so needs to be zero, too end subroutine formResidual diff --git a/src/grid/grid_mech_spectral_polarisation.f90 b/src/grid/grid_mech_spectral_polarisation.f90 index b8bbcacd8..f59b68d7a 100644 --- a/src/grid/grid_mech_spectral_polarisation.f90 +++ b/src/grid/grid_mech_spectral_polarisation.f90 @@ -119,7 +119,7 @@ subroutine grid_mech_spectral_polarisation_init num_grid, & debug_grid - print'(/,a)', ' <<<+- grid_mech_spectral_polarisation init -+>>>'; flush(6) + print'(/,a)', ' <<<+- grid_mech_spectral_polarisation init -+>>>'; flush(IO_STDOUT) print*, 'Shanthraj et al., International Journal of Plasticity 66:31–45, 2015' print*, 'https://doi.org/10.1016/j.ijplas.2014.02.006' @@ -428,7 +428,7 @@ subroutine grid_mech_spectral_polarisation_restartWrite F => FandF_tau(0: 8,:,:,:) F_tau => FandF_tau(9:17,:,:,:) - print*, 'writing solver data required for restart to file'; flush(6) + print*, 'writing solver data required for restart to file'; flush(IO_STDOUT) write(fileName,'(a,a,i0,a)') trim(getSolverJobName()),'_',worldrank,'.hdf5' fileHandle = HDF5_openFile(fileName,'w') @@ -500,7 +500,7 @@ subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dumm print '(a,f12.2,a,es8.2,a,es9.2,a)', ' error stress BC = ', & err_BC/BCTol, ' (',err_BC, ' Pa, tol = ',BCTol,')' print'(/,a)', ' ===========================================================================' - flush(6) + flush(IO_STDOUT) end subroutine converged @@ -554,11 +554,11 @@ subroutine formResidual(in, FandF_tau, & totalIter = totalIter + 1 print'(1x,a,3(a,i0))', trim(incInfo), ' @ Iteration ', num%itmin, '≤',totalIter, '≤', num%itmax if(debugRotation) & - write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') & + write(IO_STDOUT,'(/,a,/,3(3(f12.7,1x)/))',advance='no') & ' deformation gradient aim (lab) =', transpose(params%rotation_BC%rotate(F_aim,active=.true.)) - write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') & + write(IO_STDOUT,'(/,a,/,3(3(f12.7,1x)/))',advance='no') & ' deformation gradient aim =', transpose(F_aim) - flush(6) + flush(IO_STDOUT) endif newIteration !-------------------------------------------------------------------------------------------------- diff --git a/src/grid/grid_thermal_spectral.f90 b/src/grid/grid_thermal_spectral.f90 index eeff8c9d6..49be5ad7e 100644 --- a/src/grid/grid_thermal_spectral.f90 +++ b/src/grid/grid_thermal_spectral.f90 @@ -199,10 +199,10 @@ function grid_thermal_spectral_solution(timeinc,timeinc_old) result(solution) call VecMax(solution_vec,devNull,T_max,ierr); CHKERRQ(ierr) if (solution%converged) & print'(/,a)', ' ... thermal conduction converged ..................................' 
- write(6,'(/,a,f8.4,2x,f8.4,2x,f8.4,/)',advance='no') ' Minimum|Maximum|Delta Temperature / K = ',& + write(IO_STDOUT,'(/,a,f8.4,2x,f8.4,2x,f8.4,/)',advance='no') ' Minimum|Maximum|Delta Temperature / K = ',& T_min, T_max, stagNorm print'(/,a)', ' ===========================================================================' - flush(6) + flush(IO_STDOUT) end function grid_thermal_spectral_solution diff --git a/src/grid/spectral_utilities.f90 b/src/grid/spectral_utilities.f90 index 896337bf6..73aaa7789 100644 --- a/src/grid/spectral_utilities.f90 +++ b/src/grid/spectral_utilities.f90 @@ -210,7 +210,7 @@ subroutine spectral_utilities_init if(debugPETSc) print'(3(/,a),/)', & ' Initializing PETSc with debug options: ', & trim(PETScDebug), & - ' add more using the PETSc_Options keyword in numerics.yaml '; flush(6) + ' add more using the PETSc_Options keyword in numerics.yaml '; flush(IO_STDOUT) num_grid => config_numerics%get('grid',defaultVal=emptyDict) @@ -279,7 +279,7 @@ subroutine spectral_utilities_init if (pReal /= C_DOUBLE .or. kind(1) /= C_INT) error stop 'C and Fortran datatypes do not match' call fftw_set_timelimit(num_grid%get_asFloat('fftw_timelimit',defaultVal=-1.0_pReal)) - print*, 'FFTW initialized'; flush(6) + print*, 'FFTW initialized'; flush(IO_STDOUT) !-------------------------------------------------------------------------------------------------- ! MPI allocation @@ -506,7 +506,7 @@ subroutine utilities_fourierGammaConvolution(fieldAim) print'(/,a)', ' ... doing gamma convolution ...............................................' - flush(6) + flush(IO_STDOUT) !-------------------------------------------------------------------------------------------------- ! do the actual spectral method calculation (mechanical equilibrium) @@ -576,7 +576,7 @@ real(pReal) function utilities_divergenceRMS() complex(pReal), dimension(3) :: rescaledGeom print'(/,a)', ' ... calculating divergence ................................................' - flush(6) + flush(IO_STDOUT) rescaledGeom = cmplx(geomSize/scaledGeomSize,0.0_pReal) @@ -620,7 +620,7 @@ real(pReal) function utilities_curlRMS() complex(pReal), dimension(3) :: rescaledGeom print'(/,a)', ' ... calculating curl ......................................................' - flush(6) + flush(IO_STDOUT) rescaledGeom = cmplx(geomSize/scaledGeomSize,0.0_pReal) @@ -700,9 +700,9 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C) if(debugGeneral) then print'(/,a)', ' ... updating masked compliance ............................................' - write(6,'(/,a,/,9(9(2x,f12.7,1x)/))',advance='no') ' Stiffness C (load) / GPa =',& + write(IO_STDOUT,'(/,a,/,9(9(2x,f12.7,1x)/))',advance='no') ' Stiffness C (load) / GPa =',& transpose(temp99_Real)*1.0e-9_pReal - flush(6) + flush(IO_STDOUT) endif do i = 1,9; do j = 1,9 @@ -722,9 +722,9 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C) if (debugGeneral .or. 
errmatinv) then write(formatString, '(i2)') size_reduced formatString = '(/,a,/,'//trim(formatString)//'('//trim(formatString)//'(2x,es9.2,1x)/))' - write(6,trim(formatString),advance='no') ' C * S (load) ', & + write(IO_STDOUT,trim(formatString),advance='no') ' C * S (load) ', & transpose(matmul(c_reduced,s_reduced)) - write(6,trim(formatString),advance='no') ' S (load) ', transpose(s_reduced) + write(IO_STDOUT,trim(formatString),advance='no') ' S (load) ', transpose(s_reduced) if(errmatinv) call IO_error(error_ID=400,ext_msg='utilities_maskedCompliance') endif temp99_real = reshape(unpack(reshape(s_reduced,[size_reduced**2]),reshape(mask,[81]),0.0_pReal),[9,9]) @@ -735,9 +735,9 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C) utilities_maskedCompliance = math_99to3333(temp99_Real) if(debugGeneral) then - write(6,'(/,a,/,9(9(2x,f10.5,1x)/),/)',advance='no') & + write(IO_STDOUT,'(/,a,/,9(9(2x,f10.5,1x)/),/)',advance='no') & ' Masked Compliance (load) * GPa =', transpose(temp99_Real)*1.0e9_pReal - flush(6) + flush(IO_STDOUT) endif end function utilities_maskedCompliance @@ -822,7 +822,7 @@ subroutine utilities_constitutiveResponse(P,P_av,C_volAvg,C_minmaxAvg,& real(pReal), dimension(2) :: valueAndRank !< pair of min/max norm of dPdF to synchronize min/max of dPdF print'(/,a)', ' ... evaluating constitutive response ......................................' - flush(6) + flush(IO_STDOUT) materialpoint_F = reshape(F,[3,3,1,product(grid(1:2))*grid3]) ! set materialpoint target F to estimated field @@ -832,13 +832,13 @@ subroutine utilities_constitutiveResponse(P,P_av,C_volAvg,C_minmaxAvg,& P_av = sum(sum(sum(P,dim=5),dim=4),dim=3) * wgt ! average of P call MPI_Allreduce(MPI_IN_PLACE,P_av,9,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr) if (debugRotation) & - write(6,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress (lab) / MPa =',& + write(IO_STDOUT,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress (lab) / MPa =',& transpose(P_av)*1.e-6_pReal if(present(rotation_BC)) & P_av = rotation_BC%rotate(P_av) - write(6,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress / MPa =',& + write(IO_STDOUT,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress / MPa =',& transpose(P_av)*1.e-6_pReal - flush(6) + flush(IO_STDOUT) dPdF_max = 0.0_pReal dPdF_norm_max = 0.0_pReal @@ -1094,7 +1094,7 @@ subroutine utilities_saveReferenceStiffness fileUnit,ierr if (worldrank == 0) then - print'(a)', ' writing reference stiffness data required for restart to file'; flush(6) + print'(a)', ' writing reference stiffness data required for restart to file'; flush(IO_STDOUT) open(newunit=fileUnit, file=getSolverJobName()//'.C_ref',& status='replace',access='stream',action='write',iostat=ierr) if(ierr /=0) call IO_error(100,ext_msg='could not open file '//getSolverJobName()//'.C_ref') diff --git a/src/homogenization.f90 b/src/homogenization.f90 index cc8df77f4..0e7f1bf3a 100644 --- a/src/homogenization.f90 +++ b/src/homogenization.f90 @@ -186,7 +186,7 @@ subroutine homogenization_init materialpoint_F = materialpoint_F0 ! 
initialize to identity allocate(materialpoint_P(3,3,discretization_nIP,discretization_nElem), source=0.0_pReal) - print'(/,a)', ' <<<+- homogenization init -+>>>'; flush(6) + print'(/,a)', ' <<<+- homogenization init -+>>>'; flush(IO_STDOUT) num%nMPstate = num_homogGeneric%get_asInt ('nMPstate', defaultVal=10) num%subStepMinHomog = num_homogGeneric%get_asFloat('subStepMin', defaultVal=1.0e-3_pReal) diff --git a/src/homogenization_mech_RGC.f90 b/src/homogenization_mech_RGC.f90 index cdd7c05dd..f0485b244 100644 --- a/src/homogenization_mech_RGC.f90 +++ b/src/homogenization_mech_RGC.f90 @@ -95,7 +95,7 @@ module subroutine mech_RGC_init(num_homogMech) print'(/,a)', ' <<<+- homogenization_mech_rgc init -+>>>' Ninstance = count(homogenization_type == HOMOGENIZATION_RGC_ID) - print'(a,i2)', ' # instances: ',Ninstance; flush(6) + print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT) print*, 'Tjahjanto et al., International Journal of Material Forming 2(1):939–942, 2009' print*, 'https://doi.org/10.1007/s12289-009-0619-1'//IO_EOL @@ -247,7 +247,7 @@ module subroutine mech_RGC_partitionDeformation(F,avgF,instance,of) print'(1x,3(e15.8,1x))',(F(i,j,iGrain), j = 1,3) enddo print*,' ' - flush(6) + flush(IO_STDOUT) endif #endif enddo @@ -376,7 +376,7 @@ module procedure mech_RGC_updateState '@ grain ',stresLoc(3),' in component ',stresLoc(1),stresLoc(2) print'(a,e15.8,a,i3,a,i2)',' Max residual: ',residMax, & ' @ iface ',residLoc(1),' in direction ',residLoc(2) - flush(6) + flush(IO_STDOUT) endif #endif @@ -388,7 +388,7 @@ module procedure mech_RGC_updateState mech_RGC_updateState = .true. #ifdef DEBUG if (debugHomog%extensive .and. prm%of_debug == of) & - print*, '... done and happy'; flush(6) + print*, '... done and happy'; flush(IO_STDOUT) #endif !-------------------------------------------------------------------------------------------------- @@ -416,7 +416,7 @@ module procedure mech_RGC_updateState print'(a,e15.8,/)', ' Volume discrepancy: ', dst%volumeDiscrepancy(of) print'(a,e15.8)', ' Maximum relaxation rate: ', dst%relaxationRate_max(of) print'(a,e15.8,/)', ' Average relaxation rate: ', dst%relaxationRate_avg(of) - flush(6) + flush(IO_STDOUT) endif #endif @@ -429,7 +429,7 @@ module procedure mech_RGC_updateState #ifdef DEBUG if (debugHomog%extensive .and. prm%of_debug == of) & - print'(a,/)', ' ... broken'; flush(6) + print'(a,/)', ' ... broken'; flush(IO_STDOUT) #endif return @@ -437,7 +437,7 @@ module procedure mech_RGC_updateState else ! proceed with computing the Jacobian and state update #ifdef DEBUG if (debugHomog%extensive .and. prm%of_debug == of) & - print'(a,/)', ' ... not yet done'; flush(6) + print'(a,/)', ' ... 
not yet done'; flush(IO_STDOUT) #endif endif @@ -499,7 +499,7 @@ module procedure mech_RGC_updateState print'(1x,100(e11.4,1x))',(smatrix(i,j), j = 1,3*nIntFaceTot) enddo print*,' ' - flush(6) + flush(IO_STDOUT) endif #endif @@ -559,7 +559,7 @@ module procedure mech_RGC_updateState print'(1x,100(e11.4,1x))',(pmatrix(i,j), j = 1,3*nIntFaceTot) enddo print*,' ' - flush(6) + flush(IO_STDOUT) endif #endif @@ -578,7 +578,7 @@ module procedure mech_RGC_updateState print'(1x,100(e11.4,1x))',(rmatrix(i,j), j = 1,3*nIntFaceTot) enddo print*,' ' - flush(6) + flush(IO_STDOUT) endif #endif @@ -593,7 +593,7 @@ module procedure mech_RGC_updateState print'(1x,100(e11.4,1x))',(jmatrix(i,j), j = 1,3*nIntFaceTot) enddo print*,' ' - flush(6) + flush(IO_STDOUT) endif #endif @@ -609,7 +609,7 @@ module procedure mech_RGC_updateState print'(1x,100(e11.4,1x))',(jnverse(i,j), j = 1,3*nIntFaceTot) enddo print*,' ' - flush(6) + flush(IO_STDOUT) endif #endif @@ -625,7 +625,7 @@ module procedure mech_RGC_updateState !$OMP CRITICAL (write2out) print'(a,i3,a,i3,a)',' RGC_updateState: ip ',ip,' | el ',el,' enforces cutback' print'(a,e15.8)',' due to large relaxation change = ',maxval(abs(drelax)) - flush(6) + flush(IO_STDOUT) !$OMP END CRITICAL (write2out) endif @@ -636,7 +636,7 @@ module procedure mech_RGC_updateState print'(1x,2(e15.8,1x))', stt%relaxationVector(i,of) enddo print*,' ' - flush(6) + flush(IO_STDOUT) endif #endif diff --git a/src/homogenization_mech_isostrain.f90 b/src/homogenization_mech_isostrain.f90 index 6c5f50b99..5138afa73 100644 --- a/src/homogenization_mech_isostrain.f90 +++ b/src/homogenization_mech_isostrain.f90 @@ -40,7 +40,7 @@ module subroutine mech_isostrain_init print'(/,a)', ' <<<+- homogenization_mech_isostrain init -+>>>' Ninstance = count(homogenization_type == HOMOGENIZATION_ISOSTRAIN_ID) - print'(a,i2)', ' # instances: ',Ninstance; flush(6) + print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT) allocate(param(Ninstance)) ! 
one container of parameters per instance diff --git a/src/homogenization_mech_none.f90 b/src/homogenization_mech_none.f90 index d9426ef50..3cbec5911 100644 --- a/src/homogenization_mech_none.f90 +++ b/src/homogenization_mech_none.f90 @@ -21,7 +21,7 @@ module subroutine mech_none_init print'(/,a)', ' <<<+- homogenization_mech_none init -+>>>' Ninstance = count(homogenization_type == HOMOGENIZATION_NONE_ID) - print'(a,i2)', ' # instances: ',Ninstance; flush(6) + print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT) do h = 1, size(homogenization_type) if (homogenization_type(h) /= HOMOGENIZATION_NONE_ID) cycle diff --git a/src/kinematics_cleavage_opening.f90 b/src/kinematics_cleavage_opening.f90 index d8f25f8b8..23f348831 100644 --- a/src/kinematics_cleavage_opening.f90 +++ b/src/kinematics_cleavage_opening.f90 @@ -49,7 +49,7 @@ module function kinematics_cleavage_opening_init(kinematics_length) result(myKin myKinematics = kinematics_active('cleavage_opening',kinematics_length) Ninstance = count(myKinematics) - print'(a,i2)', ' # instances: ',Ninstance; flush(6) + print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT) if(Ninstance == 0) return phases => config_material%get('phase') diff --git a/src/kinematics_slipplane_opening.f90 b/src/kinematics_slipplane_opening.f90 index 3b04e37c1..660483b90 100644 --- a/src/kinematics_slipplane_opening.f90 +++ b/src/kinematics_slipplane_opening.f90 @@ -52,7 +52,7 @@ module function kinematics_slipplane_opening_init(kinematics_length) result(myKi myKinematics = kinematics_active('slipplane_opening',kinematics_length) Ninstance = count(myKinematics) - print'(a,i2)', ' # instances: ',Ninstance; flush(6) + print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT) if(Ninstance == 0) return phases => config_material%get('phase') diff --git a/src/kinematics_thermal_expansion.f90 b/src/kinematics_thermal_expansion.f90 index 652713aa4..93a48e035 100644 --- a/src/kinematics_thermal_expansion.f90 +++ b/src/kinematics_thermal_expansion.f90 @@ -42,7 +42,7 @@ module function kinematics_thermal_expansion_init(kinematics_length) result(myKi myKinematics = kinematics_active('thermal_expansion',kinematics_length) Ninstance = count(myKinematics) - print'(a,i2)', ' # instances: ',Ninstance; flush(6) + print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT) if(Ninstance == 0) return phases => config_material%get('phase') diff --git a/src/lattice.f90 b/src/lattice.f90 index d0ac07ed6..69305c839 100644 --- a/src/lattice.f90 +++ b/src/lattice.f90 @@ -457,7 +457,7 @@ subroutine lattice_init phase, & elasticity - print'(/,a)', ' <<<+- lattice init -+>>>'; flush(6) + print'(/,a)', ' <<<+- lattice init -+>>>'; flush(IO_STDOUT) phases => config_material%get('phase') Nphases = phases%length diff --git a/src/marc/discretization_marc.f90 b/src/marc/discretization_marc.f90 index 91aa0106c..9696572e5 100644 --- a/src/marc/discretization_marc.f90 +++ b/src/marc/discretization_marc.f90 @@ -52,7 +52,7 @@ subroutine discretization_marc_init type(tElement) :: elem integer, dimension(:), allocatable :: & - microstructureAt + materialAt integer:: & Nnodes, & !< total number of nodes in the mesh Nelems, & !< total number of elements in the mesh @@ -83,7 +83,7 @@ subroutine discretization_marc_init mesh_unitlength = num_commercialFEM%get_asFloat('unitlength',defaultVal=1.0_pReal) ! 
set physical extent of a length unit in mesh if (mesh_unitlength <= 0.0_pReal) call IO_error(301,ext_msg='unitlength') - call inputRead(elem,node0_elem,connectivity_elem,microstructureAt) + call inputRead(elem,node0_elem,connectivity_elem,materialAt) nElems = size(connectivity_elem,2) if (debug_e < 1 .or. debug_e > nElems) call IO_error(602,ext_msg='element') @@ -103,7 +103,7 @@ subroutine discretization_marc_init call buildIPcoordinates(IP_reshaped,reshape(connectivity_cell,[elem%NcellNodesPerCell,& elem%nIPs*nElems]),node0_cell) - call discretization_init(microstructureAt,& + call discretization_init(materialAt,& IP_reshaped,& node0_cell) @@ -172,7 +172,7 @@ end subroutine writeGeometry !-------------------------------------------------------------------------------------------------- !> @brief Read mesh from marc input file !-------------------------------------------------------------------------------------------------- -subroutine inputRead(elem,node0_elem,connectivity_elem,microstructureAt) +subroutine inputRead(elem,node0_elem,connectivity_elem,materialAt) type(tElement), intent(out) :: elem real(pReal), dimension(:,:), allocatable, intent(out) :: & @@ -180,7 +180,7 @@ subroutine inputRead(elem,node0_elem,connectivity_elem,microstructureAt) integer, dimension(:,:), allocatable, intent(out) :: & connectivity_elem integer, dimension(:), allocatable, intent(out) :: & - microstructureAt + materialAt integer :: & fileFormatVersion, & @@ -226,9 +226,9 @@ subroutine inputRead(elem,node0_elem,connectivity_elem,microstructureAt) connectivity_elem = inputRead_connectivityElem(nElems,elem%nNodes,inputFile) - call inputRead_microstructure(microstructureAt, & - nElems,elem%nNodes,nameElemSet,mapElemSet,& - initialcondTableStyle,inputFile) + call inputRead_material(materialAt, & + nElems,elem%nNodes,nameElemSet,mapElemSet,& + initialcondTableStyle,inputFile) end subroutine inputRead @@ -675,13 +675,13 @@ end function inputRead_connectivityElem !-------------------------------------------------------------------------------------------------- -!> @brief Store microstructure ID +!> @brief Store material ID !-------------------------------------------------------------------------------------------------- -subroutine inputRead_microstructure(microstructureAt,& - nElem,nNodes,nameElemSet,mapElemSet,initialcondTableStyle,fileContent) +subroutine inputRead_material(materialAt,& + nElem,nNodes,nameElemSet,mapElemSet,initialcondTableStyle,fileContent) integer, dimension(:), allocatable, intent(out) :: & - microstructureAt + materialAt integer, intent(in) :: & nElem, & nNodes, & !< number of nodes per element @@ -696,7 +696,7 @@ subroutine inputRead_microstructure(microstructureAt,& integer :: i,j,t,sv,myVal,e,nNodesAlreadyRead,l,k,m - allocate(microstructureAt(nElem),source=0) + allocate(materialAt(nElem),source=0) do l = 1, size(fileContent) chunkPos = IO_stringPos(fileContent(l)) @@ -715,7 +715,7 @@ subroutine inputRead_microstructure(microstructureAt,& contInts = continuousIntValues(fileContent(l+k+m+1:),nElem,nameElemSet,mapElemSet,size(nameElemSet)) ! 
get affected elements do i = 1,contInts(1) e = mesh_FEM2DAMASK_elem(contInts(1+i)) - microstructureAt(e) = myVal + materialAt(e) = myVal enddo if (initialcondTableStyle == 0) m = m + 1 enddo @@ -723,9 +723,9 @@ subroutine inputRead_microstructure(microstructureAt,& endif enddo - if(any(microstructureAt < 1)) call IO_error(180) + if(any(materialAt < 1)) call IO_error(180) -end subroutine inputRead_microstructure +end subroutine inputRead_material !-------------------------------------------------------------------------------------------------- diff --git a/src/material.f90 b/src/material.f90 index cabc57835..30c5f09a0 100644 --- a/src/material.f90 +++ b/src/material.f90 @@ -164,7 +164,7 @@ subroutine material_init(restart) material_homogenization character(len=pStringLen) :: sectionName - print'(/,a)', ' <<<+- material init -+>>>'; flush(6) + print'(/,a)', ' <<<+- material init -+>>>'; flush(IO_STDOUT) phases => config_material%get('phase') allocate(material_name_phase(phases%length)) diff --git a/src/math.f90 b/src/math.f90 index b835a35b2..163f4df6a 100644 --- a/src/math.f90 +++ b/src/math.f90 @@ -91,7 +91,7 @@ subroutine math_init class(tNode), pointer :: & num_generic - print'(/,a)', ' <<<+- math init -+>>>'; flush(6) + print'(/,a)', ' <<<+- math init -+>>>'; flush(IO_STDOUT) num_generic => config_numerics%get('generic',defaultVal=emptyDict) randomSeed = num_generic%get_asInt('random_seed', defaultVal = 0) diff --git a/src/mesh/DAMASK_mesh.f90 b/src/mesh/DAMASK_mesh.f90 index 0ba56bef3..bfa8d22ce 100644 --- a/src/mesh/DAMASK_mesh.f90 +++ b/src/mesh/DAMASK_mesh.f90 @@ -78,7 +78,7 @@ program DAMASK_mesh !-------------------------------------------------------------------------------------------------- ! init DAMASK (all modules) call CPFEM_initAll - print'(/,a)', ' <<<+- DAMASK_mesh init -+>>>'; flush(6) + print'(/,a)', ' <<<+- DAMASK_mesh init -+>>>'; flush(IO_STDOUT) !--------------------------------------------------------------------- ! reading field information from numerics file and do sanity checks @@ -299,7 +299,7 @@ program DAMASK_mesh write(incInfo,'(4(a,i0))') & 'Increment ',totalIncsCounter,'/',sum(loadCases%incs),& '-',stepFraction, '/', subStepFactor**cutBackLevel - flush(6) + flush(IO_STDOUT) !-------------------------------------------------------------------------------------------------- ! forward fields @@ -363,7 +363,7 @@ program DAMASK_mesh print'(/,a,i0,a)', ' increment ', totalIncsCounter, ' converged' else print'(/,a,i0,a)', ' increment ', totalIncsCounter, ' NOT converged' - endif; flush(6) + endif; flush(IO_STDOUT) if (mod(inc,loadCases(currentLoadCase)%outputFrequency) == 0) then ! at output frequency print'(/,a)', ' ... writing results to file ......................................' 
diff --git a/src/mesh/FEM_utilities.f90 b/src/mesh/FEM_utilities.f90 index 1e154e533..4d9786112 100644 --- a/src/mesh/FEM_utilities.f90 +++ b/src/mesh/FEM_utilities.f90 @@ -122,7 +122,7 @@ subroutine FEM_utilities_init ' Initializing PETSc with debug options: ', & trim(PETScDebug), & ' add more using the PETSc_Options keyword in numerics.yaml ' - flush(6) + flush(IO_STDOUT) call PetscOptionsClear(PETSC_NULL_OPTIONS,ierr) CHKERRQ(ierr) if(debugPETSc) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,trim(PETSCDEBUG),ierr) diff --git a/src/mesh/mesh_mech_FEM.f90 b/src/mesh/mesh_mech_FEM.f90 index 6c8d4667d..de1f0c687 100644 --- a/src/mesh/mesh_mech_FEM.f90 +++ b/src/mesh/mesh_mech_FEM.f90 @@ -110,7 +110,7 @@ subroutine FEM_mech_init(fieldBC) class(tNode), pointer :: & num_mesh - print'(/,a)', ' <<<+- FEM_mech init -+>>>'; flush(6) + print'(/,a)', ' <<<+- FEM_mech init -+>>>'; flush(IO_STDOUT) !----------------------------------------------------------------------------- ! read numerical parametes and do sanity checks @@ -319,7 +319,7 @@ type(tSolutionState) function FEM_mech_solution( & endif print'(/,a)', ' ===========================================================================' - flush(6) + flush(IO_STDOUT) end function FEM_mech_solution @@ -682,9 +682,9 @@ subroutine FEM_mech_converged(snes_local,PETScIter,xnorm,snorm,fnorm,reason,dumm print'(/,1x,a,a,i0,a,i0,f0.3)', trim(incInfo), & ' @ Iteration ',PETScIter,' mechanical residual norm = ', & int(fnorm/divTol),fnorm/divTol-int(fnorm/divTol) - write(6,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress / MPa =',& + write(IO_STDOUT,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress / MPa =',& transpose(P_av)*1.e-6_pReal - flush(6) + flush(IO_STDOUT) end subroutine FEM_mech_converged diff --git a/src/results.f90 b/src/results.f90 index 686183919..aec90d7be 100644 --- a/src/results.f90 +++ b/src/results.f90 @@ -65,7 +65,7 @@ subroutine results_init(restart) character(len=pStringLen) :: commandLine - print'(/,a)', ' <<<+- results init -+>>>'; flush(6) + print'(/,a)', ' <<<+- results init -+>>>'; flush(IO_STDOUT) print*, 'Diehl et al., Integrating Materials and Manufacturing Innovation 6(1):83–91, 2017' print*, 'https://doi.org/10.1007/s40192-017-0084-5'//IO_EOL diff --git a/src/rotations.f90 b/src/rotations.f90 index 04490fe3c..0b72c1dd5 100644 --- a/src/rotations.f90 +++ b/src/rotations.f90 @@ -104,7 +104,7 @@ contains subroutine rotations_init call quaternions_init - print'(/,a)', ' <<<+- rotations init -+>>>'; flush(6) + print'(/,a)', ' <<<+- rotations init -+>>>'; flush(IO_STDOUT) print*, 'Rowenhorst et al., Modelling and Simulation in Materials Science and Engineering 23:083501, 2015' print*, 'https://doi.org/10.1088/0965-0393/23/8/083501' diff --git a/src/source_damage_anisoBrittle.f90 b/src/source_damage_anisoBrittle.f90 index 6dd58fe5b..7911d6d0a 100644 --- a/src/source_damage_anisoBrittle.f90 +++ b/src/source_damage_anisoBrittle.f90 @@ -53,7 +53,7 @@ module function source_damage_anisoBrittle_init(source_length) result(mySources) mySources = source_active('damage_anisoBrittle',source_length) Ninstance = count(mySources) - print'(a,i2)', ' # instances: ',Ninstance; flush(6) + print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT) if(Ninstance == 0) return phases => config_material%get('phase') diff --git a/src/source_damage_anisoDuctile.f90 b/src/source_damage_anisoDuctile.f90 index 37681a23f..52189c839 100644 --- a/src/source_damage_anisoDuctile.f90 +++ b/src/source_damage_anisoDuctile.f90 @@ 
-47,7 +47,7 @@ module function source_damage_anisoDuctile_init(source_length) result(mySources) mySources = source_active('damage_anisoDuctile',source_length) Ninstance = count(mySources) - print'(a,i2)', ' # instances: ',Ninstance; flush(6) + print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT) if(Ninstance == 0) return phases => config_material%get('phase') diff --git a/src/source_damage_isoBrittle.f90 b/src/source_damage_isoBrittle.f90 index c6d0ada99..714e71ef1 100644 --- a/src/source_damage_isoBrittle.f90 +++ b/src/source_damage_isoBrittle.f90 @@ -43,7 +43,7 @@ module function source_damage_isoBrittle_init(source_length) result(mySources) mySources = source_active('damage_isoBrittle',source_length) Ninstance = count(mySources) - print'(a,i2)', ' # instances: ',Ninstance; flush(6) + print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT) if(Ninstance == 0) return phases => config_material%get('phase') diff --git a/src/source_damage_isoDuctile.f90 b/src/source_damage_isoDuctile.f90 index 1c1c53fd0..493183d75 100644 --- a/src/source_damage_isoDuctile.f90 +++ b/src/source_damage_isoDuctile.f90 @@ -45,7 +45,7 @@ module function source_damage_isoDuctile_init(source_length) result(mySources) mySources = source_active('damage_isoDuctile',source_length) Ninstance = count(mySources) - print'(a,i2)', ' # instances: ',Ninstance; flush(6) + print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT) if(Ninstance == 0) return phases => config_material%get('phase') diff --git a/src/source_thermal_dissipation.f90 b/src/source_thermal_dissipation.f90 index 60fde7f28..5cc740424 100644 --- a/src/source_thermal_dissipation.f90 +++ b/src/source_thermal_dissipation.f90 @@ -41,7 +41,7 @@ module function source_thermal_dissipation_init(source_length) result(mySources) mySources = source_active('thermal_dissipation',source_length) Ninstance = count(mySources) - print'(a,i2)', ' # instances: ',Ninstance; flush(6) + print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT) if(Ninstance == 0) return phases => config_material%get('phase') diff --git a/src/source_thermal_externalheat.f90 b/src/source_thermal_externalheat.f90 index e7bfea254..4a644f53b 100644 --- a/src/source_thermal_externalheat.f90 +++ b/src/source_thermal_externalheat.f90 @@ -45,7 +45,7 @@ module function source_thermal_externalheat_init(source_length) result(mySources mySources = source_active('thermal_externalheat',source_length) Ninstance = count(mySources) - print'(a,i2)', ' # instances: ',Ninstance; flush(6) + print'(a,i2)', ' # instances: ',Ninstance; flush(IO_STDOUT) if(Ninstance == 0) return phases => config_material%get('phase')
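The pattern applied throughout the Fortran sources above replaces hard-coded unit numbers (flush(6), write(6,...), write(ERROR_UNIT,...)) with the named units IO_STDOUT and IO_STDERR. A minimal sketch of that convention follows, assuming the constants simply alias the named units from ISO_FORTRAN_ENV; the module name IO_units_sketch and the routine sketch_init are hypothetical and not taken from the patch.

! Minimal illustrative sketch (hypothetical module name), assuming IO_STDOUT/IO_STDERR
! alias the processor-defined units from ISO_FORTRAN_ENV instead of the magic unit 6.
module IO_units_sketch
  use, intrinsic :: ISO_FORTRAN_ENV, only: OUTPUT_UNIT, ERROR_UNIT
  implicit none
  integer, parameter :: IO_STDOUT = OUTPUT_UNIT, &                                                  ! standard output
                        IO_STDERR = ERROR_UNIT                                                      ! standard error (error/warning boxes)
contains
  subroutine sketch_init
    print'(/,a)', ' <<<+- sketch init -+>>>'; flush(IO_STDOUT)                                      ! same idiom as the *_init routines above
  end subroutine sketch_init
end module IO_units_sketch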