From 07d9ef9ba120b29a71e6407da64575e3e2dbb5d5 Mon Sep 17 00:00:00 2001
From: chen
Date: Fri, 14 Oct 2016 10:41:35 -0400
Subject: [PATCH 01/35] change indentation to 4 sapces

PEP8 checking suggestions
---
 lib/damask/h5table.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/lib/damask/h5table.py b/lib/damask/h5table.py
index f0c3fd72f..94f3421ae 100644
--- a/lib/damask/h5table.py
+++ b/lib/damask/h5table.py
@@ -18,9 +18,9 @@ import xml.etree.cElementTree as ET
 # on Python 2&3                                                     #
 # ----------------------------------------------------------------  #
 try:
-    test=isinstance('test', unicode)
+    test = isinstance('test', unicode)
 except(NameError):
-    unicode=str
+    unicode = str


 def lables_to_path(label, dsXMLPath=None):

From 353b5b6994c30fcbb91790cd0b5758ea389cd045 Mon Sep 17 00:00:00 2001
From: chen
Date: Fri, 14 Oct 2016 10:42:03 -0400
Subject: [PATCH 02/35] avoid doctring as argument

---
 processing/post/vtk_addRectilinearGridData.py | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/processing/post/vtk_addRectilinearGridData.py b/processing/post/vtk_addRectilinearGridData.py
index 78d599f0b..4848e0efc 100755
--- a/processing/post/vtk_addRectilinearGridData.py
+++ b/processing/post/vtk_addRectilinearGridData.py
@@ -6,6 +6,7 @@ import damask
 from vtk.util import numpy_support
 from collections import defaultdict
 from optparse import OptionParser
+from damask.h5table import lables_to_path

 scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID = ' '.join([scriptName,damask.version])
@@ -14,10 +15,12 @@ scriptID = ' '.join([scriptName,damask.version])
 # MAIN
 # --------------------------------------------------------------------

-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
-Add scalars, vectors, and/or an RGB tuple from an ASCIItable to existing VTK rectilinear grid (.vtr/.vtk).
-
-""", version = scriptID)
+msg = "Add scalars, vectors, and/or an RGB tuple from"
+msg += "an ASCIItable to existing VTK rectilinear grid (.vtr/.vtk)."
+parser = OptionParser(option_class=damask.extendableOption,
+                      usage='%prog options [file[s]]',
+                      description = msg,
+                      version = scriptID)

 parser.add_option(      '--vtk',
                   dest = 'vtk',

From 81ada5ffde06456c62b73a3dd6e421ec28be8c83 Mon Sep 17 00:00:00 2001
From: Test User
Date: Fri, 14 Oct 2016 16:42:18 +0200
Subject: [PATCH 03/35] updated version information after successful test of
 v2.0.1-213-g75149cc

---
 VERSION | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/VERSION b/VERSION
index fa2ed41e2..da01b9df1 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-v2.0.1-201-g7278605
+v2.0.1-213-g75149cc

From e5e2eef375ef619a8f98678c4e7813e8726fa0d2 Mon Sep 17 00:00:00 2001
From: chen
Date: Fri, 14 Oct 2016 10:45:42 -0400
Subject: [PATCH 04/35] add script generate vtr file from HDF5

following "let-it-fail" design
---
 processing/post/h5_vtkRectilinearGrid.py | 135 +++++++++++++++++++++++
 1 file changed, 135 insertions(+)
 create mode 100755 processing/post/h5_vtkRectilinearGrid.py

diff --git a/processing/post/h5_vtkRectilinearGrid.py b/processing/post/h5_vtkRectilinearGrid.py
new file mode 100755
index 000000000..e8a615986
--- /dev/null
+++ b/processing/post/h5_vtkRectilinearGrid.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python2.7
+# -*- coding: UTF-8 no BOM -*-
+
+# ------------------------------------------------------------------ #
+# NOTE:                                                               #
+#   1. It might be a good idea to separate IO and calculation.       #
+#   2. Some of the calculation could be useful in other situations,  #
+#      why not build a math_util, or math_sup module that contains   #
+#      all the useful functions.                                      #
+# ------------------------------------------------------------------ #

+import os
+import vtk
+import numpy as np
+import damask
+from optparse import OptionParser
+
+scriptName = os.path.splitext(os.path.basename(__file__))[0]
+scriptID = ' '.join([scriptName, damask.version])
+
+
+# ----- HELPER FUNCTION ----- #
+def getMeshFromXYZ(xyzArray, mode):
+    """calc Vx,Vy,Vz vectors for vtk rectangular mesh"""
+    # NOTE:
+    # --> np.unique will automatically sort the list
+    # --> although not exactly n(1), but since mesh dimension should
+    #     small anyway, so this is still light weight.
+    dim = xyzArray.shape[1]  # 2D:2, 3D:3
+    coords = [np.unique(xyzArray[:, i]) for i in xrange(dim)]
+
+    if mode == 'cell':
+        # since x, y, z might now have the same number of elements,
+        # we have to deal with them individually
+        for ri in xrange(dim):
+            vctr_pt = coords[ri]
+            vctr_cell = np.empty(len(vctr_pt)+1)
+            # calculate first and last end point
+            vctr_cell[0] = vctr_pt[0] - 0.5*abs(vctr_pt[1] - vctr_pt[0])
+            vctr_cell[-1] = vctr_pt[-1] + 0.5*abs(vctr_pt[-2] - vctr_pt[-1])
+            for cj in xrange(1, len(vctr_cell)-1):
+                vctr_cell[cj] = 0.5*(vctr_pt[cj-1] + vctr_pt[cj])
+            # update the coords
+            coords[ri] = vctr_cell
+
+    if dim < 3:
+        coords.append([0])  # expand to a 3D with 0 for z
+
+    # auxiliary description
+    grid = np.array(map(len, coords), 'i')
+    N = grid.prod() if mode == 'point' else (grid-1).prod()
+    return coords, grid, N
+
+# --------------------------------------------------------------------
+# MAIN
+# --------------------------------------------------------------------
+
+msg = "Create regular voxel grid from points in an ASCIItable."
+parser = OptionParser(option_class=damask.extendableOption,
+                      usage='%prog options [file[s]]',
+                      description=msg,
+                      version=scriptID)
+
+parser.add_option('-m',
+                  '--mode',
+                  dest='mode',
+                  metavar='string',
+                  type='choice', choices=['cell', 'point'],
+                  help='cell-centered or point-centered coordinates')
+parser.add_option('-p',
+                  '--pos', '--position',
+                  dest='pos',
+                  type='string', metavar='string',
+                  help='label of coordinates [%default]')
+
+parser.set_defaults(mode='cell',
+                    pos='pos')
+
+(options, filenames) = parser.parse_args()
+
+# ----- loop over input files ----- #
+for name in filenames:
+    try:
+        h5f = damask.H5Table(name, new_file=False)
+    except:
+        continue
+    damask.util.report(scriptName, name)
+
+    # ----- read xyzArray from HDF5 file ----- #
+    xyzArray = h5f.get_data(options.pos)
+
+    # ----- figure out size and grid ----- #
+    coords, grid, N = getMeshFromXYZ(xyzArray, options.mode)
+
+    # ----- process data ----- #
+    rGrid = vtk.vtkRectilinearGrid()
+    # WARNING: list expansion does not work here as these are
+    #          just pointers for a vtk instance. Simply put,
+    #          DON't USE
+    #              [] *
+    coordArray = [vtk.vtkDoubleArray(),
+                  vtk.vtkDoubleArray(),
+                  vtk.vtkDoubleArray()]
+
+    rGrid.SetDimensions(*grid)
+    for i, points in enumerate(coords):
+        for point in points:
+            coordArray[i].InsertNextValue(point)
+
+    rGrid.SetXCoordinates(coordArray[0])
+    rGrid.SetYCoordinates(coordArray[1])
+    rGrid.SetZCoordinates(coordArray[2])
+
+    # ----- output result ----- #
+    dirPath = os.path.split(name)[0]
+    if name:
+        writer = vtk.vtkXMLRectilinearGridWriter()
+        writer.SetCompressorTypeToZLib()
+        writer.SetDataModeToBinary()
+        # getting the name is a little bit tricky
+        vtkFileName = os.path.splitext(os.path.split(name)[1])[0]
+        vtkFileName += '_{}({})'.format(options.pos, options.mode)
+        vtkFileName += '.' + writer.GetDefaultFileExtension()
+        writer.SetFileName(os.path.join(dirPath, vtkFileName))
+    else:
+        writer = vtk.vtkDataSetWriter()
+        writer.SetHeader('# powered by '+scriptID)
+        writer.WriteToOutputStringOn()
+
+    if vtk.VTK_MAJOR_VERSION <= 5:
+        writer.SetInput(rGrid)
+    else:
+        writer.SetInputData(rGrid)
+
+    writer.Write()

From 2f2490e784d453b39c1c92200fdf64324c862764 Mon Sep 17 00:00:00 2001
From: chen
Date: Fri, 14 Oct 2016 10:48:08 -0400
Subject: [PATCH 05/35] remove unused import

---
 processing/post/vtk_addRectilinearGridData.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/processing/post/vtk_addRectilinearGridData.py b/processing/post/vtk_addRectilinearGridData.py
index 4848e0efc..4c068b19d 100755
--- a/processing/post/vtk_addRectilinearGridData.py
+++ b/processing/post/vtk_addRectilinearGridData.py
@@ -6,7 +6,6 @@ import damask
 from vtk.util import numpy_support
 from collections import defaultdict
 from optparse import OptionParser
-from damask.h5table import lables_to_path

 scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID = ' '.join([scriptName,damask.version])

From 87b857d3079eeb41bcf391ea03ca45a25ec65c00 Mon Sep 17 00:00:00 2001
From: chen
Date: Fri, 14 Oct 2016 12:05:36 -0400
Subject: [PATCH 06/35] update some comments

---
 lib/damask/h5table.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/damask/h5table.py b/lib/damask/h5table.py
index 94f3421ae..7b987fad4 100644
--- a/lib/damask/h5table.py
+++ b/lib/damask/h5table.py
@@ -63,7 +63,7 @@ class H5Table(object):
     ------
     del_entry()  -- Force delete attributes/group/datasets (Dangerous)
     get_attr()   -- Return attributes if possible
-    add_attr()   -- Add NEW attributes to dataset/group (please delete old first!)
+    add_attr()   -- Add NEW attributes to dataset/group (no force overwrite)
     get_data()   -- Retrieve data in numpy.ndarray
     add_data()   -- Add dataset to H5 file
     get_cmdlog() -- Return the command used to generate the data if possible.
From 1f01dce8626a97987f0d48a1bacfc1ec43018981 Mon Sep 17 00:00:00 2001 From: chen Date: Fri, 14 Oct 2016 12:06:09 -0400 Subject: [PATCH 07/35] use 4 space indentation --- processing/post/ascii2hdf5.py | 93 ++++++++++++++++++----------------- 1 file changed, 49 insertions(+), 44 deletions(-) diff --git a/processing/post/ascii2hdf5.py b/processing/post/ascii2hdf5.py index 40360ace8..5442cd53b 100755 --- a/processing/post/ascii2hdf5.py +++ b/processing/post/ascii2hdf5.py @@ -33,44 +33,44 @@ scriptID = ' '.join([scriptName,damask.version]) # ----- helper function ----- # def get_rectMshVectors(xyz_array, posNum): - """take in a xyz array from rectangular mesh and figure out Vx, Vy, Vz""" - # need some improvement, and only works for rectangular grid - v = sorted(list(set(xyz_array[:, posNum]))) - v_interval = (v[2]+v[1])/2.0 - (v[1]+v[0])/2.0 - v_start = (v[1]+v[0])/2.0 - v_interval - v_end = (v[-1]+v[-2])/2.0 + v_interval - V = np.linspace(v_start, v_end, len(v)+1) - return V + """take in a xyz array from rectangular mesh and figure out Vx, Vy, Vz""" + # need some improvement, and only works for rectangular grid + v = sorted(list(set(xyz_array[:, posNum]))) + v_interval = (v[2]+v[1])/2.0 - (v[1]+v[0])/2.0 + v_start = (v[1]+v[0])/2.0 - v_interval + v_end = (v[-1]+v[-2])/2.0 + v_interval + V = np.linspace(v_start, v_end, len(v)+1) + return V # ----- MAIN ---- # desp_msg = "Convert DAMASK ascii table to HDF5 file" parser = OptionParser(option_class=damask.extendableOption, - usage='%prog options [file[s]]', - description = desp_msg, - version = scriptID) + usage='%prog options [file[s]]', + description = desp_msg, + version = scriptID) parser.add_option('-D', '--DefinitionFile', - dest = 'storage definition file', - type = 'string', - metavar = 'string', - help = 'definition file for H5 data storage') + dest = 'storage definition file', + type = 'string', + metavar = 'string', + help = 'definition file for H5 data storage') parser.add_option('-p', - '--pos', '--position', - dest = 'pos', - type = 'string', metavar = 'string', - help = 'label of coordinates [%default]') + '--pos', '--position', + dest = 'pos', + type = 'string', metavar = 'string', + help = 'label of coordinates [%default]') parser.set_defaults(DefinitionFile='default', - pos='pos') + pos='pos') (options,filenames) = parser.parse_args() filename = filenames[0] if options.DefinitionFile == 'default': - defFile = None + defFile = None else: - defFile = options.DefinitionFile + defFile = options.DefinitionFile # ----- read in data using DAMASK ASCII table class ----- # asciiTable = damask.ASCIItable(name=filename, buffered=False) @@ -96,9 +96,9 @@ mshGridDim = [len(Vx)-1, len(Vy)-1, len(Vz)-1] # force remove existing HDF5 file h5fName = filename.replace(".txt", ".h5") try: - os.remove(h5fName) + os.remove(h5fName) except OSError: - pass + pass h5f = damask.H5Table(h5fName, new_file=True, dsXMLFile=defFile) @@ -112,24 +112,29 @@ h5f.add_data("Vz", Vz) # add the rest of data from table labelsProcessed = ['inc'] for fi in xrange(len(labels)): - featureName = labels[fi] - # remove trouble maker "("" and ")" from label/feature name - if "(" in featureName: featureName = featureName.replace("(", "") - if ")" in featureName: featureName = featureName.replace(")", "") - # skip increment and duplicated columns in the ASCII table - if featureName in labelsProcessed: continue + featureName = labels[fi] + # remove trouble maker "("" and ")" from label/feature name + if "(" in featureName: + featureName = featureName.replace("(", "") + if ")" in 
featureName: + featureName = featureName.replace(")", "") + # skip increment and duplicated columns in the ASCII table + if featureName in labelsProcessed: + continue - featureIdx = labels_idx[fi] - featureDim = featuresDim[fi] - # grab the data hook - dataset = fullTable[:, featureIdx:featureIdx+featureDim] - # mapping 2D data onto a 3D rectangular mesh to get 4D data - # WARNING: In paraview, the data for a recmesh is mapped as: - # --> len(z), len(y), len(x), size(data) - # dataset = dataset.reshape((mshGridDim[0], mshGridDim[1], mshGridDim[2], - # dataset.shape[1])) - # write out data - print "adding {}...".format(featureName) - h5f.add_data(featureName, dataset) - # write down the processed label - labelsProcessed.append(featureName) + featureIdx = labels_idx[fi] + featureDim = featuresDim[fi] + # grab the data hook + dataset = fullTable[:, featureIdx:featureIdx+featureDim] + # mapping 2D data onto a 3D rectangular mesh to get 4D data + # WARNING: In paraview, the data for a recmesh is mapped as: + # --> len(z), len(y), len(x), size(data) + # dataset = dataset.reshape((mshGridDim[0], + # mshGridDim[1], + # mshGridDim[2], + # dataset.shape[1])) + # write out data + print "adding {}...".format(featureName) + h5f.add_data(featureName, dataset) + # write down the processed label + labelsProcessed.append(featureName) From 33de9cf2b98a41af0a07e46e7631c5ab49bb66fe Mon Sep 17 00:00:00 2001 From: chen Date: Fri, 14 Oct 2016 12:06:59 -0400 Subject: [PATCH 08/35] script for adding data from HDF5 to vtk file --- .../post/h5_vtkAddRectilinearGridData.py | 191 ++++++++++++++++++ 1 file changed, 191 insertions(+) create mode 100755 processing/post/h5_vtkAddRectilinearGridData.py diff --git a/processing/post/h5_vtkAddRectilinearGridData.py b/processing/post/h5_vtkAddRectilinearGridData.py new file mode 100755 index 000000000..1c0492f53 --- /dev/null +++ b/processing/post/h5_vtkAddRectilinearGridData.py @@ -0,0 +1,191 @@ +#!/usr/bin/env python2.7 +# -*- coding: UTF-8 no BOM -*- + +import os +import vtk +import damask +from vtk.util import numpy_support +from optparse import OptionParser + +scriptName = os.path.splitext(os.path.basename(__file__))[0] +scriptID = ' '.join([scriptName, damask.version]) + + +# -------------------------------------------------------------------- +# MAIN +# -------------------------------------------------------------------- +msg = "Add scalars, vectors, and/or an RGB tuple from" +msg += "an HDF5 to existing VTK rectilinear grid (.vtr/.vtk)." 
+parser = OptionParser(option_class=damask.extendableOption, + usage='%prog options [file[s]]', + description=msg, + version=scriptID) +parser.add_option('--vtk', + dest='vtk', + type='string', metavar='string', + help='VTK file name') +parser.add_option('--inplace', + dest='inplace', + action='store_true', + help='modify VTK file in-place') +parser.add_option('-r', '--render', + dest='render', + action='store_true', + help='open output in VTK render window') +parser.add_option('-d', '--data', + dest='data', + action='extend', metavar='', + help='scalar/vector value(s) label(s)') +parser.add_option('-t', '--tensor', + dest='tensor', + action='extend', metavar='', + help='tensor (3x3) value label(s)') +parser.add_option('-c', '--color', + dest='color', + action='extend', metavar='', + help='RGB color tuple label') +parser.add_option('-m', + '--mode', + dest='mode', + metavar='string', + type='choice', choices=['cell', 'point'], + help='cell-centered or point-centered coordinates') + +parser.set_defaults(data=[], + tensor=[], + color=[], + mode='cell', + inplace=False, + render=False) + +(options, filenames) = parser.parse_args() + +# ----- Legacy VTK format support ----- # +if os.path.splitext(options.vtk)[1] == '.vtr': + reader = vtk.vtkXMLRectilinearGridReader() + reader.SetFileName(options.vtk) + reader.Update() + rGrid = reader.GetOutput() +elif os.path.splitext(options.vtk)[1] == '.vtk': + reader = vtk.vtkGenericDataObjectReader() + reader.SetFileName(options.vtk) + reader.Update() + rGrid = reader.GetRectilinearGridOutput() +else: + parser.error('Unsupported VTK file type extension.') + +Npoints = rGrid.GetNumberOfPoints() +Ncells = rGrid.GetNumberOfCells() + +# ----- Summary output (Sanity Check) ----- # +msg = '{}: {} points and {} cells...'.format(options.vtk, + Npoints, + Ncells) +damask.util.croak(msg) + +# ----- Read HDF5 file ----- # +# NOTE: +# --> It is possible in the future we are trying to add data +# from different increment into the same VTK file, but +# this feature is not supported for the moment. +# --> Let it fail, if the HDF5 is invalid, python interpretor +# --> should be able to catch this error. +h5f = damask.H5Table(filenames[0], new_file=False) + +# ----- Process data ----- # +featureToAdd = {'data': options.data, + 'tensor': options.tensor, + 'color': options.color} +VTKarray = {} # store all vtkData in dict, then ship them to file +for dataType in featureToAdd.keys(): + featureNames = featureToAdd[dataType] + for featureName in featureNames: + VTKtype = vtk.VTK_DOUBLE + VTKdata = h5f.get_data(featureName) + if dataType == 'color': + VTKtype = vtk.VTK_UNSIGNED_CHAR + VTKdata = (VTKdata*255).astype(int) + elif dataType == 'tensor': + # Force symmetries tensor type data + VTKdata[:, 1] = VTKdata[:, 3] = 0.5*(VTKdata[:, 1]+VTKdata[:, 3]) + VTKdata[:, 2] = VTKdata[:, 6] = 0.5*(VTKdata[:, 2]+VTKdata[:, 6]) + VTKdata[:, 5] = VTKdata[:, 7] = 0.5*(VTKdata[:, 5]+VTKdata[:, 7]) + # use vtk build-in numpy support to add data (much faster) + # NOTE: + # --> deep copy is necessary here, otherwise memory leak could occur + VTKarray[featureName] = numpy_support.numpy_to_vtk(num_array=VTKdata, + deep=True, + array_type=VTKtype) + VTKarray[featureName].SetName(featureName) + +# ----- ship data to vtkGrid ----- # +mode = options.mode +damask.util.croak('{} mode...'.format(mode)) + +# NOTE: +# --> For unknown reason, Paraview only recognize one +# tensor attributes per cell, thus it would be safe +# to only add one attributes as tensor. 
+for dataType in featureToAdd.keys(): + featureNames = featureToAdd[dataType] + for featureName in featureNames: + if dataType == 'color': + if mode == 'cell': + rGrid.GetCellData().SetScalars(VTKarray[featureName]) + elif mode == 'point': + rGrid.GetPointData().SetScalars(VTKarray[featureName]) + elif dataType == 'tensor': + if mode == 'cell': + rGrid.GetCellData().SetTensors(VTKarray[featureName]) + elif mode == 'point': + rGrid.GetPointData().SetTensors(VTKarray[featureName]) + else: + if mode == 'cell': + rGrid.GetCellData().AddArray(VTKarray[featureName]) + elif mode == 'point': + rGrid.GetPointData().AddArray(VTKarray[featureName]) + +rGrid.Modified() +if vtk.VTK_MAJOR_VERSION <= 5: + rGrid.Update() + +# ----- write Grid to VTK file ----- # +writer = vtk.vtkXMLRectilinearGridWriter() +writer.SetDataModeToBinary() +writer.SetCompressorTypeToZLib() +vtkFileN = os.path.splitext(options.vtk)[0] +vtkExtsn = '.vtr' if options.inplace else '_added.vtr' +writer.SetFileName(vtkFileN+vtkExtsn) +if vtk.VTK_MAJOR_VERSION <= 5: + writer.SetInput(rGrid) +else: + writer.SetInputData(rGrid) +writer.Write() + +# ----- render results from script ----- # +if options.render: + mapper = vtk.vtkDataSetMapper() + mapper.SetInputData(rGrid) + actor = vtk.vtkActor() + actor.SetMapper(mapper) + + # Create the graphics structure. The renderer renders into the + # render window. The render window interactor captures mouse events + # and will perform appropriate camera or actor manipulation + # depending on the nature of the events. + + ren = vtk.vtkRenderer() + + renWin = vtk.vtkRenderWindow() + renWin.AddRenderer(ren) + + ren.AddActor(actor) + ren.SetBackground(1, 1, 1) + renWin.SetSize(200, 200) + + iren = vtk.vtkRenderWindowInteractor() + iren.SetRenderWindow(renWin) + + iren.Initialize() + renWin.Render() + iren.Start() From 05a3b569fc30ee53a4a73a25e6883d7dd5d944f7 Mon Sep 17 00:00:00 2001 From: chen Date: Fri, 14 Oct 2016 12:26:46 -0400 Subject: [PATCH 09/35] adding parallel version of addCalc for H5 table --- processing/post/h5_addCalculation.py | 56 ++++++++++++++++++++++++++++ 1 file changed, 56 insertions(+) create mode 100755 processing/post/h5_addCalculation.py diff --git a/processing/post/h5_addCalculation.py b/processing/post/h5_addCalculation.py new file mode 100755 index 000000000..0ceeeefea --- /dev/null +++ b/processing/post/h5_addCalculation.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python2.7 +# -*- coding: UTF-8 no BOM -*- + +import os +import re +import sys +import collections +import math +import damask +import numpy as np +from optparse import OptionParser + +scriptName = os.path.splitext(os.path.basename(__file__))[0] +scriptID = ' '.join([scriptName, damask.version]) + + +# ----- Helper functions ----- # +def listify(x): + return x if isinstance(x, collections.Iterable) else [x] + + +# -------------------------------------------------------------------- +# MAIN +# -------------------------------------------------------------------- +usageEx = """ +usage_in_details: + Column labels are tagged by '#label#' in formulas. + Use ';' for ',' in functions. Numpy is available as 'np'. + Special variables: #_row_# -- row index + + Examples: + (1) magnitude of vector -- "np.linalg.norm(#vec#)" + (2) rounded root of row number -- "round(math.sqrt(#_row_#);3)" +""" +desp = "Add or alter column(s) with derived values according to " +desp += "user-defined arithmetic operation between column(s)." 
+ +parser = OptionParser(option_class=damask.extendableOption, + usage='%prog options [file[s]]' + usageEx, + description=desp, + version=scriptID) +parser.add_option('-l', '--label', + dest='labels', + action='extend', metavar='', + help='(list of) new column labels') +parser.add_option('-f', '--formula', + dest='formulas', + action='extend', metavar='', + help='(list of) formulas corresponding to labels') +parser.add_option('-c', '--condition', + dest='condition', metavar='string', + help='condition to filter rows') + +parser.set_defaults(condition=None) + +(options, filenames) = parser.parse_args() From 434514e0f29e30469d05a824e4fd6cbfabe5f4c8 Mon Sep 17 00:00:00 2001 From: chen Date: Fri, 14 Oct 2016 14:30:04 -0400 Subject: [PATCH 10/35] allow overwriting existing dataset modification to the data is logged --- lib/damask/h5table.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/lib/damask/h5table.py b/lib/damask/h5table.py index 7b987fad4..829379dea 100644 --- a/lib/damask/h5table.py +++ b/lib/damask/h5table.py @@ -120,6 +120,16 @@ class H5Table(object): dataType, h5f_path = lables_to_path(feature_name, dsXMLPath=self.dsXMLFile) with h5py.File(self.h5f_path, 'a') as h5f: + # NOTE: + # --> If dataset exists, delete the old one so as to write + # a new one. For brand new dataset. For brand new one, + # record its state as fresh in the cmd log. + try: + del h5f[h5f_path] + print "***deleting old {} from {}".format(feature_name, + self.h5f_path) + except: + cmd_log += " [FRESH]" h5f.create_dataset(h5f_path, data=dataset) # store the cmd in log is possible if cmd_log is not None: From fd4b495de21c7550863d15a79943a847fad49fcc Mon Sep 17 00:00:00 2001 From: chen Date: Fri, 14 Oct 2016 14:30:46 -0400 Subject: [PATCH 11/35] adding interface for addCalc with HDF5 --- processing/post/h5_addCalculation.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/processing/post/h5_addCalculation.py b/processing/post/h5_addCalculation.py index 0ceeeefea..2861eb915 100755 --- a/processing/post/h5_addCalculation.py +++ b/processing/post/h5_addCalculation.py @@ -54,3 +54,19 @@ parser.add_option('-c', '--condition', parser.set_defaults(condition=None) (options, filenames) = parser.parse_args() + +# ----- parse formulas ----- # +for i in xrange(len(options.formulas)): + options.formulas[i] = options.formulas[i].replace(';', ',') + +# ----- loop over input files ----- # +for name in filenames: + try: + h5f = damask.H5Table(name, new_file=False) + except: + print "!!!Cannot process {}".format(name) + continue + damask.util.report(scriptName, name) + +# Note: +# --> not immediately needed, come back later From dfb49c3138da194b3f41b2e8f46639891aa5fc0a Mon Sep 17 00:00:00 2001 From: chen Date: Fri, 14 Oct 2016 14:32:20 -0400 Subject: [PATCH 12/35] as script for calc Cauchy stress in HDF5 no safe net in the script, following 'let-it-fail' design --- processing/post/h5_addCauchy.py | 62 +++++++++++++++++++++++++++++++++ 1 file changed, 62 insertions(+) create mode 100755 processing/post/h5_addCauchy.py diff --git a/processing/post/h5_addCauchy.py b/processing/post/h5_addCauchy.py new file mode 100755 index 000000000..abcfefafe --- /dev/null +++ b/processing/post/h5_addCauchy.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python2.7 +# -*- coding: UTF-8 no BOM -*- + +import os +import sys +import damask +import numpy as np +from optparse import OptionParser + +scriptName = os.path.splitext(os.path.basename(__file__))[0] +scriptID = ' '.join([scriptName, damask.version]) + + +def getCauchy(f, p): + 
"""return Cauchy stress for given f and p""" + # [Cauchy] = (1/det(F)) * [P].[F_transpose] + f = f.reshape((3, 3)) + p = p.reshape((3, 3)) + return 1.0/np.linalg.det(f)*np.dot(p, f.T).reshape(9) + + +# -------------------------------------------------------------------- +# MAIN +# -------------------------------------------------------------------- +desp = "Add column(s) containing Cauchy stress based on given column(s)" +desp += "of deformation gradient and first Piola--Kirchhoff stress." +parser = OptionParser(option_class=damask.extendableOption, + usage='%prog options [file[s]]', + description=desp, + version=scriptID) +parser.add_option('-f', '--defgrad', + dest='defgrad', + type='string', metavar='string', + help='heading for deformation gradient [%default]') +parser.add_option('-p', '--stress', + dest='stress', + type='string', metavar='string', + help='heading for first Piola--Kirchhoff stress [%default]') + +parser.set_defaults(defgrad='f', + stress='p') + +(options, filenames) = parser.parse_args() + +# ----- loop over input H5 files ----- # +for name in filenames: + try: + h5f = damask.H5Table(name, new_file=False) + except: + continue + damask.util.report(scriptName, name) + + # ----- read in data ----- # + f = h5f.get_data("f") + p = h5f.get_data("p") + + # ----- calculate Cauchy stress ----- # + cauchy = [getCauchy(f_i, p_i) for f_i, p_i in zip(f, p)] + + # ----- write to HDF5 file ----- # + cmd_log = " ".join([scriptID, name]) + h5f.add_data('Cauchy', np.array(cauchy), cmd_log=cmd_log) From 0a357616363386aa38b58da2465f34f85cb50246 Mon Sep 17 00:00:00 2001 From: chen Date: Fri, 14 Oct 2016 14:33:22 -0400 Subject: [PATCH 13/35] delete unused module --- processing/post/h5_addCauchy.py | 1 - 1 file changed, 1 deletion(-) diff --git a/processing/post/h5_addCauchy.py b/processing/post/h5_addCauchy.py index abcfefafe..d80492623 100755 --- a/processing/post/h5_addCauchy.py +++ b/processing/post/h5_addCauchy.py @@ -2,7 +2,6 @@ # -*- coding: UTF-8 no BOM -*- import os -import sys import damask import numpy as np from optparse import OptionParser From b04c5801a5b610ee2e47c58936eefb415a4f03d8 Mon Sep 17 00:00:00 2001 From: chen Date: Fri, 14 Oct 2016 14:38:09 -0400 Subject: [PATCH 14/35] syntax polish --- processing/post/ascii2hdf5.py | 31 +++++++++++++++---------------- 1 file changed, 15 insertions(+), 16 deletions(-) diff --git a/processing/post/ascii2hdf5.py b/processing/post/ascii2hdf5.py index 5442cd53b..aef575f38 100755 --- a/processing/post/ascii2hdf5.py +++ b/processing/post/ascii2hdf5.py @@ -28,12 +28,12 @@ from optparse import OptionParser scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName,damask.version]) +scriptID = ' '.join([scriptName, damask.version]) # ----- helper function ----- # def get_rectMshVectors(xyz_array, posNum): - """take in a xyz array from rectangular mesh and figure out Vx, Vy, Vz""" + """Get Vx, Vy, Vz for rectLinear grid""" # need some improvement, and only works for rectangular grid v = sorted(list(set(xyz_array[:, posNum]))) v_interval = (v[2]+v[1])/2.0 - (v[1]+v[0])/2.0 @@ -46,24 +46,23 @@ def get_rectMshVectors(xyz_array, posNum): # ----- MAIN ---- # desp_msg = "Convert DAMASK ascii table to HDF5 file" parser = OptionParser(option_class=damask.extendableOption, - usage='%prog options [file[s]]', - description = desp_msg, - version = scriptID) + usage='%prog options [file[s]]', + description=desp_msg, + version=scriptID) parser.add_option('-D', '--DefinitionFile', - dest = 'storage definition file', - type = 
'string', - metavar = 'string', - help = 'definition file for H5 data storage') -parser.add_option('-p', - '--pos', '--position', - dest = 'pos', - type = 'string', metavar = 'string', - help = 'label of coordinates [%default]') + dest='storage definition file', + type='string', + metavar='string', + help='definition file for H5 data storage') +parser.add_option('-p', '--pos', '--position', + dest='pos', + type='string', metavar='string', + help='label of coordinates [%default]') parser.set_defaults(DefinitionFile='default', - pos='pos') + pos='pos') -(options,filenames) = parser.parse_args() +(options, filenames) = parser.parse_args() filename = filenames[0] From 185a5aaabe3a047b29718052877d5544ca18dd57 Mon Sep 17 00:00:00 2001 From: Test User Date: Sat, 15 Oct 2016 11:00:59 +0200 Subject: [PATCH 15/35] updated version information after successful test of v2.0.1-222-g33de9cf --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index da01b9df1..ca4ee7d2a 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.1-213-g75149cc +v2.0.1-222-g33de9cf From fb1e4f0c3973fa4810507565c6f0334bfc35e5b0 Mon Sep 17 00:00:00 2001 From: chen Date: Mon, 17 Oct 2016 12:24:29 -0400 Subject: [PATCH 16/35] add IPF color tuple for HDF5 file --- processing/post/h5_addIPFcolor.py | 174 ++++++++++++++++++++++++++++++ 1 file changed, 174 insertions(+) create mode 100755 processing/post/h5_addIPFcolor.py diff --git a/processing/post/h5_addIPFcolor.py b/processing/post/h5_addIPFcolor.py new file mode 100755 index 000000000..469e3e386 --- /dev/null +++ b/processing/post/h5_addIPFcolor.py @@ -0,0 +1,174 @@ +#!/usr/bin/env python2.7 +# -*- coding: UTF-8 no BOM -*- + +import os +import sys +import math +import damask +import numpy as np +from optparse import OptionParser + +scriptName = os.path.splitext(os.path.basename(__file__))[0] +scriptID = ' '.join([scriptName, damask.version]) + +# TODO +# This implementation will have to iterate through the array one +# element at a time, maybe there are some other ways to make this +# faster. + + +# ----- Helper functions ----- # +# ref: http://stackoverflow.com/questions/16801322/how-can-i-check-that-a-list- +# has-one-and-only-one-truthy-value +# NOTE: +# These functions might be useful in the future +def n_trues(iterable, n=1): + """N(trues) = n""" + i = iter(iterable) + return all(any(i) for j in range(n)) and not any(i) + + +def up_to_n_trues(iterable, n=1): + i = iter(iterable) + all(any(i) for j in range(n)) + return not any(i) + + +def at_least_n_trues(iterable, n=1): + i = iter(iterable) + return all(any(i) for j in range(n)) + + +def m_to_n_trues(iterable, m=1, n=1): + i = iter(iterable) + assert m <= n + return at_least_n_trues(i, m) and up_to_n_trues(i, n - m) + + +# -------------------------------------------------------------------- +# MAIN +# -------------------------------------------------------------------- +desp = "Add RGB color value corresponding to TSL-OIM scheme for IPF." 
+parser = OptionParser(option_class=damask.extendableOption, + usage='%prog options [file[s]]', + description=desp, + version=scriptID) +parser.add_option('-p', '--pole', + dest='pole', + type='float', nargs=3, metavar='float float float', + help='lab frame direction for IPF [%default]') +msg = ', '.join(damask.Symmetry.lattices[1:]) +parser.add_option('-s', '--symmetry', + dest='symmetry', + type='choice', choices=damask.Symmetry.lattices[1:], + metavar='string', + help='crystal symmetry [%default] {{{}}} '.format(msg)) +parser.add_option('-e', '--eulers', + dest='eulers', + type='string', metavar='string', + help='Euler angles label') +parser.add_option('-d', '--degrees', + dest='degrees', + action='store_true', + help='Euler angles are given in degrees [%default]') +parser.add_option('-m', '--matrix', + dest='matrix', + type='string', metavar='string', + help='orientation matrix label') +parser.add_option('-a', + dest='a', + type='string', metavar='string', + help='crystal frame a vector label') +parser.add_option('-b', + dest='b', + type='string', metavar='string', + help='crystal frame b vector label') +parser.add_option('-c', + dest='c', + type='string', metavar='string', + help='crystal frame c vector label') +parser.add_option('-q', '--quaternion', + dest='quaternion', + type='string', metavar='string', + help='quaternion label') + +parser.set_defaults(pole=(0.0, 0.0, 1.0), + symmetry=damask.Symmetry.lattices[-1], + degrees=False) + +(options, filenames) = parser.parse_args() + +# safe guarding to have only one orientation representation +# use dynamic typing to group a,b,c into frame +options.frame = [options.a, options.b, options.c] +input = [options.eulers is not None, + all(options.frame), + options.matrix is not None, + options.quaternion is not None] + +if np.sum(input) != 1: + parser.error('needs exactly one input format.') + +# select input label that was requested (active) +label_active = np.where(input)[0][0] +(label, dim, inputtype) = [(options.eulers, 3, 'eulers'), + (options.frame, [3, 3, 3], 'frame'), + (options.matrix, 9, 'matrix'), + (options.quaternion, 4, 'quaternion')][label_active] + +# rescale degrees to radians +toRadians = math.pi/180.0 if options.degrees else 1.0 + +# only use normalized pole +pole = np.array(options.pole) +pole /= np.linalg.norm(pole) + +# ----- Loop over input files ----- # +for name in filenames: + try: + h5f = damask.H5Table(name, new_file=False) + except: + continue + damask.util.report(scriptName, name) + + # extract data from HDF5 file + if inputtype == 'eulers': + orieData = h5f.get_data(label) + elif inputtype == 'matrix': + orieData = h5f.get_data(label) + orieData = orieData.reshape(orieData.shape[0], 3, 3) + elif inputtype == 'frame': + vctr_a = h5f.get_data(label[0]) + vctr_b = h5f.get_data(label[1]) + vctr_c = h5f.get_data(label[2]) + frame = np.column_stack((vctr_a, vctr_b, vctr_c)) + orieData = frame.reshape(frame.shape[0], 3, 3) + elif inputtype == 'quaternion': + orieData = h5f.get_data(label) + + # calculate the IPF color + rgbArrays = np.zeros((orieData.shape[0], 3)) + for ci in xrange(rgbArrays.shape[0]): + if inputtype == 'eulers': + o = damask.Orientation(Eulers=np.array(orieData[ci, :])*toRadians, + symmetry=options.symmetry).reduced() + elif inputtype == 'matrix': + o = damask.Orientation(matrix=orieData[ci, :, :].transpose(), + symmetry=options.symmetry).reduced() + elif inputtype == 'frame': + o = damask.Orientation(matrix=orieData[ci, :, :], + symmetry=options.symmetry).reduced() + elif inputtype == 'quaternion': 
+ o = damask.Orientation(quaternion=orieData[ci, :], + symmetry=options.symmetry).reduced() + rgbArrays[ci, :] = o.IPFcolor(pole) + + # compose labels/headers for IPF color (RGB) + labelIPF = 'IPF_{:g}{:g}{:g}_{sym}'.format(*options.pole, + sym=options.symmetry.lower()) + + # compose cmd history (go with dataset) + cmd_log = scriptID + '\t' + ' '.join(sys.argv[1:]) + + # write data to HDF5 file + h5f.add_data(labelIPF, rgbArrays, cmd_log=cmd_log) From 1a2194f0428afbdebed38f7afb4e2f46cfe1776e Mon Sep 17 00:00:00 2001 From: chen Date: Mon, 17 Oct 2016 16:23:21 -0400 Subject: [PATCH 17/35] remove useless functions --- processing/post/h5_addIPFcolor.py | 29 ----------------------------- 1 file changed, 29 deletions(-) diff --git a/processing/post/h5_addIPFcolor.py b/processing/post/h5_addIPFcolor.py index 469e3e386..e8abc2a0f 100755 --- a/processing/post/h5_addIPFcolor.py +++ b/processing/post/h5_addIPFcolor.py @@ -16,35 +16,6 @@ scriptID = ' '.join([scriptName, damask.version]) # element at a time, maybe there are some other ways to make this # faster. - -# ----- Helper functions ----- # -# ref: http://stackoverflow.com/questions/16801322/how-can-i-check-that-a-list- -# has-one-and-only-one-truthy-value -# NOTE: -# These functions might be useful in the future -def n_trues(iterable, n=1): - """N(trues) = n""" - i = iter(iterable) - return all(any(i) for j in range(n)) and not any(i) - - -def up_to_n_trues(iterable, n=1): - i = iter(iterable) - all(any(i) for j in range(n)) - return not any(i) - - -def at_least_n_trues(iterable, n=1): - i = iter(iterable) - return all(any(i) for j in range(n)) - - -def m_to_n_trues(iterable, m=1, n=1): - i = iter(iterable) - assert m <= n - return at_least_n_trues(i, m) and up_to_n_trues(i, n - m) - - # -------------------------------------------------------------------- # MAIN # -------------------------------------------------------------------- From 74b29881f31bb2f45447972cb9f315bad59a31b3 Mon Sep 17 00:00:00 2001 From: chen Date: Mon, 17 Oct 2016 16:24:11 -0400 Subject: [PATCH 18/35] strain tensor calculation script for HDF5 --- processing/post/h5_addStrainTensors.py | 153 +++++++++++++++++++++++++ 1 file changed, 153 insertions(+) create mode 100644 processing/post/h5_addStrainTensors.py diff --git a/processing/post/h5_addStrainTensors.py b/processing/post/h5_addStrainTensors.py new file mode 100644 index 000000000..eca63e317 --- /dev/null +++ b/processing/post/h5_addStrainTensors.py @@ -0,0 +1,153 @@ +#!/usr/bin/env python2.7 +# -*- coding: UTF-8 no BOM -*- + +import os +import sys +import damask +import numpy as np +from optparse import OptionParser + +scriptName = os.path.splitext(os.path.basename(__file__))[0] +scriptID = ' '.join([scriptName, damask.version]) + + +# ----- Helper functions ----- # +def operator(stretch, strain, eigenvalues): + # Albrecht Bertram: Elasticity and Plasticity of Large Deformations + # An Introduction (3rd Edition, 2012), p. 102 + return {'V#ln': np.log(eigenvalues), + 'U#ln': np.log(eigenvalues), + 'V#Biot': (np.ones(3, 'd') - 1.0/eigenvalues), + 'U#Biot': (eigenvalues - np.ones(3, 'd')), + 'V#Green': (np.ones(3, 'd') - 1.0/eigenvalues/eigenvalues)*0.5, + 'U#Green': (eigenvalues*eigenvalues - np.ones(3, 'd'))*0.5, + }[stretch+'#'+strain] + + +def calcEPS(defgrads, stretchType, strainType): + """calculate specific type of strain tensor""" + eps = np.zeros(defgrads.shape) # initialize container + + # TODO: + # this loop can use some performance boost + # (multi-threading?) 
+ for ri in xrange(defgrads.shape[0]): + f = defgrads[ri, :, :].reshape(3, 3) + U, S, Vh = np.lingalg.svd(f) + R = np.dot(U, Vh) # rotation of polar decomposition + if stretchType == 'U': + stretch = np.dot(np.linalg.inv(R), f) # F = RU + elif stretchType == 'V': + stretch = np.dot(f, np.linalg.inv(R)) # F = VR + + # kill nasty noisy data + stretch = np.where(abs(stretch) < 1e-12, 0, stretch) + + (D, V) = np.linalg.eig(stretch) + # flip principal component with negative Eigen values + neg = np.where(D < 0.0) + D[neg] *= -1. + V[:, neg] *= -1. + + # check each vector for orthogonality + # --> brutal force enforcing orthogonal base + # and re-normalize + for i, eigval in enumerate(D): + if np.dot(V[:, i], V[:, (i+1) % 3]) != 0.0: + V[:, (i+1) % 3] = np.cross(V[:, (i+2) % 3], V[:, i]) + V[:, (i+1) % 3] /= np.sqrt(np.dot(V[:, (i+1) % 3], + V[:, (i+1) % 3].conj())) + + # calculate requested version of strain tensor + d = operator(stretchType, strainType, D) + eps[ri] = (np.dot(V, np.dot(np.diag(d), V.T)).real).reshape(9) + + return eps + +# -------------------------------------------------------------------- +# MAIN +# -------------------------------------------------------------------- +desp = "Add column(s) containing given strains based on given stretches" +parser = OptionParser(option_class=damask.extendableOption, + usage='%prog options [file[s]]', + description=desp, + version=scriptID) +msg = 'material strains based on right Cauchy-Green deformation, i.e., C and U' +parser.add_option('-u', '--right', + dest='right', + action='store_true', + help=msg) +msg = 'spatial strains based on left Cauchy--Green deformation, i.e., B and V' +parser.add_option('-v', '--left', + dest='left', + action='store_true', + help=msg) +parser.add_option('-0', '--logarithmic', + dest='logarithmic', + action='store_true', + help='calculate logarithmic strain tensor') +parser.add_option('-1', '--biot', + dest='biot', + action='store_true', + help='calculate biot strain tensor') +parser.add_option('-2', '--green', + dest='green', + action='store_true', + help='calculate green strain tensor') +msg = 'heading(s) of columns containing deformation tensor values' +parser.add_option('-f', '--defgrad', + dest='defgrad', + action='extend', + metavar='', + help=msg) + +parser.set_defaults(right=False, left=False, + logarithmic=False, biot=False, green=False, + defgrad=['f']) + +(options, filenames) = parser.parse_args() + +stretches = [] +strains = [] + +if options.right: + stretches.append('U') +if options.left: + stretches.append('V') + +if options.logarithmic: + strains.append('ln') +if options.biot: + strains.append('Biot') +if options.green: + strains.append('Green') + +if options.defgrad is None: + parser.error('no data column specified.') + +# ----- Loop over input files ----- # +for name in filenames: + try: + h5f = damask.H5Table(name, new_file=False) + except: + continue + damask.util.report(scriptName, name) + + # extract defgrads from HDF5 storage + F = h5f.get_data(options.defgrads) + + # allow calculate multiple types of strain within the + # same cmd call + for stretchType in stretches: + for strainType in strains: + # calculate strain tensor for this type + eps = calcEPS(F, stretchType, strainType) + + # compose labels/headers for this strain tensor + labelsStrain = strainType + stretchType + + # prepare log info + cmd_log = scriptID + '\t' + ' '.join(sys.argv[1:]) + + # write data to HDF5 file + h5f.add_data(labelsStrain, eps, cmd_log=cmd_log) From 8e7f0c255bcf601ded78e290431b9d7ea080588e Mon Sep 17 
00:00:00 2001 From: chen Date: Mon, 17 Oct 2016 16:58:04 -0400 Subject: [PATCH 19/35] fix some syntax error --- processing/post/h5_addStrainTensors.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) mode change 100644 => 100755 processing/post/h5_addStrainTensors.py diff --git a/processing/post/h5_addStrainTensors.py b/processing/post/h5_addStrainTensors.py old mode 100644 new mode 100755 index eca63e317..65f71e50c --- a/processing/post/h5_addStrainTensors.py +++ b/processing/post/h5_addStrainTensors.py @@ -32,8 +32,8 @@ def calcEPS(defgrads, stretchType, strainType): # this loop can use some performance boost # (multi-threading?) for ri in xrange(defgrads.shape[0]): - f = defgrads[ri, :, :].reshape(3, 3) - U, S, Vh = np.lingalg.svd(f) + f = defgrads[ri, :].reshape(3, 3) + U, S, Vh = np.linalg.svd(f) R = np.dot(U, Vh) # rotation of polar decomposition if stretchType == 'U': stretch = np.dot(np.linalg.inv(R), f) # F = RU @@ -94,6 +94,9 @@ parser.add_option('-2', '--green', dest='green', action='store_true', help='calculate green strain tensor') +# NOTE: +# It might be easier to just calculate one type of deformation gradient +# at a time. msg = 'heading(s) of columns containing deformation tensor values' parser.add_option('-f', '--defgrad', dest='defgrad', @@ -103,7 +106,7 @@ parser.add_option('-f', '--defgrad', parser.set_defaults(right=False, left=False, logarithmic=False, biot=False, green=False, - defgrad=['f']) + defgrad='f') (options, filenames) = parser.parse_args() @@ -134,7 +137,7 @@ for name in filenames: damask.util.report(scriptName, name) # extract defgrads from HDF5 storage - F = h5f.get_data(options.defgrads) + F = h5f.get_data(options.defgrad) # allow calculate multiple types of strain within the # same cmd call From 4d849219f7e78a416d79b621cc638821fb5d1386 Mon Sep 17 00:00:00 2001 From: chen Date: Mon, 17 Oct 2016 16:59:42 -0400 Subject: [PATCH 20/35] will come back for this later --- processing/post/h5_addCalculation.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/processing/post/h5_addCalculation.py b/processing/post/h5_addCalculation.py index 2861eb915..dc4975dbf 100755 --- a/processing/post/h5_addCalculation.py +++ b/processing/post/h5_addCalculation.py @@ -2,12 +2,12 @@ # -*- coding: UTF-8 no BOM -*- import os -import re -import sys +# import re +# import sys import collections -import math +# import math import damask -import numpy as np +# import numpy as np from optparse import OptionParser scriptName = os.path.splitext(os.path.basename(__file__))[0] From 703ae3c6d6a89ac698007ebf20033f5d3737a303 Mon Sep 17 00:00:00 2001 From: chen Date: Mon, 17 Oct 2016 17:29:44 -0400 Subject: [PATCH 21/35] add script for adding von Mises equivalent to HDF5 file --- processing/post/h5_addMises.py | 85 ++++++++++++++++++++++++++++++++++ 1 file changed, 85 insertions(+) create mode 100755 processing/post/h5_addMises.py diff --git a/processing/post/h5_addMises.py b/processing/post/h5_addMises.py new file mode 100755 index 000000000..941a2f5da --- /dev/null +++ b/processing/post/h5_addMises.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python2.7 +# -*- coding: UTF-8 no BOM -*- + +import os +import sys +import math +import damask +import numpy as np +from optparse import OptionParser + +scriptName = os.path.splitext(os.path.basename(__file__))[0] +scriptID = ' '.join([scriptName, damask.version]) + + +# ----- Helper functions ----- # +def calcMises(what, tensor): + """calculate von Mises equivalent""" + dev = tensor - np.trace(tensor)/3.0*np.eye(3) + 
symdev = 0.5*(dev+dev.T) + return math.sqrt(np.sum(symdev*symdev.T) * + { + 'stress': 3.0/2.0, + 'strain': 2.0/3.0, + }[what.lower()]) + + +# -------------------------------------------------------------------- +# MAIN +# -------------------------------------------------------------------- +desp = "Add von Mises equivalent values for symmetric part of requested" +parser = OptionParser(option_class=damask.extendableOption, + usage='%prog options [file[s]]', + description=desp, + version=scriptID) +parser.add_option('-e', '--strain', + dest='strain', + metavar='string', + help='name of dataset containing strain tensors') +parser.add_option('-s', '--stress', + dest='stress', + metavar='string', + help='name of dataset containing stress tensors') + +parser.set_defaults(strain=None, stress=None) + +(options, filenames) = parser.parse_args() + +# ----- Loop over input files ----- # +for name in filenames: + try: + h5f = damask.H5Table(name, new_file=False) + except: + continue + damask.util.report(scriptName, name) + + # TODO: + # Could use some refactoring here + if options.stress is not None: + # extract stress tensor from HDF5 + tnsr = h5f.get_data(options.stress) + + # calculate von Mises equivalent row by row + vmStress = np.zeros(tnsr.shape[0]) + for ri in xrange(tnsr.shape[0]): + stressTnsr = tnsr[ri, :].reshape(3, 3) + vmStress[ri] = calcMises('stress', stressTnsr) + + # compose label + label = "Mises{}".format(options.stress) + + # prepare log info + cmd_log = scriptID + '\t' + ' '.join(sys.argv[1:]) + + # write data to HDF5 file + h5f.add_data(label, vmStress, cmd_log=cmd_log) + + if options.strain is not None: + tnsr = h5f.get_data(options.strain) + vmStrain = np.zeros(tnsr.shape[0]) + for ri in xrange(tnsr.shape[0]): + strainTnsr = tnsr[ri, :].reshape(3, 3) + vmStrain[ri] = calcMises('strain', strainTnsr) + label = "Mises{}".format(options.strain) + cmd_log = scriptID + '\t' + ' '.join(sys.argv[1:]) + h5f.add_data(label, vmStrain, cmd_log=cmd_log) From 51e408a5bf4c18116a0aeab8e1455fa4a7137155 Mon Sep 17 00:00:00 2001 From: chen Date: Mon, 17 Oct 2016 18:24:26 -0400 Subject: [PATCH 22/35] fix error regarding log if no log is provided when adding data, use None --- lib/damask/h5table.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/damask/h5table.py b/lib/damask/h5table.py index 829379dea..95e25f191 100644 --- a/lib/damask/h5table.py +++ b/lib/damask/h5table.py @@ -129,7 +129,8 @@ class H5Table(object): print "***deleting old {} from {}".format(feature_name, self.h5f_path) except: - cmd_log += " [FRESH]" + # if no cmd log, None will used + cmd_log = str(cmd_log) + " [FRESH]" h5f.create_dataset(h5f_path, data=dataset) # store the cmd in log is possible if cmd_log is not None: From 96349ebf53ba581304a5edcfac030595dd414c9d Mon Sep 17 00:00:00 2001 From: chen Date: Mon, 17 Oct 2016 18:24:47 -0400 Subject: [PATCH 23/35] add log to each dataset --- processing/post/ascii2hdf5.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/processing/post/ascii2hdf5.py b/processing/post/ascii2hdf5.py index aef575f38..ea2819592 100755 --- a/processing/post/ascii2hdf5.py +++ b/processing/post/ascii2hdf5.py @@ -91,6 +91,9 @@ Vz = get_rectMshVectors(xyz_array, 2) # use the dimension of the rectangular grid to reshape all other data mshGridDim = [len(Vx)-1, len(Vy)-1, len(Vz)-1] +# ----- compose cmd log ----- # +cmd_log = " ".join([scriptID, filename]) + # ----- create a new HDF5 file and save the data -----# # force remove existing HDF5 file h5fName = 
filename.replace(".txt", ".h5") @@ -104,9 +107,9 @@ h5f = damask.H5Table(h5fName, # adding increment number as root level attributes h5f.add_attr('inc', incNum) # add the mesh grid data now -h5f.add_data("Vx", Vx) -h5f.add_data("Vy", Vy) -h5f.add_data("Vz", Vz) +h5f.add_data("Vx", Vx, cmd_log=cmd_log) +h5f.add_data("Vy", Vy, cmd_log=cmd_log) +h5f.add_data("Vz", Vz, cmd_log=cmd_log) # add the rest of data from table labelsProcessed = ['inc'] @@ -134,6 +137,6 @@ for fi in xrange(len(labels)): # dataset.shape[1])) # write out data print "adding {}...".format(featureName) - h5f.add_data(featureName, dataset) + h5f.add_data(featureName, dataset, cmd_log=cmd_log) # write down the processed label labelsProcessed.append(featureName) From 9818a074c6206096fe34105e2e644058790614df Mon Sep 17 00:00:00 2001 From: Test User Date: Tue, 18 Oct 2016 04:41:46 +0200 Subject: [PATCH 24/35] updated version information after successful test of v2.0.1-238-g96349eb --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index ca4ee7d2a..d0bf70ba1 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.1-222-g33de9cf +v2.0.1-238-g96349eb From b8bf51b366fdfb20fe7ce6bd3843064c5dcb41e2 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Tue, 18 Oct 2016 15:31:52 -0400 Subject: [PATCH 25/35] changed default FFT spatial derivative to "fwbw_difference" --- code/numerics.f90 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/code/numerics.f90 b/code/numerics.f90 index 9840630db..c968c7090 100644 --- a/code/numerics.f90 +++ b/code/numerics.f90 @@ -110,7 +110,7 @@ module numerics fftw_plan_mode = 'FFTW_PATIENT' !< reads the planing-rigor flag, see manual on www.fftw.org, Default FFTW_PATIENT: use patient planner flag character(len=64), protected, public :: & spectral_solver = 'basicpetsc' , & !< spectral solution method - spectral_derivative = 'continuous' !< spectral filtering method + spectral_derivative = 'fwbw_difference' !< spectral spatial derivative method character(len=1024), protected, public :: & petsc_defaultOptions = '-mech_snes_type ngmres & &-damage_snes_type ngmres & From ff2f827a68cb81d222e4fe9cc827e68c46308435 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Tue, 18 Oct 2016 15:33:24 -0400 Subject: [PATCH 26/35] strip outermost quotes from labels --- lib/damask/asciitable.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/damask/asciitable.py b/lib/damask/asciitable.py index 06a52280b..3509a5284 100644 --- a/lib/damask/asciitable.py +++ b/lib/damask/asciitable.py @@ -336,6 +336,7 @@ class ASCIItable(): try: idx.append(int(label)-1) # column given as integer number? except ValueError: + label = label[1:-1] if label[0] == label[-1] and label[0] in ('"',"'") else label # remove outermost quotations try: idx.append(self.tags.index(label)) # locate string in label list except ValueError: @@ -348,6 +349,7 @@ class ASCIItable(): idx = int(labels)-1 # offset for python array indexing except ValueError: try: + labels = labels[1:-1] if labels[0] == labels[-1] and labels[0] in ('"',"'") else labels # remove outermost quotations idx = self.tags.index(labels) except ValueError: try: @@ -380,6 +382,7 @@ class ASCIItable(): while idx+myDim < len(self.tags) and self.tags[idx+myDim].startswith("%i_"%(myDim+1)): myDim += 1 # add while found except ValueError: # column has string label + label = label[1:-1] if label[0] == label[-1] and label[0] in ('"',"'") else label # remove outermost quotations if label in self.tags: # can be directly found? 
myDim = 1 # scalar by definition elif '1_'+label in self.tags: # look for first entry of possible multidim object @@ -399,6 +402,7 @@ class ASCIItable(): while idx+dim < len(self.tags) and self.tags[idx+dim].startswith("%i_"%(dim+1)): dim += 1 # add as long as found except ValueError: # column has string label + labels = labels[1:-1] if labels[0] == labels[-1] and labels[0] in ('"',"'") else labels # remove outermost quotations if labels in self.tags: # can be directly found? dim = 1 # scalar by definition elif '1_'+labels in self.tags: # look for first entry of possible multidim object From 85076ad61300f70b4f5b77f8b43fd174f15b7665 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Tue, 18 Oct 2016 16:23:52 -0400 Subject: [PATCH 27/35] check agreement between rate and time #points, extrapolate to outside --- code/source_thermal_externalheat.f90 | 68 ++++++++++++++++------------ 1 file changed, 38 insertions(+), 30 deletions(-) diff --git a/code/source_thermal_externalheat.f90 b/code/source_thermal_externalheat.f90 index e2b14d45d..60aaebe42 100644 --- a/code/source_thermal_externalheat.f90 +++ b/code/source_thermal_externalheat.f90 @@ -1,6 +1,7 @@ !-------------------------------------------------------------------------------------------------- !> @author Pratheek Shanthraj, Max-Planck-Institut für Eisenforschung GmbH -!> @brief material subroutine for thermal source due to plastic dissipation +!> @author Philip Eisenlohr, Michigan State University +!> @brief material subroutine for variable heat source !> @details to be done !-------------------------------------------------------------------------------------------------- module source_thermal_externalheat @@ -10,24 +11,24 @@ module source_thermal_externalheat implicit none private - integer(pInt), dimension(:), allocatable, public, protected :: & + integer(pInt), dimension(:), allocatable, public, protected :: & source_thermal_externalheat_sizePostResults, & !< cumulative size of post results source_thermal_externalheat_offset, & !< which source is my current thermal dissipation mechanism? source_thermal_externalheat_instance !< instance of thermal dissipation source mechanism - integer(pInt), dimension(:,:), allocatable, target, public :: & + integer(pInt), dimension(:,:), allocatable, target, public :: & source_thermal_externalheat_sizePostResult !< size of each post result output - character(len=64), dimension(:,:), allocatable, target, public :: & + character(len=64), dimension(:,:), allocatable, target, public :: & source_thermal_externalheat_output !< name of each post result output - integer(pInt), dimension(:), allocatable, target, public :: & + integer(pInt), dimension(:), allocatable, target, public :: & source_thermal_externalheat_Noutput !< number of outputs per instance of this source - integer(pInt), dimension(:), allocatable, private :: & + integer(pInt), dimension(:), allocatable, private :: & source_thermal_externalheat_nIntervals - real(pReal), dimension(:,:), allocatable, private :: & + real(pReal), dimension(:,:), allocatable, private :: & source_thermal_externalheat_time, & source_thermal_externalheat_rate @@ -136,23 +137,26 @@ subroutine source_thermal_externalheat_init(fileUnit) if (phase > 0_pInt ) then; if (any(phase_source(:,phase) == SOURCE_thermal_externalheat_ID)) then ! do not short-circuit here (.and. with next if statemen). It's not safe in Fortran - instance = source_thermal_externalheat_instance(phase) ! 
which instance of my source is present phase + instance = source_thermal_externalheat_instance(phase) ! which instance of my source is present phase chunkPos = IO_stringPos(line) - tag = IO_lc(IO_stringValue(line,chunkPos,1_pInt)) ! extract key + tag = IO_lc(IO_stringValue(line,chunkPos,1_pInt)) ! extract key select case(tag) - case ('externalheat_time') + case ('externalheat_time','externalheat_rate') if (chunkPos(1) <= 2_pInt) & call IO_error(150_pInt,ext_msg=trim(tag)//' ('//SOURCE_thermal_externalheat_label//')') + if ( source_thermal_externalheat_nIntervals(instance) > 0_pInt & + .and. source_thermal_externalheat_nIntervals(instance) /= chunkPos(1) - 2_pInt) & + call IO_error(150_pInt,ext_msg=trim(tag)//' ('//SOURCE_thermal_externalheat_label//')') + source_thermal_externalheat_nIntervals(instance) = chunkPos(1) - 2_pInt do interval = 1, source_thermal_externalheat_nIntervals(instance) + 1_pInt - temp_time(instance, interval) = IO_floatValue(line,chunkPos,1_pInt + interval) + select case(tag) + case ('externalheat_time') + temp_time(instance, interval) = IO_floatValue(line,chunkPos,1_pInt + interval) + case ('externalheat_rate') + temp_rate(instance, interval) = IO_floatValue(line,chunkPos,1_pInt + interval) + end select enddo - - case ('externalheat_rate') - do interval = 1, source_thermal_externalheat_nIntervals(instance) + 1_pInt - temp_rate(instance, interval) = IO_floatValue(line,chunkPos,1_pInt + interval) - enddo - end select endif; endif enddo parsingFile @@ -162,13 +166,13 @@ subroutine source_thermal_externalheat_init(fileUnit) initializeInstances: do phase = 1_pInt, material_Nphase if (any(phase_source(:,phase) == SOURCE_thermal_externalheat_ID)) then - NofMyPhase=count(material_phase==phase) + NofMyPhase = count(material_phase==phase) instance = source_thermal_externalheat_instance(phase) sourceOffset = source_thermal_externalheat_offset(phase) source_thermal_externalheat_time(instance,1:source_thermal_externalheat_nIntervals(instance)+1_pInt) = & - temp_time(instance,1:source_thermal_externalheat_nIntervals(instance)+1_pInt) + temp_time(instance,1:source_thermal_externalheat_nIntervals(instance)+1_pInt) source_thermal_externalheat_rate(instance,1:source_thermal_externalheat_nIntervals(instance)+1_pInt) = & - temp_rate(instance,1:source_thermal_externalheat_nIntervals(instance)+1_pInt) + temp_rate(instance,1:source_thermal_externalheat_nIntervals(instance)+1_pInt) sizeDotState = 1_pInt sizeDeltaState = 0_pInt @@ -200,7 +204,8 @@ subroutine source_thermal_externalheat_init(fileUnit) end subroutine source_thermal_externalheat_init !-------------------------------------------------------------------------------------------------- -!> @brief calculates derived quantities from state +!> @brief rate of change of state +!> @details state only contains current time to linearly interpolate given heat powers !-------------------------------------------------------------------------------------------------- subroutine source_thermal_externalheat_dotState(ipc, ip, el) use material, only: & @@ -221,12 +226,12 @@ subroutine source_thermal_externalheat_dotState(ipc, ip, el) constituent = phasememberAt(ipc,ip,el) sourceOffset = source_thermal_externalheat_offset(phase) - sourceState(phase)%p(sourceOffset)%dotState(1,constituent) = 1.0_pReal + sourceState(phase)%p(sourceOffset)%dotState(1,constituent) = 1.0_pReal ! 
state is current time end subroutine source_thermal_externalheat_dotState !-------------------------------------------------------------------------------------------------- -!> @brief returns local vacancy generation rate +!> @brief returns local heat generation rate !-------------------------------------------------------------------------------------------------- subroutine source_thermal_externalheat_getRateAndItsTangent(TDot, dTDot_dT, ipc, ip, el) use material, only: & @@ -244,21 +249,24 @@ subroutine source_thermal_externalheat_getRateAndItsTangent(TDot, dTDot_dT, ipc, integer(pInt) :: & instance, phase, constituent, sourceOffset, interval real(pReal) :: & - norm_time + frac_time phase = phaseAt(ipc,ip,el) constituent = phasememberAt(ipc,ip,el) instance = source_thermal_externalheat_instance(phase) sourceOffset = source_thermal_externalheat_offset(phase) - do interval = 1, source_thermal_externalheat_nIntervals(instance) - norm_time = (sourceState(phase)%p(sourceOffset)%state(1,constituent) - & + do interval = 1, source_thermal_externalheat_nIntervals(instance) ! scan through all rate segments + frac_time = (sourceState(phase)%p(sourceOffset)%state(1,constituent) - & source_thermal_externalheat_time(instance,interval)) / & (source_thermal_externalheat_time(instance,interval+1) - & - source_thermal_externalheat_time(instance,interval)) - if (norm_time >= 0.0_pReal .and. norm_time < 1.0_pReal) & - TDot = source_thermal_externalheat_rate(instance,interval ) * (1.0_pReal - norm_time) + & - source_thermal_externalheat_rate(instance,interval+1) * norm_time + source_thermal_externalheat_time(instance,interval)) ! fractional time within segment + if ( (frac_time < 0.0_pReal .and. interval == 1) & + .or. (frac_time >= 1.0_pReal .and. interval == source_thermal_externalheat_nIntervals(instance)) & + .or. (frac_time >= 0.0_pReal .and. frac_time < 1.0_pReal) ) & + TDot = source_thermal_externalheat_rate(instance,interval ) * (1.0_pReal - frac_time) + & + source_thermal_externalheat_rate(instance,interval+1) * frac_time ! interpolate heat rate between segment boundaries... + ! ...or extrapolate if outside of bounds enddo dTDot_dT = 0.0 From e93db170da36fe5b364bdfde0e2b9ce046805af4 Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Tue, 18 Oct 2016 16:24:23 -0400 Subject: [PATCH 28/35] fixed typo in help message --- code/spectral_interface.f90 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/code/spectral_interface.f90 b/code/spectral_interface.f90 index 85ba30c51..7050d7b45 100644 --- a/code/spectral_interface.f90 +++ b/code/spectral_interface.f90 @@ -152,7 +152,7 @@ subroutine DAMASK_interface_init() write(6,'(a)') ' Make sure the file "material.config" exists in the working' write(6,'(a)') ' directory.' write(6,'(a)') ' For further configuration place "numerics.config"' - write(6,'(a)')' and "numerics.config" in that directory.' + write(6,'(a)')' and "debug.config" in that directory.' write(6,'(/,a)')' --restart XX' write(6,'(a)') ' Reads in total increment No. XX-1 and continues to' write(6,'(a)') ' calculate total increment No. XX.' 
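[Editor's note on PATCH 27/35, source_thermal_externalheat.f90] The rewritten getRateAndItsTangent scans the user-given (time, rate) points, interpolates the heat rate linearly within the segment that contains the current time, and extrapolates linearly from the first or last segment when the time falls outside the table; the parser now also checks that externalheat_time and externalheat_rate supply the same number of points. Below is a minimal Python sketch of that lookup, for illustration only — the helper name and the example values are invented here and are not part of the patch.

    def external_heat_rate(times, rates, t):
        """Piecewise-linear heat rate r(t); extrapolates linearly outside [times[0], times[-1]].

        times and rates must have the same number of entries, mirroring the
        consistency check on externalheat_time / externalheat_rate.
        """
        if len(times) != len(rates):
            raise ValueError("time and rate need the same number of points")
        for i in range(len(times) - 1):                          # scan through all rate segments
            frac = (t - times[i]) / (times[i+1] - times[i])      # fractional time within segment
            if (frac < 0.0 and i == 0) \
               or (frac >= 1.0 and i == len(times) - 2) \
               or (0.0 <= frac < 1.0):
                # interpolate between segment boundaries, or extrapolate from the end segments
                return rates[i]*(1.0 - frac) + rates[i+1]*frac

    # example: rate ramps 0 -> 10 over t = 0..1, then back to 0 at t = 2
    print(external_heat_rate([0.0, 1.0, 2.0], [0.0, 10.0, 0.0], 0.25))  # 2.5  (interpolated)
    print(external_heat_rate([0.0, 1.0, 2.0], [0.0, 10.0, 0.0], 2.5))   # -5.0 (extrapolated)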
From 12e94bd310a228f1c96d64e1a2667dea7df1422e Mon Sep 17 00:00:00 2001 From: Philip Eisenlohr Date: Wed, 19 Oct 2016 08:07:54 -0400 Subject: [PATCH 29/35] revert to continuous spectral derivative, fwbw gets own branch --- code/numerics.f90 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/code/numerics.f90 b/code/numerics.f90 index c968c7090..24bd19018 100644 --- a/code/numerics.f90 +++ b/code/numerics.f90 @@ -110,7 +110,7 @@ module numerics fftw_plan_mode = 'FFTW_PATIENT' !< reads the planing-rigor flag, see manual on www.fftw.org, Default FFTW_PATIENT: use patient planner flag character(len=64), protected, public :: & spectral_solver = 'basicpetsc' , & !< spectral solution method - spectral_derivative = 'fwbw_difference' !< spectral spatial derivative method + spectral_derivative = 'continuous' !< spectral spatial derivative method character(len=1024), protected, public :: & petsc_defaultOptions = '-mech_snes_type ngmres & &-damage_snes_type ngmres & From c5217d8db5acac152422761cd8eeeb444fa10b4a Mon Sep 17 00:00:00 2001 From: Test User Date: Thu, 20 Oct 2016 04:41:54 +0200 Subject: [PATCH 30/35] updated version information after successful test of v2.0.1-244-g12e94bd --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index d0bf70ba1..110bfbbce 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.1-238-g96349eb +v2.0.1-244-g12e94bd From cc28da502bc5f74ae55e4fb4b4bd150589d3920d Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 21 Oct 2016 15:05:34 +0200 Subject: [PATCH 31/35] ifort 2017 requires new option format --- code/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/code/Makefile b/code/Makefile index 331feec27..5a43040aa 100644 --- a/code/Makefile +++ b/code/Makefile @@ -65,7 +65,7 @@ endif # settings for shared memory multicore support ifeq "$(OPENMP)" "ON" -OPENMP_FLAG_ifort =-openmp -openmp-report0 -parallel +OPENMP_FLAG_ifort =-qopenmp -parallel OPENMP_FLAG_gfortran =-fopenmp endif From 633744bfa9b1e9ab7632d2205d7b444e842a8814 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 21 Oct 2016 15:08:47 +0200 Subject: [PATCH 32/35] used only for commercial FEM anyway --- code/CPFEM.f90 | 53 -------------------------------------------------- 1 file changed, 53 deletions(-) diff --git a/code/CPFEM.f90 b/code/CPFEM.f90 index 0774fba86..a1dac9801 100644 --- a/code/CPFEM.f90 +++ b/code/CPFEM.f90 @@ -10,7 +10,6 @@ module CPFEM implicit none private -#if defined(Marc4DAMASK) || defined(Abaqus) real(pReal), parameter, private :: & CPFEM_odd_stress = 1e15_pReal, & !< return value for stress in case of ping pong dummy cycle CPFEM_odd_jacobian = 1e50_pReal !< return value for jacobian in case of ping pong dummy cycle @@ -20,7 +19,6 @@ module CPFEM CPFEM_dcsdE !< Cauchy stress tangent real(pReal), dimension (:,:,:,:), allocatable, private :: & CPFEM_dcsdE_knownGood !< known good tangent -#endif integer(pInt), public :: & cycleCounter = 0_pInt, & !< needs description theInc = -1_pInt, & !< needs description @@ -83,10 +81,6 @@ subroutine CPFEM_initAll(el,ip) use IO, only: & IO_init use DAMASK_interface -#ifdef FEM - use FEZoo, only: & - FEZoo_init -#endif implicit none integer(pInt), intent(in) :: el, & !< FE el number @@ -97,9 +91,6 @@ subroutine CPFEM_initAll(el,ip) call DAMASK_interface_init ! 
Spectral and FEM interface to commandline call prec_init call IO_init -#ifdef FEM - call FEZoo_init -#endif call numerics_init call debug_init call math_init @@ -138,16 +129,12 @@ subroutine CPFEM_init debug_levelBasic, & debug_levelExtensive use FEsolving, only: & -#if defined(Marc4DAMASK) || defined(Abaqus) symmetricSolver, & -#endif restartRead, & modelName -#if defined(Marc4DAMASK) || defined(Abaqus) use mesh, only: & mesh_NcpElems, & mesh_maxNips -#endif use material, only: & material_phase, & homogState, & @@ -173,12 +160,10 @@ subroutine CPFEM_init #include "compilation_info.f90" endif mainProcess -#if defined(Marc4DAMASK) || defined(Abaqus) ! initialize stress and jacobian to zero allocate(CPFEM_cs(6,mesh_maxNips,mesh_NcpElems)) ; CPFEM_cs = 0.0_pReal allocate(CPFEM_dcsdE(6,6,mesh_maxNips,mesh_NcpElems)) ; CPFEM_dcsdE = 0.0_pReal allocate(CPFEM_dcsdE_knownGood(6,6,mesh_maxNips,mesh_NcpElems)) ; CPFEM_dcsdE_knownGood = 0.0_pReal -#endif ! *** restore the last converged values of each essential variable from the binary file if (restartRead) then @@ -243,21 +228,17 @@ subroutine CPFEM_init enddo readHomogInstances close (777) -#if defined(Marc4DAMASK) || defined(Abaqus) call IO_read_realFile(777,'convergeddcsdE',modelName,size(CPFEM_dcsdE)) read (777,rec=1) CPFEM_dcsdE close (777) -#endif restartRead = .false. endif -#if defined(Marc4DAMASK) || defined(Abaqus) if (iand(debug_level(debug_CPFEM), debug_levelBasic) /= 0) then write(6,'(a32,1x,6(i8,1x))') 'CPFEM_cs: ', shape(CPFEM_cs) write(6,'(a32,1x,6(i8,1x))') 'CPFEM_dcsdE: ', shape(CPFEM_dcsdE) write(6,'(a32,1x,6(i8,1x),/)') 'CPFEM_dcsdE_knownGood: ', shape(CPFEM_dcsdE_knownGood) write(6,'(a32,l1)') 'symmetricSolver: ', symmetricSolver endif -#endif flush(6) end subroutine CPFEM_init @@ -266,11 +247,7 @@ end subroutine CPFEM_init !-------------------------------------------------------------------------------------------------- !> @brief perform initialization at first call, update variables and call the actual material model !-------------------------------------------------------------------------------------------------- -#if defined(Marc4DAMASK) || defined(Abaqus) subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt, elFE, ip, cauchyStress, jacobian) -#else -subroutine CPFEM_general(mode, ffn, ffn1, dt, elFE, ip) -#endif use numerics, only: & defgradTolerance, & iJacoStiffness, & @@ -281,7 +258,6 @@ subroutine CPFEM_general(mode, ffn, ffn1, dt, elFE, ip) debug_levelBasic, & debug_levelExtensive, & debug_levelSelective, & -#if defined(Marc4DAMASK) || defined(Abaqus) debug_stressMaxLocation, & debug_stressMinLocation, & debug_jacobianMaxLocation, & @@ -290,7 +266,6 @@ subroutine CPFEM_general(mode, ffn, ffn1, dt, elFE, ip) debug_stressMin, & debug_jacobianMax, & debug_jacobianMin, & -#endif debug_e, & debug_i use FEsolving, only: & @@ -348,12 +323,10 @@ subroutine CPFEM_general(mode, ffn, ffn1, dt, elFE, ip) use homogenization, only: & materialpoint_F, & materialpoint_F0, & -#if defined(Marc4DAMASK) || defined(Abaqus) materialpoint_P, & materialpoint_dPdF, & materialpoint_results, & materialpoint_sizeResults, & -#endif materialpoint_stressAndItsTangent, & materialpoint_postResults use IO, only: & @@ -368,7 +341,6 @@ subroutine CPFEM_general(mode, ffn, ffn1, dt, elFE, ip) real(pReal), dimension (3,3), intent(in) :: ffn, & !< deformation gradient for t=t0 ffn1 !< deformation gradient for t=t1 integer(pInt), intent(in) :: mode !< computation mode 1: regular computation plus aging of results -#if 
defined(Marc4DAMASK) || defined(Abaqus) real(pReal), intent(in) :: temperature_inp !< temperature logical, intent(in) :: parallelExecution !< flag indicating parallel computation of requested IPs real(pReal), dimension(6), intent(out) :: cauchyStress !< stress vector in Mandel notation @@ -381,20 +353,13 @@ subroutine CPFEM_general(mode, ffn, ffn1, dt, elFE, ip) real(pReal), dimension (3,3,3,3) :: H_sym, & H, & jacobian3333 ! jacobian in Matrix notation -#else - logical, parameter :: parallelExecution = .true. -#endif integer(pInt) elCP, & ! crystal plasticity element number i, j, k, l, m, n, ph, homog, mySource logical updateJaco ! flag indicating if JAcobian has to be updated character(len=1024) :: rankStr -#if defined(Marc4DAMASK) || defined(Abaqus) elCP = mesh_FEasCP('elem',elFE) -#else - elCP = elFE -#endif if (iand(debug_level(debug_CPFEM), debug_levelBasic) /= 0_pInt & .and. elCP == debug_e .and. ip == debug_i) then @@ -411,13 +376,10 @@ subroutine CPFEM_general(mode, ffn, ffn1, dt, elFE, ip) write(6,'(a,/)') '#############################################'; flush (6) endif - -#if defined(Marc4DAMASK) || defined(Abaqus) if (iand(mode, CPFEM_BACKUPJACOBIAN) /= 0_pInt) & CPFEM_dcsde_knownGood = CPFEM_dcsde if (iand(mode, CPFEM_RESTOREJACOBIAN) /= 0_pInt) & CPFEM_dcsde = CPFEM_dcsde_knownGood -#endif !*** age results and write restart data if requested if (iand(mode, CPFEM_AGERESULTS) /= 0_pInt) then @@ -514,11 +476,9 @@ subroutine CPFEM_general(mode, ffn, ffn1, dt, elFE, ip) enddo writeHomogInstances close (777) -#if defined(Marc4DAMASK) || defined(Abaqus) call IO_write_jobRealFile(777,'convergeddcsdE',size(CPFEM_dcsdE)) write (777,rec=1) CPFEM_dcsdE close (777) -#endif endif endif ! results aging @@ -529,22 +489,18 @@ subroutine CPFEM_general(mode, ffn, ffn1, dt, elFE, ip) !* If no parallel execution is required, there is no need to collect FEM input if (.not. parallelExecution) then -#if defined(Marc4DAMASK) || defined(Abaqus) temperature(material_homog(ip,elCP))%p(thermalMapping(material_homog(ip,elCP))%p(ip,elCP)) = & temperature_inp -#endif materialpoint_F0(1:3,1:3,ip,elCP) = ffn materialpoint_F(1:3,1:3,ip,elCP) = ffn1 elseif (iand(mode, CPFEM_COLLECT) /= 0_pInt) then -#if defined(Marc4DAMASK) || defined(Abaqus) call random_number(rnd) if (rnd < 0.5_pReal) rnd = rnd - 1.0_pReal CPFEM_cs(1:6,ip,elCP) = rnd * CPFEM_odd_stress CPFEM_dcsde(1:6,1:6,ip,elCP) = CPFEM_odd_jacobian * math_identity2nd(6) temperature(material_homog(ip,elCP))%p(thermalMapping(material_homog(ip,elCP))%p(ip,elCP)) = & temperature_inp -#endif materialpoint_F0(1:3,1:3,ip,elCP) = ffn materialpoint_F(1:3,1:3,ip,elCP) = ffn1 CPFEM_calc_done = .false. @@ -569,12 +525,10 @@ subroutine CPFEM_general(mode, ffn, ffn1, dt, elFE, ip) endif outdatedFFN1 = .true. 
endif -#if defined(Marc4DAMASK) || defined(Abaqus) call random_number(rnd) if (rnd < 0.5_pReal) rnd = rnd - 1.0_pReal CPFEM_cs(1:6,ip,elCP) = rnd*CPFEM_odd_stress CPFEM_dcsde(1:6,1:6,ip,elCP) = CPFEM_odd_jacobian*math_identity2nd(6) -#endif !*** deformation gradient is not outdated @@ -607,7 +561,6 @@ subroutine CPFEM_general(mode, ffn, ffn1, dt, elFE, ip) endif !* map stress and stiffness (or return odd values if terminally ill) -#if defined(Marc4DAMASK) || defined(Abaqus) terminalIllness: if ( terminallyIll ) then call random_number(rnd) @@ -648,11 +601,8 @@ subroutine CPFEM_general(mode, ffn, ffn1, dt, elFE, ip) CPFEM_dcsde(1:6,1:6,ip,elCP) = math_Mandel3333to66(J_inverse * H_sym) endif terminalIllness -#endif - endif validCalculation -#if defined(Marc4DAMASK) || defined(Abaqus) !* report stress and stiffness if ((iand(debug_level(debug_CPFEM), debug_levelExtensive) /= 0_pInt) & .and. ((debug_e == elCP .and. debug_i == ip) & @@ -663,11 +613,9 @@ subroutine CPFEM_general(mode, ffn, ffn1, dt, elFE, ip) '<< CPFEM >> Jacobian/GPa at elFE ip ', elFE, ip, transpose(CPFEM_dcsdE(1:6,1:6,ip,elCP))*1.0e-9_pReal flush(6) endif -#endif endif -#if defined(Marc4DAMASK) || defined(Abaqus) !*** warn if stiffness close to zero if (all(abs(CPFEM_dcsdE(1:6,1:6,ip,elCP)) < 1e-10_pReal)) call IO_warning(601,elCP,ip) @@ -696,7 +644,6 @@ subroutine CPFEM_general(mode, ffn, ffn1, dt, elFE, ip) debug_jacobianMinLocation = [elCP, ip] debug_jacobianMin = minval(jacobian3333) endif -#endif end subroutine CPFEM_general From 54d98a20466c6e9239dc905d0567240773e61a2e Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 21 Oct 2016 15:11:14 +0200 Subject: [PATCH 33/35] leftovers from perturbation calculation --- code/crystallite.f90 | 6 ------ 1 file changed, 6 deletions(-) diff --git a/code/crystallite.f90 b/code/crystallite.f90 index 399e63b0e..2212ba9a1 100644 --- a/code/crystallite.f90 +++ b/code/crystallite.f90 @@ -513,8 +513,6 @@ subroutine crystallite_stressAndItsTangent(updateJaco) subStepMinCryst, & subStepSizeCryst, & stepIncreaseCryst, & - pert_Fg, & - pert_method, & nCryst, & numerics_integrator, & numerics_integrationMode, & @@ -574,7 +572,6 @@ subroutine crystallite_stressAndItsTangent(updateJaco) logical, intent(in) :: & updateJaco !< whether to update the Jacobian (stiffness) or not real(pReal) :: & - myPert, & ! perturbation with correct sign formerSubStep, & subFracIntermediate real(pReal), dimension(3,3) :: & @@ -586,14 +583,11 @@ subroutine crystallite_stressAndItsTangent(updateJaco) c, & !< counter in integration point component loop i, & !< counter in integration point loop e, & !< counter in element loop - k, & - l, & n, startIP, endIP, & neighboring_e, & neighboring_i, & o, & p, & - perturbation , & ! loop counter for forward,backward perturbation mode myNcomponents, & mySource ! 
local variables used for calculating analytic Jacobian From 54c0ba5599f6444b08afcbb0f8bcd19ebac1f914 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 21 Oct 2016 15:12:40 +0200 Subject: [PATCH 34/35] Abaqus does not restore the jacobian (strange, but true) --- code/DAMASK_abaqus_std.f | 1 - 1 file changed, 1 deletion(-) diff --git a/code/DAMASK_abaqus_std.f b/code/DAMASK_abaqus_std.f index cdd12dac8..d15682c58 100644 --- a/code/DAMASK_abaqus_std.f +++ b/code/DAMASK_abaqus_std.f @@ -116,7 +116,6 @@ subroutine UMAT(STRESS,STATEV,DDSDDE,SSE,SPD,SCD,& CPFEM_CALCRESULTS, & CPFEM_AGERESULTS, & CPFEM_COLLECT, & - CPFEM_RESTOREJACOBIAN, & CPFEM_BACKUPJACOBIAN, & cycleCounter, & theInc, & From 20985f83b51f7995e31c1016bf12809edf774c78 Mon Sep 17 00:00:00 2001 From: Test User Date: Sat, 22 Oct 2016 11:02:13 +0200 Subject: [PATCH 35/35] updated version information after successful test of v2.0.1-249-g54c0ba5 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 110bfbbce..6a2d0648d 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.1-244-g12e94bd +v2.0.1-249-g54c0ba5
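[Editor's note on PATCH 26/35, asciitable.py] The one-liner added in four places strips a single matching pair of outermost quotes from a column label before it is looked up in the table header, so quoted and unquoted labels resolve to the same column; labels with mismatched or no quotes are left untouched. A standalone Python sketch of that idiom, for illustration only — the helper name is invented here and does not exist in asciitable.py.

    def strip_outer_quotes(label):
        """Drop one matching pair of outermost quotes, as in the ASCIItable label lookup."""
        if len(label) > 1 and label[0] == label[-1] and label[0] in ('"', "'"):
            return label[1:-1]
        return label

    for raw in ['"1_pos"', "'f'", 'texture', '"mixed\'']:
        print(raw, '->', strip_outer_quotes(raw))
    # "1_pos" -> 1_pos ; 'f' -> f ; texture unchanged ; mismatched quotes left alone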