merge development

parent 61265ff994
commit 6ec543d086
@@ -2,7 +2,7 @@ name: Grid and Mesh Solver
 on: [push]

 env:
-  PETSC_VERSION: '3.16.2'
+  PETSC_VERSION: '3.17.0'
   HOMEBREW_NO_ANALYTICS: 'ON'              # Make Homebrew installation a little quicker
   HOMEBREW_NO_AUTO_UPDATE: 'ON'
   HOMEBREW_NO_BOTTLE_SOURCE_FALLBACK: 'ON'
@@ -10,7 +10,8 @@ jobs:
     strategy:
       matrix:
         python-version: ['3.8', '3.9'] #, '3.10']
-        os: [ubuntu-latest, macos-latest]
+        os: [ubuntu-latest, macos-latest, windows-latest]
+      fail-fast: false

     steps:
       - uses: actions/checkout@v2
@@ -25,11 +26,18 @@ jobs:
          python -m pip install --upgrade pip
          pip install pytest pandas scipy h5py vtk matplotlib pyyaml

-      - name: Install and run unit tests
+      - name: Install and run unit tests (Unix)
+        if: runner.os != 'Windows'
        run: |
          python -m pip install ./python --no-deps -vv --use-feature=in-tree-build
          COLUMNS=256 pytest python

+      - name: Install and run unit tests (Windows)
+        if: runner.os == 'Windows'
+        run: |
+          python -m pip install ./python --no-deps -vv --use-feature=in-tree-build
+          pytest python -k 'not XDMF'
+
   apt:

     runs-on: ubuntu-latest
@@ -9,8 +9,10 @@ endif()
 # Dummy project to determine compiler names and version
 project(Prerequisites LANGUAGES)
-set(ENV{PKG_CONFIG_PATH} "$ENV{PETSC_DIR}/$ENV{PETSC_ARCH}/lib/pkgconfig")
-pkg_check_modules(PETSC REQUIRED PETSc>=3.12.0 PETSc<3.17.0)
+set(ENV{PKG_CONFIG_PATH} "$ENV{PETSC_DIR}/$ENV{PETSC_ARCH}/lib/pkgconfig:$ENV{PKG_CONFIG_PATH}")
+pkg_check_modules(PETSC_MIN REQUIRED PETSc>=3.12.0 QUIET) #CMake does not support version range
+pkg_check_modules(PETSC REQUIRED PETSc<3.18.0)

 pkg_get_variable(CMAKE_Fortran_COMPILER PETSc fcompiler)
 pkg_get_variable(CMAKE_C_COMPILER PETSc ccompiler)

@@ -25,6 +27,13 @@ else()
 endif()
 add_definitions("-D${DAMASK_SOLVER}")

+# EXPERIMENTAL: This might help to detect HDF5 and FFTW3 in the future if PETSc is not aware of them
+set(ENV{PKG_CONFIG_PATH} "$ENV{PETSC_DIR}/$ENV{PETSC_ARCH}/externalpackages:$ENV{PKG_CONFIG_PATH}")
+pkg_check_modules(HDF5 hdf5)
+pkg_check_modules(FFTW3 fftw3)
+pkg_check_modules(fYAML libfyaml)
+pkg_check_modules(zlib zlib)
+
 file(STRINGS ${PROJECT_SOURCE_DIR}/VERSION DAMASK_VERSION)

 message("\nBuilding ${CMAKE_PROJECT_NAME} ${DAMASK_VERSION}\n")
@@ -32,6 +41,9 @@ message("\nBuilding ${CMAKE_PROJECT_NAME} ${DAMASK_VERSION}\n")
 add_definitions(-DPETSC)
 add_definitions(-DDAMASKVERSION="${DAMASK_VERSION}")
 add_definitions(-DCMAKE_SYSTEM="${CMAKE_SYSTEM}")
+if(PETSC_VERSION VERSION_GREATER_EQUAL 3.17.0)
+  add_definitions("-DCHKERRQ=PetscCall")
+endif()

 if(CMAKE_BUILD_TYPE STREQUAL "")
   set(CMAKE_BUILD_TYPE "RELEASE")
@@ -104,8 +116,18 @@ if(CMAKE_BUILD_TYPE STREQUAL "DEBUG")
   set(CMAKE_Fortran_LINK_EXECUTABLE "${CMAKE_Fortran_LINK_EXECUTABLE} ${DEBUG_FLAGS}")
 endif()

 set(CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE} "${CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE}} ${PETSC_INCLUDES} ${BUILDCMD_POST}")
-set(CMAKE_Fortran_LINK_EXECUTABLE "${CMAKE_Fortran_LINK_EXECUTABLE} <OBJECTS> -o <TARGET> <LINK_LIBRARIES> -L${PETSC_LIBRARY_DIRS} -lpetsc ${PETSC_EXTERNAL_LIB} -lz ${BUILDCMD_POST}")
+set(CMAKE_Fortran_LINK_EXECUTABLE "${CMAKE_Fortran_LINK_EXECUTABLE} <OBJECTS> -o <TARGET> <LINK_LIBRARIES> -L${PETSC_LIBRARY_DIRS} -lpetsc ${PETSC_EXTERNAL_LIB} -lz")
+
+if(fYAML_FOUND STREQUAL "1")
+  set(CMAKE_Fortran_LINK_EXECUTABLE "${CMAKE_Fortran_LINK_EXECUTABLE} -L${fYAML_LIBRARY_DIRS} -l${fYAML_LIBRARIES}")
+  add_definitions(-DFYAML)
+  pkg_get_variable(fYAML_INCLUDE_DIR libfyaml includedir) # fYAML_INCLUDE_DIRS and fYAML_CFLAGS are not working
+  set(CMAKE_C_FLAGS_${CMAKE_BUILD_TYPE} "${CMAKE_C_FLAGS_${CMAKE_BUILD_TYPE}} -I${fYAML_INCLUDE_DIR}")
+endif()
+
+set(CMAKE_Fortran_LINK_EXECUTABLE "${CMAKE_Fortran_LINK_EXECUTABLE} ${BUILDCMD_POST}")

 message("Fortran Compiler Flags:\n${CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE}}\n")
 message("C Compiler Flags:\n${CMAKE_C_FLAGS_${CMAKE_BUILD_TYPE}}\n")
README (13 lines)

@@ -1,13 +0,0 @@
-DAMASK - The Düsseldorf Advanced Material Simulation Kit
-Visit damask.mpie.de for installation and usage instructions
-
-CONTACT INFORMATION
-
-Max-Planck-Institut für Eisenforschung GmbH
-Max-Planck-Str. 1
-40237 Düsseldorf
-Germany
-
-damask@mpie.de
-https://damask.mpie.de
-https://git.damask.mpie.de
@@ -0,0 +1,15 @@
+# DAMASK - The Düsseldorf Advanced Material Simulation Kit
+
+Visit [damask.mpie.de](https://damask.mpie.de) for installation and usage instructions
+
+## Contact Information
+
+Max-Planck-Institut für Eisenforschung GmbH
+Max-Planck-Str. 1
+40237 Düsseldorf
+Germany
+
+damask@mpie.de
+https://damask.mpie.de
+https://git.damask.mpie.de
+
@@ -8,7 +8,7 @@
 +# DAMASK uses the HDF5 compiler wrapper around the Intel compiler
 +H5FC="$(h5fc -shlib -show)"
 +HDF5_LIB=${H5FC//ifort/}
-+FCOMP="$H5FC -DDAMASK_HDF5"
++FCOMP="$H5FC"

 # AEM
 if test "$MARCDLLOUTDIR" = ""; then
@@ -8,7 +8,7 @@
 +# DAMASK uses the HDF5 compiler wrapper around the Intel compiler
 +H5FC="$(h5fc -shlib -show)"
 +HDF5_LIB=${H5FC//ifort/}
-+FCOMP="$H5FC -DDAMASK_HDF5"
++FCOMP="$H5FC"

 # AEM
 if test "$MARCDLLOUTDIR" = ""; then
@@ -7,7 +7,7 @@
 +# DAMASK uses the HDF5 compiler wrapper around the Intel compiler
 +H5FC="$(h5fc -shlib -show)"
 +HDF5_LIB=${H5FC//ifort/}
-+FCOMP="$H5FC -DDAMASK_HDF5"
++FCOMP="$H5FC"
 +
 # AEM
 if test "$MARCDLLOUTDIR" = ""; then
@@ -1 +1 @@
-v3.0.0-alpha6-170-gfab34d5c3
+v3.0.0-alpha6-228-g758ad6072
@@ -2,7 +2,6 @@ import os
 import json
 import functools
 import colorsys
-from pathlib import Path
 from typing import Union, TextIO

 import numpy as np
@@ -325,12 +324,7 @@ class Colormap(mpl.colors.ListedColormap):
             File handle with write access.

         """
-        if fname is None:
-            return open(self.name.replace(' ','_')+suffix, 'w', newline='\n')
-        elif isinstance(fname, (str, Path)):
-            return open(Path(fname).expanduser(), 'w', newline='\n')
-        else:
-            return fname
+        return util.open_text(self.name.replace(' ','_')+suffix if fname is None else fname, 'w')


     def save_paraview(self,
@@ -2,7 +2,6 @@ import copy
 from io import StringIO
 from collections.abc import Iterable
 import abc
-from pathlib import Path
 from typing import Union, Dict, Any, Type, TypeVar

 import numpy as np
@@ -10,6 +9,7 @@ import yaml

 from ._typehints import FileHandle
 from . import Rotation
+from . import util

 MyType = TypeVar('MyType', bound='Config')

@@ -144,10 +144,7 @@ class Config(dict):
             Configuration from file.

         """
-        fhandle = open(Path(fname).expanduser()) if isinstance(fname, (str, Path)) else \
-                  fname
-
-        return cls(yaml.safe_load(fhandle))
+        return cls(yaml.safe_load(util.open_text(fname)))


     def save(self,
              fname: FileHandle,
@@ -163,9 +160,6 @@ class Config(dict):
             Keyword arguments parsed to yaml.dump.

         """
-        fhandle = open(Path(fname).expanduser(),'w',newline='\n') if isinstance(fname, (str, Path)) else \
-                  fname
-
         if 'width' not in kwargs:
             kwargs['width'] = 256
         if 'default_flow_style' not in kwargs:
@@ -173,6 +167,7 @@ class Config(dict):
         if 'sort_keys' not in kwargs:
             kwargs['sort_keys'] = False

+        fhandle = util.open_text(fname,'w')
         try:
             fhandle.write(yaml.dump(self,Dumper=NiceDumper,**kwargs))
         except TypeError:                               # compatibility with old pyyaml
@@ -314,7 +314,7 @@ class Grid:
         >>> import damask
         >>> N_grains = 20
         >>> cells = (32,32,32)
-        >>> damask.util.run(f'neper -T -n {N_grains} -tesrsize {cells[0]}:{cells[1]}:{cells[2]} -periodicity "all" -format "vtk"')
+        >>> damask.util.run(f'neper -T -n {N_grains} -tesrsize {cells[0]}:{cells[1]}:{cells[2]} -periodicity all -format vtk')
         >>> damask.Grid.load_Neper(f'n{N_grains}-id1.vtk')
         cells:  32 × 32 × 32
         size:   1.0 × 1.0 × 1.0 m³
@@ -786,8 +786,8 @@ class Grid:
         # materials: 1

         """
-        offset_ = np.array(offset,int) if offset is not None else np.zeros(3,int)
-        cells_ = np.array(cells,int) if cells is not None else self.cells
+        offset_ = np.array(offset,np.int64) if offset is not None else np.zeros(3,np.int64)
+        cells_ = np.array(cells,np.int64) if cells is not None else self.cells

         canvas = np.full(cells_,np.nanmax(self.material) + 1 if fill is None else fill,self.material.dtype)

@@ -829,7 +829,7 @@ class Grid:

         >>> import numpy as np
         >>> import damask
-        >>> g = damask.Grid(np.zeros([32]*3,int), np.ones(3)*1e-4)
+        >>> g = damask.Grid(np.zeros([32]*3,int),np.ones(3)*1e-4)
         >>> g.mirror('xy',True)
         cells : 64 x 64 x 32
         size  : 0.0002 x 0.0002 x 0.0001 m³
@@ -1151,14 +1151,14 @@ class Result:

         >>> import damask
         >>> r = damask.Result('my_file.hdf5')
-        >>> r.strain(t='U',m=0.5)
+        >>> r.add_strain(t='U',m=0.5)

         Add the plastic Euler-Almansi strain based on the
         plastic deformation gradient 'F_p':

         >>> import damask
         >>> r = damask.Result('my_file.hdf5')
-        >>> r.strain('F_p','V',-1)
+        >>> r.add_strain('F_p','V',-1)

         """
         self._add_generic_pointwise(self._add_strain,{'F':F},{'t':t,'m':m})
@@ -1538,7 +1538,7 @@ class Result:
                                                               np.prod(shape))}
                 data_items[-1].text = f'{os.path.split(self.fname)[1]}:{name}'

-        with open(self.fname.with_suffix('.xdmf').name,'w',newline='\n') as f:
+        with util.open_text(self.fname.with_suffix('.xdmf').name,'w') as f:
             f.write(xml.dom.minidom.parseString(ET.tostring(xdmf).decode()).toprettyxml())


@@ -1803,7 +1803,7 @@ class Result:
             if type(obj) == h5py.Dataset and _match(output,[name]):
                 d = obj.attrs['description'] if h5py3 else obj.attrs['description'].decode()
                 if not Path(name).exists() or overwrite:
-                    with open(name,'w') as f_out: f_out.write(obj[0].decode())
+                    with util.open_text(name,'w') as f_out: f_out.write(obj[0].decode())
                     print(f'Exported {d} to "{name}".')
                 else:
                     print(f'"{name}" exists, {d} not exported.')
@@ -1,6 +1,5 @@
 import re
 import copy
-from pathlib import Path
 from typing import Union, Tuple, List

 import pandas as pd
@@ -260,7 +259,7 @@ class Table:
             Table data from file.

         """
-        f = open(Path(fname).expanduser()) if isinstance(fname, (str, Path)) else fname
+        f = util.open_text(fname)
         f.seek(0)

         comments = []
@@ -312,7 +311,7 @@ class Table:
             Table data from file.

         """
-        f = open(fname) if isinstance(fname, (str, Path)) else fname
+        f = util.open_text(fname)
         f.seek(0)

         content = f.readlines()
@@ -594,7 +593,7 @@ class Table:
         labels += [f'{util.srepr(self.shapes[l],"x")}:{i+1}_{l}' \
                    for i in range(np.prod(self.shapes[l]))]

-        f = open(Path(fname).expanduser(),'w',newline='\n') if isinstance(fname, (str, Path)) else fname
+        f = util.open_text(fname,'w')

         f.write('\n'.join([f'# {c}' for c in self.comments] + [' '.join(labels)])+('\n' if labels else ''))
-        self.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False)
+        self.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False,line_terminator='\n')
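The added `line_terminator='\n'` in `Table.save` presumably keeps the pandas output byte-identical across platforms now that files may also be written on the Windows runners added to the CI matrix; without it, writing through a text-mode handle can produce `\r\n`. A standalone pandas sketch of the effect, not DAMASK code (the keyword was renamed to `lineterminator` in pandas 2.0):

```python
import io
import pandas as pd

df = pd.DataFrame({'x': [1, 2], 'y': [3, 4]})

buf = io.StringIO()
# force Unix newlines regardless of the operating system
df.to_csv(buf, sep=' ', index=False, header=False, line_terminator='\n')
print(repr(buf.getvalue()))   # '1 3\n2 4\n'
```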
@@ -159,8 +159,8 @@ def coordinates0_point(cells: _IntSequence,

     """
     size_ = _np.array(size,float)
-    start = origin + size_/_np.array(cells,int)*.5
-    end   = origin + size_ - size_/_np.array(cells,int)*.5
+    start = origin + size_/_np.array(cells,_np.int64)*.5
+    end   = origin + size_ - size_/_np.array(cells,_np.int64)*.5

     return _np.stack(_np.meshgrid(_np.linspace(start[0],end[0],cells[0]),
                                   _np.linspace(start[1],end[1],cells[1]),
@@ -290,7 +290,7 @@ def cellsSizeOrigin_coordinates0_point(coordinates0: _np.ndarray,
     coords    = [_np.unique(coordinates0[:,i]) for i in range(3)]
     mincorner = _np.array(list(map(min,coords)))
     maxcorner = _np.array(list(map(max,coords)))
-    cells     = _np.array(list(map(len,coords)),int)
+    cells     = _np.array(list(map(len,coords)),_np.int64)
     size      = cells/_np.maximum(cells-1,1) * (maxcorner-mincorner)
     delta     = size/cells
     origin    = mincorner - delta*.5
@@ -455,7 +455,7 @@ def cellsSizeOrigin_coordinates0_node(coordinates0: _np.ndarray,
     coords    = [_np.unique(coordinates0[:,i]) for i in range(3)]
     mincorner = _np.array(list(map(min,coords)))
     maxcorner = _np.array(list(map(max,coords)))
-    cells     = _np.array(list(map(len,coords)),int) - 1
+    cells     = _np.array(list(map(len,coords)),_np.int64) - 1
     size      = maxcorner-mincorner
     origin    = mincorner

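The `int` → `_np.int64` switches in this file (and the matching ones in `_grid.py` and `util.py`) presumably target the new Windows runners: with NumPy < 2.0, `np.array(x, int)` maps to the platform's C `long`, which is 32 bit on Windows but 64 bit on Linux and macOS, so pinning `int64` keeps dtypes identical everywhere. A small illustration, not DAMASK code:

```python
import numpy as np

cells = [32, 32, 32]

a = np.array(cells, int)       # C 'long': int32 on Windows, int64 on Linux/macOS (NumPy < 2.0)
b = np.array(cells, np.int64)  # always 64 bit, independent of the platform

print(a.dtype, b.dtype)
```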
@@ -10,20 +10,21 @@ import signal
 import fractions
 from collections import abc
 from functools import reduce, partial
-from typing import Callable, Union, Iterable, Sequence, Dict, List, Tuple, Literal, Any, Collection
+from typing import Callable, Union, Iterable, Sequence, Dict, List, Tuple, Literal, Any, Collection, TextIO
 from pathlib import Path

 import numpy as np
 import h5py

 from . import version
-from ._typehints import FloatSequence, NumpyRngSeed, IntCollection
+from ._typehints import FloatSequence, NumpyRngSeed, IntCollection, FileHandle

 # limit visibility
 __all__=[
         'srepr',
         'emph', 'deemph', 'warn', 'strikeout',
         'run',
+        'open_text',
         'natural_sort',
         'show_progress',
         'scale_to_coprime',
@@ -206,7 +207,25 @@ def run(cmd: str,
     return stdout, stderr


-execute = run
+def open_text(fname: FileHandle,
+              mode: Literal['r','w'] = 'r') -> TextIO:
+    """
+    Open a text file.
+
+    Parameters
+    ----------
+    fname : file, str, or pathlib.Path
+        Name or handle of file.
+    mode: {'r','w'}, optional
+        Access mode: 'r'ead or 'w'rite, defaults to 'r'.
+
+    Returns
+    -------
+    f : file handle
+
+    """
+    return fname if not isinstance(fname, (str,Path)) else \
+           open(Path(fname).expanduser(),mode,newline=('\n' if mode == 'w' else None))


 def natural_sort(key: str) -> List[Union[int, str]]:
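The `open_text` helper added above replaces the str/Path-versus-handle branching that used to be duplicated in `Colormap`, `Config`, `Table`, and `Result`. A minimal usage sketch based on the definition in this hunk (the file name is made up):

```python
from damask import util

# a str or pathlib.Path is expanded ('~') and opened;
# mode 'w' forces Unix newlines so output is identical on Windows
with util.open_text('notes.txt', 'w') as f:
    f.write('cells: 16 x 16 x 16\n')

# an already open handle is passed through unchanged,
# so callers may hand over either a path or a file object
with open('notes.txt') as f:
    assert util.open_text(f) is f
```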
@@ -431,7 +450,7 @@ def hybrid_IA(dist: np.ndarray,

     scale_,scale,inc_factor = (0.0,float(N_opt_samples),1.0)
     while (not np.isclose(scale, scale_)) and (N_inv_samples != N_opt_samples):
-        repeats = np.rint(scale*dist).astype(int)
+        repeats = np.rint(scale*dist).astype(np.int64)
         N_inv_samples = np.sum(repeats)
         scale_,scale,inc_factor = (scale,scale+inc_factor*0.5*(scale - scale_), inc_factor*2.0) \
                                   if N_inv_samples < N_opt_samples else \
@@ -1,3 +1,4 @@
+import sys
 import os
 import pytest
 import numpy as np
@@ -41,6 +42,7 @@ class TestConfigMaterial:
         material_config['material'][0]['constituents'][0]['O']=[0,0,0,0]
         assert not material_config.is_valid

+    @pytest.mark.xfail(sys.platform == 'win32', reason='utf8 "not equal" might cause trouble')
     def test_invalid_fraction(self,ref_path):
         material_config = ConfigMaterial.load(ref_path/'material.yaml')
         material_config['material'][0]['constituents'][0]['v']=.9
@@ -1,3 +1,5 @@
+import sys
+
 import pytest
 import numpy as np
 import vtk
@@ -47,6 +49,7 @@ class TestGrid:


     @pytest.mark.parametrize('cmap',[Colormap.from_predefined('stress'),'viridis'])
+    @pytest.mark.skipif(sys.platform == 'win32', reason='DISPLAY has no effect on windows')
     def test_show(sef,default,cmap,monkeypatch):
         monkeypatch.delenv('DISPLAY',raising=False)
         default.show(cmap)
@@ -108,7 +108,7 @@ class TestResult:
         assert np.allclose(in_memory,in_file)

     @pytest.mark.parametrize('mode',
-        ['direct',pytest.param('function',marks=pytest.mark.xfail(sys.platform=='darwin',reason='n/a'))])
+        ['direct',pytest.param('function',marks=pytest.mark.xfail(sys.platform in ['darwin','win32'], reason='n/a'))])
     def test_add_calculation(self,default,tmp_path,mode):

         if mode == 'direct':
@@ -2,6 +2,7 @@ import os
 import filecmp
 import time
 import string
+import sys

 import pytest
 import numpy as np
@@ -31,6 +32,7 @@ class TestVTK:
         print('patched damask.util.execution_stamp')

     @pytest.mark.parametrize('cmap',[Colormap.from_predefined('cividis'),'strain'])
+    @pytest.mark.skipif(sys.platform == 'win32', reason='DISPLAY has no effect on windows')
     def test_show(sef,default,cmap,monkeypatch):
         monkeypatch.delenv('DISPLAY',raising=False)
         default.show(colormap=cmap)
@@ -1,5 +1,5 @@
+import sys
 import random
-import os

 import pytest
 import numpy as np
@@ -11,17 +11,20 @@ from damask import util

 class TestUtil:

-    def test_execute_direct(self):
-        out,err = util.execute('echo test')
+    @pytest.mark.xfail(sys.platform == 'win32', reason='echo is not a Windows command')
+    def test_run_direct(self):
+        out,err = util.run('echo test')
         assert out=='test\n' and err==''

-    def test_execute_env(self):
-        out,err = util.execute('sh -c "echo $test_for_execute"',env={'test_for_execute':'test'})
+    @pytest.mark.xfail(sys.platform == 'win32', reason='echo is not a Windows command')
+    def test_run_env(self):
+        out,err = util.run('sh -c "echo $test_for_execute"',env={'test_for_execute':'test'})
         assert out=='test\n' and err==''

-    def test_execute_runtime_error(self):
+    @pytest.mark.xfail(sys.platform == 'win32', reason='false is not a Windows command')
+    def test_run_runtime_error(self):
         with pytest.raises(RuntimeError):
-            util.execute('false')
+            util.run('false')

     @pytest.mark.parametrize('input,output',
     [
@@ -125,9 +128,9 @@ class TestUtil:
     def test_D3D_base_group(self,tmp_path,complete):
         base_group = ''.join(random.choices('DAMASK', k=10))
         with h5py.File(tmp_path/'base_group.dream3d','w') as f:
-            f.create_group(os.path.join(base_group,'_SIMPL_GEOMETRY'))
+            f.create_group('/'.join((base_group,'_SIMPL_GEOMETRY')))
             if complete:
-                f[os.path.join(base_group,'_SIMPL_GEOMETRY')].create_dataset('SPACING',data=np.ones(3))
+                f['/'.join((base_group,'_SIMPL_GEOMETRY'))].create_dataset('SPACING',data=np.ones(3))

         if complete:
             assert base_group == util.DREAM3D_base_group(tmp_path/'base_group.dream3d')
@@ -141,12 +144,12 @@ class TestUtil:
         cell_data_group = ''.join(random.choices('KULeuven', k=10))
         cells = np.random.randint(1,50,3)
         with h5py.File(tmp_path/'cell_data_group.dream3d','w') as f:
-            f.create_group(os.path.join(base_group,'_SIMPL_GEOMETRY'))
-            f[os.path.join(base_group,'_SIMPL_GEOMETRY')].create_dataset('SPACING',data=np.ones(3))
-            f[os.path.join(base_group,'_SIMPL_GEOMETRY')].create_dataset('DIMENSIONS',data=cells[::-1])
+            f.create_group('/'.join((base_group,'_SIMPL_GEOMETRY')))
+            f['/'.join((base_group,'_SIMPL_GEOMETRY'))].create_dataset('SPACING',data=np.ones(3))
+            f['/'.join((base_group,'_SIMPL_GEOMETRY'))].create_dataset('DIMENSIONS',data=cells[::-1])
             f[base_group].create_group(cell_data_group)
             if complete:
-                f[os.path.join(base_group,cell_data_group)].create_dataset('data',shape=np.append(cells,1))
+                f['/'.join((base_group,cell_data_group))].create_dataset('data',shape=np.append(cells,1))

         if complete:
             assert cell_data_group == util.DREAM3D_cell_data_group(tmp_path/'cell_data_group.dream3d')
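The switch from `os.path.join` to `'/'.join` in the DREAM.3D tests reflects that paths inside an HDF5 file always use `/` as separator; `os.path.join` would insert a backslash on Windows and silently create a single, oddly named group. A short h5py sketch of the difference (the file name is made up):

```python
import h5py

base = 'DAMASK'

with h5py.File('demo.dream3d', 'w') as f:
    f.create_group('/'.join((base, '_SIMPL_GEOMETRY')))   # DAMASK/_SIMPL_GEOMETRY on every platform
    # os.path.join(base, '_SIMPL_GEOMETRY') would yield 'DAMASK\\_SIMPL_GEOMETRY'
    # on Windows, i.e. one group whose name contains a literal backslash
    print(list(f[base].keys()))                           # ['_SIMPL_GEOMETRY']
```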
@@ -7,7 +7,11 @@
 #include <pwd.h>
 #include <sys/types.h>
 #include <sys/stat.h>
-#include "zlib.h"
+#include <zlib.h>
+
+#ifdef FYAML
+#include <libfyaml.h>
+#endif

 #define PATHLEN 4096
 #define STRLEN 256
@@ -80,3 +84,26 @@ void inflate_c(const uLong *s_deflated, const uLong *s_inflated, const Byte defl
     }
   }
 }
+
+#ifdef FYAML
+void to_flow_c(char **flow, int* length_flow, const char *mixed){
+  struct fy_document *fyd = NULL;
+  enum fy_emitter_cfg_flags emit_flags = FYECF_MODE_FLOW_ONELINE | FYECF_STRIP_LABELS | FYECF_STRIP_TAGS |FYECF_STRIP_DOC;
+
+  fyd = fy_document_build_from_string(NULL, mixed, -1);
+  if (!fyd) {
+    *length_flow = -1;
+    return;
+  }
+  int err = fy_document_resolve(fyd);
+  if (err) {
+    *length_flow = -1;
+    return;
+  }
+
+  *flow = fy_emit_document_to_string(fyd,emit_flags);
+  *length_flow = strlen(*flow);
+
+  fy_document_destroy(fyd);
+}
+#endif
@@ -11,7 +11,7 @@
 !--------------------------------------------------------------------------------------------------
 #define PETSC_MAJOR 3
 #define PETSC_MINOR_MIN 12
-#define PETSC_MINOR_MAX 16
+#define PETSC_MINOR_MAX 17

 module DAMASK_interface
   use, intrinsic :: ISO_fortran_env
@@ -48,6 +48,7 @@ module HDF5_utilities
 !> @details for parallel IO, all dimension except for the last need to match
 !--------------------------------------------------------------------------------------------------
   interface HDF5_write
+#if defined(__GFORTRAN__) && __GNUC__<11
     module procedure HDF5_write_real1
     module procedure HDF5_write_real2
     module procedure HDF5_write_real3
@@ -55,7 +56,6 @@ module HDF5_utilities
     module procedure HDF5_write_real5
     module procedure HDF5_write_real6
     module procedure HDF5_write_real7
-
     module procedure HDF5_write_int1
     module procedure HDF5_write_int2
     module procedure HDF5_write_int3
@@ -63,6 +63,10 @@ module HDF5_utilities
     module procedure HDF5_write_int5
     module procedure HDF5_write_int6
     module procedure HDF5_write_int7
+#else
+    module procedure HDF5_write_real
+    module procedure HDF5_write_int
+#endif
   end interface HDF5_write

 !--------------------------------------------------------------------------------------------------
@@ -1210,6 +1214,7 @@ subroutine HDF5_read_int7(dataset,loc_id,datasetName,parallel)

 end subroutine HDF5_read_int7

+#if defined(__GFORTRAN__) && __GNUC__<11

 !--------------------------------------------------------------------------------------------------
 !> @brief write dataset of type real with 1 dimension
@@ -1499,6 +1504,71 @@ subroutine HDF5_write_real7(dataset,loc_id,datasetName,parallel)

 end subroutine HDF5_write_real7

+#else
+
+!--------------------------------------------------------------------------------------------------
+!> @brief write dataset of type real with 1-7 dimension
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_write_real(dataset,loc_id,datasetName,parallel)
+
+  real(pReal), intent(in), dimension(..) :: dataset     !< data written to file
+  integer(HID_T), intent(in) :: loc_id                  !< file or group handle
+  character(len=*), intent(in) :: datasetName           !< name of the dataset in the file
+  logical, intent(in), optional :: parallel             !< dataset is distributed over multiple processes
+
+
+  integer :: hdferr
+  integer(HID_T)   :: dset_id, filespace_id, memspace_id, plist_id
+  integer(HSIZE_T), dimension(rank(dataset)) :: &
+    myStart, &
+    myShape, &                                           !< shape of the dataset (this process)
+    totalShape                                           !< shape of the dataset (all processes)
+
+!---------------------------------------------------------------------------------------------------
+! determine shape of dataset
+  myShape = int(shape(dataset),HSIZE_T)
+  if (any(myShape(1:size(myShape)-1) == 0)) return       !< empty dataset (last dimension can be empty)
+
+  if (present(parallel)) then
+    call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+                          myStart, totalShape,loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,parallel)
+  else
+    call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+                          myStart, totalShape,loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,parallel_default)
+  end if
+
+  if (product(totalShape) /= 0) then
+    select rank(dataset)
+      rank (1)
+        call H5Dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
+                        file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
+      rank (2)
+        call H5Dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
+                        file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
+      rank (3)
+        call H5Dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
+                        file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
+      rank (4)
+        call H5Dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
+                        file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
+      rank (5)
+        call H5Dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
+                        file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
+      rank (6)
+        call H5Dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
+                        file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
+      rank (7)
+        call H5Dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
+                        file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
+    end select
+    if(hdferr < 0) error stop 'HDF5 error'
+  end if
+
+  call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
+
+end subroutine HDF5_write_real
+#endif
+

 !--------------------------------------------------------------------------------------------------
 !> @brief Write dataset of type string (scalar).
@@ -1561,6 +1631,7 @@ subroutine HDF5_write_str(dataset,loc_id,datasetName)

 end subroutine HDF5_write_str

+#if defined(__GFORTRAN__) && __GNUC__<11

 !--------------------------------------------------------------------------------------------------
 !> @brief write dataset of type integer with 1 dimension
@@ -1849,6 +1920,70 @@ subroutine HDF5_write_int7(dataset,loc_id,datasetName,parallel)

 end subroutine HDF5_write_int7

+#else
+
+!--------------------------------------------------------------------------------------------------
+!> @brief write dataset of type integer with 1-7 dimension
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_write_int(dataset,loc_id,datasetName,parallel)
+
+  integer, intent(in), dimension(..) :: dataset          !< data written to file
+  integer(HID_T), intent(in) :: loc_id                   !< file or group handle
+  character(len=*), intent(in) :: datasetName            !< name of the dataset in the file
+  logical, intent(in), optional :: parallel              !< dataset is distributed over multiple processes
+
+
+  integer :: hdferr
+  integer(HID_T)   :: dset_id, filespace_id, memspace_id, plist_id
+  integer(HSIZE_T), dimension(rank(dataset)) :: &
+    myStart, &
+    myShape, &                                           !< shape of the dataset (this process)
+    totalShape                                           !< shape of the dataset (all processes)
+
+!---------------------------------------------------------------------------------------------------
+! determine shape of dataset
+  myShape = int(shape(dataset),HSIZE_T)
+  if (any(myShape(1:size(myShape)-1) == 0)) return       !< empty dataset (last dimension can be empty)
+
+  if (present(parallel)) then
+    call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+                          myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,parallel)
+  else
+    call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+                          myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,parallel_default)
+  end if
+
+  if (product(totalShape) /= 0) then
+    select rank(dataset)
+      rank(1)
+        call H5Dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
+                        file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
+      rank(2)
+        call H5Dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
+                        file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
+      rank(3)
+        call H5Dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
+                        file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
+      rank(4)
+        call H5Dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
+                        file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
+      rank(5)
+        call H5Dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
+                        file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
+      rank(6)
+        call H5Dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
+                        file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
+      rank(7)
+        call H5Dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
+                        file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
+    end select
+    if(hdferr < 0) error stop 'HDF5 error'
+  end if
+
+  call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
+
+end subroutine HDF5_write_int
+#endif

 !--------------------------------------------------------------------------------------------------
 !> @brief initialize HDF5 handles, determines global shape and start for parallel read
@@ -483,7 +483,9 @@ subroutine IO_error(error_ID,el,ip,g,instance,ext_msg)
     case (701)
       msg = 'Incorrect indent/Null value not allowed'
     case (702)
-      msg = 'Invalid use of flow yaml'
+      msg = 'Invalid use of flow YAML'
+    case (703)
+      msg = 'Invalid YAML'
     case (704)
       msg = 'Space expected after a colon for <key>: <value> pair'
     case (705)
@@ -81,7 +81,7 @@ subroutine VTI_readDataset_raw(base64_str,dataType,headerType,compressed, &
   character(len=:), allocatable, intent(out) :: dataType, headerType, base64_str
   logical, intent(out) :: compressed

-  logical :: inFile,inImage,gotCellData
+  logical :: inFile, inImage
   integer(pI64) :: &
     startPos, endPos, &
     s
@@ -152,10 +152,9 @@ subroutine VTI_readCellsSizeOrigin(cells,geomSize,origin, &
    fileContent

   character(len=:), allocatable :: dataType, headerType
-  logical :: inFile,inImage,gotCellData,compressed
+  logical :: inFile, inImage, compressed
   integer(pI64) :: &
-    startPos, endPos, &
-    s
+    startPos, endPos


   cells = -1
@@ -8,6 +8,9 @@ module YAML_parse
   use prec
   use IO
   use YAML_types
+#ifdef FYAML
+  use system_routines
+#endif

   implicit none
   private
@@ -16,14 +19,34 @@ module YAML_parse
     YAML_parse_init, &
     YAML_parse_str

+#ifdef FYAML
+  interface
+
+    subroutine to_flow_C(flow,length_flow,mixed) bind(C)
+      use, intrinsic :: ISO_C_Binding, only: C_INT, C_CHAR, C_PTR
+
+      type(C_PTR), intent(out) :: flow
+      integer(C_INT), intent(out) :: length_flow
+      character(kind=C_CHAR), dimension(*), intent(in) :: mixed
+    end subroutine to_flow_C
+
+  end interface
+#endif
+
+
 contains

 !--------------------------------------------------------------------------------------------------
 !> @brief Do sanity checks.
 !--------------------------------------------------------------------------------------------------
-subroutine YAML_parse_init
+subroutine YAML_parse_init()

-  call selfTest
+  print'(/,1x,a)', '<<<+- YAML_parse init -+>>>'
+#ifdef FYAML
+  print'(/,1x,a)', 'libfyaml powered'
+#else
+  call selfTest()
+#endif

 end subroutine YAML_parse_init

@@ -60,7 +83,7 @@ recursive function parse_flow(YAML_flow) result(node)
    s, &                                                                 ! start position of dictionary or list
    d                                                                    ! position of key: value separator (':')

-  flow_string = trim(adjustl(YAML_flow(:)))
+  flow_string = trim(adjustl(YAML_flow))
  if (len_trim(flow_string) == 0) then
    node => emptyDict
    return
@@ -145,8 +168,11 @@ logical function quotedString(line)

  character(len=*), intent(in) :: line


  quotedString = .false.

+  if (len(line) == 0) return
+
  if (scan(line(:1),IO_QUOTES) == 1) then
    quotedString = .true.
    if(line(len(line):len(line)) /= line(:1)) call IO_error(710,ext_msg=line)
@@ -155,8 +181,37 @@ logical function quotedString(line)
 end function quotedString


+#ifdef FYAML
 !--------------------------------------------------------------------------------------------------
-! @brief Returns Indentation.
+! @brief Convert all block style YAML parts to flow style.
+!--------------------------------------------------------------------------------------------------
+function to_flow(mixed) result(flow)
+
+  character(len=*), intent(in) :: mixed
+  character(:,C_CHAR), allocatable :: flow
+
+  type(C_PTR) :: str_ptr
+  integer(C_INT) :: strlen
+
+
+  call to_flow_C(str_ptr,strlen,f_c_string(mixed))
+  if (strlen < 1) call IO_error(703,ext_msg='libyfaml')
+  allocate(character(len=strlen,kind=c_char) :: flow)
+
+  block
+    character(len=strlen,kind=c_char), pointer :: s
+    call c_f_pointer(str_ptr,s)
+    flow = s(:len(s)-1)
+  end block
+
+  call free_C(str_ptr)
+
+end function to_flow
+
+
+#else
+!--------------------------------------------------------------------------------------------------
+! @brief Determine Indentation.
 ! @details It determines the indentation level for a given block/line.
 ! In cases for nested lists, an offset is added to determine the indent of the item block (skip
 ! leading dashes)
@@ -737,7 +792,7 @@ end subroutine


 !--------------------------------------------------------------------------------------------------
-! @brief convert all block style YAML parts to flow style
+! @brief Convert all block style YAML parts to flow style.
 !--------------------------------------------------------------------------------------------------
 function to_flow(blck)

@@ -921,5 +976,6 @@ subroutine selfTest
   end block basic_mixed

 end subroutine selfTest
+#endif

 end module YAML_parse
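Both `to_flow` implementations above (the new libfyaml-backed one and the existing Fortran fallback) normalize mixed block/flow YAML to pure flow style before the simple flow parser takes over. For reference, the same document in both notations, shown here with PyYAML as an illustration only, not as part of this commit:

```python
import yaml

block = """
homogenization:
  SX:
    N_constituents: 1
"""

data = yaml.safe_load(block)
# flow style: the whole document as one bracketed expression,
# which is the form to_flow hands to the Fortran flow parser
print(yaml.dump(data, default_flow_style=True).strip())
# {homogenization: {SX: {N_constituents: 1}}}
```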
@@ -119,7 +119,8 @@ module YAML_types

   type, extends(tNode), public :: tList

-    class(tItem), pointer :: first => NULL()
+    class(tItem), pointer :: first => NULL(), &
+                             last  => NULL()

     contains
     procedure :: asFormattedString => tList_asFormattedString
@@ -144,7 +145,7 @@ module YAML_types
 end type tDict


-  type :: tItem
+  type, public :: tItem
     character(len=:), allocatable :: key
     class(tNode),  pointer :: node => NULL()
     class(tItem),  pointer :: next => NULL()
@@ -1348,15 +1349,13 @@ subroutine tList_append(self,node)
   type(tItem), pointer :: item

   if (.not. associated(self%first)) then
-    allocate(self%first)
-    item => self%first
+    allocate(item)
+    self%first => item
+    self%last => item
   else
-    item => self%first
-    do while (associated(item%next))
-      item => item%next
-    enddo
-    allocate(item%next)
-    item => item%next
+    allocate(self%last%next)
+    item => self%last%next
+    self%last => item
   end if

   item%node => node
@@ -142,7 +142,7 @@ contains
 !> level chosen.
 !> Initializes FFTW.
 !--------------------------------------------------------------------------------------------------
-subroutine spectral_utilities_init
+subroutine spectral_utilities_init()

   PetscErrorCode :: err_PETSc
   integer :: i, j, k, &
@@ -350,6 +350,8 @@ subroutine spectral_utilities_init
     allocate (gamma_hat(3,3,3,3,cells1Red,cells(2),cells3), source = cmplx(0.0_pReal,0.0_pReal,pReal))
   endif

+  call selfTest()
+
 end subroutine spectral_utilities_init


@@ -1146,4 +1148,41 @@ subroutine utilities_saveReferenceStiffness

 end subroutine utilities_saveReferenceStiffness


+!--------------------------------------------------------------------------------------------------
+!> @brief Check correctness of forward-backward transform.
+!--------------------------------------------------------------------------------------------------
+subroutine selfTest()
+
+  real(pReal), allocatable, dimension(:,:,:,:,:) :: tensorField_real_
+  real(pReal), allocatable, dimension(:,:,:,:) :: vectorField_real_
+  real(pReal), allocatable, dimension(:,:,:) :: scalarField_real_
+
+
+  call random_number(tensorField_real)
+  tensorField_real(1:3,1:3,cells(1)+1:cells1Red*2,:,:) = 0.0_pReal
+  tensorField_real_ = tensorField_real
+  call utilities_FFTtensorForward()
+  call utilities_FFTtensorBackward()
+  tensorField_real(1:3,1:3,cells(1)+1:cells1Red*2,:,:) = 0.0_pReal
+  if (maxval(abs(tensorField_real_ - tensorField_real))>5.0e-15_pReal) error stop 'tensorField'
+
+  call random_number(vectorField_real)
+  vectorField_real(1:3,cells(1)+1:cells1Red*2,:,:) = 0.0_pReal
+  vectorField_real_ = vectorField_real
+  call utilities_FFTvectorForward()
+  call utilities_FFTvectorBackward()
+  vectorField_real(1:3,cells(1)+1:cells1Red*2,:,:) = 0.0_pReal
+  if (maxval(abs(vectorField_real_ - vectorField_real))>5.0e-15_pReal) error stop 'vectorField'
+
+  call random_number(scalarField_real)
+  scalarField_real(cells(1)+1:cells1Red*2,:,:) = 0.0_pReal
+  scalarField_real_ = scalarField_real
+  call utilities_FFTscalarForward()
+  call utilities_FFTscalarBackward()
+  scalarField_real(cells(1)+1:cells1Red*2,:,:) = 0.0_pReal
+  if (maxval(abs(scalarField_real_ - scalarField_real))>5.0e-15_pReal) error stop 'scalarField'
+
+end subroutine selfTest
+
 end module spectral_utilities
112
src/material.f90
112
src/material.f90
|
@ -91,9 +91,13 @@ subroutine parse()
|
||||||
homogenizations, &
|
homogenizations, &
|
||||||
homogenization
|
homogenization
|
||||||
|
|
||||||
|
class(tItem), pointer :: item
|
||||||
integer, dimension(:), allocatable :: &
|
integer, dimension(:), allocatable :: &
|
||||||
counterPhase, &
|
counterPhase, &
|
||||||
counterHomogenization
|
counterHomogenization, &
|
||||||
|
ho_of
|
||||||
|
integer, dimension(:,:), allocatable :: ph_of
|
||||||
|
real(pReal), dimension(:,:), allocatable :: v_of
|
||||||
|
|
||||||
real(pReal) :: v
|
real(pReal) :: v
|
||||||
integer :: &
|
integer :: &
|
||||||
|
@@ -102,11 +106,14 @@ subroutine parse()
     co, ce, &
     ma
 
 
   materials => config_material%get('material')
   phases => config_material%get('phase')
   homogenizations => config_material%get('homogenization')
 
-  call sanityCheck(materials, homogenizations)
+  if (maxval(discretization_materialAt) > materials%length) &
+    call IO_error(155,ext_msg='More materials requested than found in material.yaml')
 
 #if defined (__GFORTRAN__)
   material_name_phase = getKeys(phases)
@@ -123,6 +130,49 @@ subroutine parse()
   end do
   homogenization_maxNconstituents = maxval(homogenization_Nconstituents)
 
+  allocate(material_v(homogenization_maxNconstituents,discretization_Ncells),source=0.0_pReal)
+
+  allocate(material_O_0(materials%length))
+  allocate(material_F_i_0(materials%length))
+
+  allocate(ho_of(materials%length))
+  allocate(ph_of(materials%length,homogenization_maxNconstituents),source=-1)
+  allocate( v_of(materials%length,homogenization_maxNconstituents),source=0.0_pReal)
+
+  ! parse YAML structure
+  select type(materials)
+
+    class is(tList)
+
+      item => materials%first
+      do ma = 1, materials%length
+        material => item%node
+        ho_of(ma) = homogenizations%getIndex(material%get_asString('homogenization'))
+        constituents => material%get('constituents')
+
+        homogenization => homogenizations%get(ho_of(ma))
+        if (constituents%length /= homogenization%get_asInt('N_constituents')) call IO_error(148)
+
+        allocate(material_O_0(ma)%data(constituents%length))
+        allocate(material_F_i_0(ma)%data(1:3,1:3,constituents%length))
+
+        do co = 1, constituents%length
+          constituent => constituents%get(co)
+          v_of(ma,co) = constituent%get_asFloat('v')
+          ph_of(ma,co) = phases%getIndex(constituent%get_asString('phase'))
+
+          call material_O_0(ma)%data(co)%fromQuaternion(constituent%get_as1dFloat('O',requiredSize=4))
+          material_F_i_0(ma)%data(1:3,1:3,co) = constituent%get_as2dFloat('F_i',defaultVal=math_I3,requiredShape=[3,3])
+
+        end do
+        if (dNeq(sum(v_of(ma,:)),1.0_pReal,1.e-9_pReal)) call IO_error(153,ext_msg='constituent')
+
+        item => item%next
+      end do
+
+  end select
+
+
   allocate(counterPhase(phases%length),source=0)
   allocate(counterHomogenization(homogenizations%length),source=0)
 
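The rewritten parse() now walks the material list once, caching per material its homogenization index (ho_of), phase indices (ph_of) and constituent volume fractions (v_of), and validating each entry on the spot: the constituent count must match N_constituents of the referenced homogenization, and the fractions must sum to 1 within 1.e-9 (dNeq). Below is a minimal, self-contained sketch of that fraction check only; plain abs() stands in for the dNeq helper and the sample values are made up.

! Standalone sketch of the per-material volume-fraction check.
program check_fractions
  use, intrinsic :: iso_fortran_env, only: real64
  implicit none
  real(real64), dimension(3) :: v_of_ma = [0.5_real64, 0.3_real64, 0.2_real64]

  if (abs(sum(v_of_ma) - 1.0_real64) > 1.0e-9_real64) then
    error stop 'constituent fractions do not sum to 1'
  else
    print '(a)', 'fractions ok'
  end if
end program check_fractions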
@@ -132,12 +182,13 @@ subroutine parse()
   allocate(material_phaseID(homogenization_maxNconstituents,discretization_Ncells),source=0)
   allocate(material_phaseEntry(homogenization_maxNconstituents,discretization_Ncells),source=0)
 
-  allocate(material_v(homogenization_maxNconstituents,discretization_Ncells),source=0.0_pReal)
 
+  ! build mappings
   do el = 1, discretization_Nelems
-    material => materials%get(discretization_materialAt(el))
 
-    ho = homogenizations%getIndex(material%get_asString('homogenization'))
+    ma = discretization_materialAt(el)
+    ho = ho_of(ma)
 
     do ip = 1, discretization_nIPs
       ce = (el-1)*discretization_nIPs + ip
       material_homogenizationID(ce) = ho
@@ -145,13 +196,11 @@ subroutine parse()
       material_homogenizationEntry(ce) = counterHomogenization(ho)
     end do
 
-    constituents => material%get('constituents')
-    do co = 1, constituents%length
-      constituent => constituents%get(co)
+    do co = 1, size(ph_of(ma,:)>0)
 
-      v = constituent%get_asFloat('v')
+      v = v_of(ma,co)
+      ph = ph_of(ma,co)
 
-      ph = phases%getIndex(constituent%get_asString('phase'))
       do ip = 1, discretization_nIPs
         ce = (el-1)*discretization_nIPs + ip
         material_phaseID(co,ce) = ph
@@ -161,54 +210,11 @@ subroutine parse()
       end do
 
     end do
-    if (dNeq(sum(material_v(1:constituents%length,ce)),1.0_pReal,1.e-9_pReal)) &
-      call IO_error(153,ext_msg='constituent')
 
   end do
 
-  allocate(material_O_0(materials%length))
-  allocate(material_F_i_0(materials%length))
-
-  do ma = 1, materials%length
-    material => materials%get(ma)
-    constituents => material%get('constituents')
-    allocate(material_O_0(ma)%data(constituents%length))
-    allocate(material_F_i_0(ma)%data(1:3,1:3,constituents%length))
-    do co = 1, constituents%length
-      constituent => constituents%get(co)
-      call material_O_0(ma)%data(co)%fromQuaternion(constituent%get_as1dFloat('O',requiredSize=4))
-      material_F_i_0(ma)%data(1:3,1:3,co) = constituent%get_as2dFloat('F_i',defaultVal=math_I3,requiredShape=[3,3])
-    enddo
-  enddo
-
 end subroutine parse
 
 
-!--------------------------------------------------------------------------------------------------
-!> @brief Check if material.yaml is consistent and contains sufficient # of materials
-!--------------------------------------------------------------------------------------------------
-subroutine sanityCheck(materials,homogenizations)
-
-  class(tNode), intent(in) :: materials, &
-                              homogenizations
-
-  class(tNode), pointer :: material, &
-                           homogenization, &
-                           constituents
-  integer :: m
-
-  if (maxval(discretization_materialAt) > materials%length) &
-    call IO_error(155,ext_msg='More materials requested than found in material.yaml')
-
-  do m = 1, materials%length
-    material => materials%get(m)
-    constituents => material%get('constituents')
-    homogenization => homogenizations%get(material%get_asString('homogenization'))
-    if (constituents%length /= homogenization%get_asInt('N_constituents')) call IO_error(148)
-  end do
-
-end subroutine sanityCheck
-
 #if defined (__GFORTRAN__)
 !--------------------------------------------------------------------------------------------------
 !> @brief %keys() is broken on gfortran
@@ -100,7 +100,11 @@ subroutine discretization_mesh_init(restart)
   debug_element = config_debug%get_asInt('element',defaultVal=1)
   debug_ip = config_debug%get_asInt('integrationpoint',defaultVal=1)
 
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>16)
+  call DMPlexCreateFromFile(PETSC_COMM_WORLD,interface_geomFile,'n/a',PETSC_TRUE,globalMesh,err_PETSc)
+#else
   call DMPlexCreateFromFile(PETSC_COMM_WORLD,interface_geomFile,PETSC_TRUE,globalMesh,err_PETSc)
+#endif
   CHKERRQ(err_PETSc)
   call DMGetDimension(globalMesh,dimPlex,err_PETSc)
   CHKERRQ(err_PETSc)
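For PETSc minor versions above 16 the DMPlexCreateFromFile call above passes one additional argument ('n/a'), so the call site is wrapped in a preprocessor guard on PETSC_VERSION_MAJOR/MINOR. Below is a self-contained sketch of that guard pattern only; LIB_MINOR and the file name guard.F90 are illustrative, not PETSc macros. Compile with e.g. gfortran -cpp -DLIB_MINOR=17 guard.F90.

! Sketch of a preprocessor version guard selecting between two call variants.
#ifndef LIB_MINOR
#define LIB_MINOR 16
#endif
program guard
  implicit none
#if (LIB_MINOR > 16)
  print '(a)', 'using the newer interface (extra name argument)'
#else
  print '(a)', 'using the legacy interface'
#endif
end program guard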
@@ -48,7 +48,7 @@ contains
 !--------------------------------------------------------------------------------------------------
 !> @brief Report precision and do self test.
 !--------------------------------------------------------------------------------------------------
-subroutine prec_init
+subroutine prec_init()
 
   print'(/,1x,a)', '<<<+- prec init -+>>>'
 
@@ -60,7 +60,7 @@ subroutine prec_init
   print'( a,e10.3)', ' epsilon value: ',PREAL_EPSILON
   print'( a,i3)', ' decimal precision: ',precision(0.0_pReal)
 
-  call selfTest
+  call selfTest()
 
 end subroutine prec_init
 
@@ -245,7 +245,7 @@ end function prec_bytesToC_INT64_T
 !--------------------------------------------------------------------------------------------------
 !> @brief Check correctness of some prec functions.
 !--------------------------------------------------------------------------------------------------
-subroutine selfTest
+subroutine selfTest()
 
   integer, allocatable, dimension(:) :: realloc_lhs_test
   real(pReal), dimension(1) :: f
@@ -658,11 +658,7 @@ function om2ax(om) result(ax)
   else
     call dgeev('N','V',3,om_,3,Wr,Wi,devNull,3,VR,3,work,size(work,1),ierr)
     if (ierr /= 0) error stop 'LAPACK error'
-#if defined(__GFORTRAN__) && __GNUC__<9
-    i = maxloc(merge(1,0,cEq(cmplx(Wr,Wi,pReal),cmplx(1.0_pReal,0.0_pReal,pReal),tol=1.0e-14_pReal)),dim=1)
-#else
     i = findloc(cEq(cmplx(Wr,Wi,pReal),cmplx(1.0_pReal,0.0_pReal,pReal),tol=1.0e-14_pReal),.true.,dim=1) !find eigenvalue (1,0)
-#endif
     if (i == 0) error stop 'om2ax conversion failed'
     ax(1:3) = VR(1:3,i)
     where ( dNeq0([om(2,3)-om(3,2), om(3,1)-om(1,3), om(1,2)-om(2,1)])) &
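The removed block kept a maxloc(merge(...)) fallback for gfortran versions without the Fortran 2008 findloc intrinsic; with the guard gone, a compiler providing findloc (gfortran 9 or newer, per the removed __GNUC__<9 condition) is assumed. The self-contained sketch below shows that, when a matching element exists, both idioms return the same index; the mask values are made up. When nothing matches, findloc returns 0 while the emulation returns 1, which is why the subsequent i == 0 check pairs naturally with the intrinsic.

! Sketch: locating the first .true. element with findloc vs. the maxloc/merge emulation.
program first_true
  implicit none
  logical, dimension(4) :: mask = [.false., .false., .true., .false.]
  integer :: i_findloc, i_maxloc

  i_findloc = findloc(mask, .true., dim=1)       ! Fortran 2008 intrinsic
  i_maxloc  = maxloc(merge(1, 0, mask), dim=1)   ! pre-findloc emulation
  if (i_findloc /= i_maxloc) error stop 'idioms disagree'
  print '(a,i0)', 'first .true. at index ', i_findloc
end program first_true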
@@ -1427,10 +1423,6 @@ subroutine selfTest()
 
   do i = 1, 20
 
-#if defined(__GFORTRAN__) && __GNUC__<9
-    if(i<7) cycle
-#endif
-
     if(i==1) then
       qu = om2qu(math_I3)
     elseif(i==2) then
@@ -17,59 +17,67 @@ module system_routines
     getUserName, &
     signalterm_C, &
     signalusr1_C, &
-    signalusr2_C
+    signalusr2_C, &
+    f_c_string, &
+    free_C
 
 
   interface
 
     function setCWD_C(cwd) bind(C)
       use, intrinsic :: ISO_C_Binding, only: C_INT, C_CHAR
 
       integer(C_INT) :: setCWD_C
       character(kind=C_CHAR), dimension(*), intent(in) :: cwd
     end function setCWD_C
 
     subroutine getCWD_C(cwd, stat) bind(C)
       use, intrinsic :: ISO_C_Binding, only: C_INT, C_CHAR
       use prec
 
       character(kind=C_CHAR), dimension(pPathLen+1), intent(out) :: cwd ! NULL-terminated array
       integer(C_INT), intent(out) :: stat
     end subroutine getCWD_C
 
     subroutine getHostName_C(hostname, stat) bind(C)
       use, intrinsic :: ISO_C_Binding, only: C_INT, C_CHAR
       use prec
 
       character(kind=C_CHAR), dimension(pStringLen+1), intent(out) :: hostname ! NULL-terminated array
       integer(C_INT), intent(out) :: stat
     end subroutine getHostName_C
 
     subroutine getUserName_C(username, stat) bind(C)
       use, intrinsic :: ISO_C_Binding, only: C_INT, C_CHAR
       use prec
 
       character(kind=C_CHAR), dimension(pStringLen+1), intent(out) :: username ! NULL-terminated array
       integer(C_INT), intent(out) :: stat
     end subroutine getUserName_C
 
     subroutine signalterm_C(handler) bind(C)
       use, intrinsic :: ISO_C_Binding, only: C_FUNPTR
 
       type(C_FUNPTR), intent(in), value :: handler
     end subroutine signalterm_C
 
     subroutine signalusr1_C(handler) bind(C)
       use, intrinsic :: ISO_C_Binding, only: C_FUNPTR
 
       type(C_FUNPTR), intent(in), value :: handler
     end subroutine signalusr1_C
 
     subroutine signalusr2_C(handler) bind(C)
       use, intrinsic :: ISO_C_Binding, only: C_FUNPTR
 
+      type(C_FUNPTR), intent(in), value :: handler
+    end subroutine signalusr2_C
+
+    subroutine free_C(ptr) bind(C,name='free')
+      import c_ptr
+      type(c_ptr), value :: ptr
+    end subroutine free_C
+
-      type(C_FUNPTR), intent(in), value :: handler
-    end subroutine signalusr2_C
 
   end interface
 
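The interface now also exports f_c_string and an explicit binding to C's free(), so memory handed over from C can be released from Fortran and (judging by its name) Fortran strings can be NUL-terminated before crossing the language boundary. Below is a hypothetical, self-contained sketch of both pieces: the f_c_string helper here is a local stand-in, not the module's own implementation, and free_C is only declared, not called.

! Sketch: NUL-terminating a Fortran string for C, plus an interface to C's free().
program c_interop_sketch
  use, intrinsic :: iso_c_binding, only: c_char, c_null_char, c_ptr
  implicit none

  interface
    subroutine free_C(ptr) bind(C,name='free')      ! releases memory allocated on the C side
      import c_ptr
      type(c_ptr), value :: ptr
    end subroutine free_C
  end interface

  character(kind=c_char,len=:), allocatable :: s

  s = f_c_string('hello')
  print '(a,i0)', 'length including NUL terminator: ', len(s)

contains

  pure function f_c_string(string) result(str)      ! stand-in for the module helper
    character(len=*), intent(in) :: string
    character(kind=c_char,len=:), allocatable :: str

    str = trim(string)//c_null_char
  end function f_c_string

end program c_interop_sketch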