diff --git a/src/grid/spectral_utilities.f90 b/src/grid/spectral_utilities.f90
index 10915c5e4..fafc1feda 100644
--- a/src/grid/spectral_utilities.f90
+++ b/src/grid/spectral_utilities.f90
@@ -174,8 +174,6 @@ subroutine spectral_utilities_init()
   num_grid => num_solver%get_dict('grid',defaultVal=emptyDict)
   num_grid_fft => num_grid%get_dict('FFT',defaultVal=emptyDict)
 
-  call PetscOptionsClear(PETSC_NULL_OPTIONS,err_PETSc)
-  CHKERRQ(err_PETSc)
   call PetscOptionsInsertString(PETSC_NULL_OPTIONS,&
                                 num_grid%get_asStr('PETSc_options',defaultVal=''),err_PETSc)
   CHKERRQ(err_PETSc)
diff --git a/src/mesh/FEM_utilities.f90 b/src/mesh/FEM_utilities.f90
index 3af7b77ea..b1c218172 100644
--- a/src/mesh/FEM_utilities.f90
+++ b/src/mesh/FEM_utilities.f90
@@ -120,8 +120,6 @@ subroutine FEM_utilities_init(num_mesh)
     call IO_error(821,ext_msg='integration order (p_i) out of bounds')
 
   flush(IO_STDOUT)
 
-  call PetscOptionsClear(PETSC_NULL_OPTIONS,err_PETSc)
-  CHKERRQ(err_PETSc)
   petsc_options = misc_prefixOptions('-snes_type newtonls &
                                      &-snes_linesearch_type cp -snes_ksp_ew &
diff --git a/src/parallelization.f90 b/src/parallelization.f90
index 46b94af53..20ab75336 100644
--- a/src/parallelization.f90
+++ b/src/parallelization.f90
@@ -62,8 +62,9 @@ subroutine parallelization_init()
 !$ integer :: got_env, threadLevel
 !$ integer(pI32) :: OMP_NUM_THREADS
 !$ character(len=6) NumThreadsString
-
   PetscErrorCode :: err_PETSc
+
+
 #ifdef _OPENMP
   ! If openMP is enabled, check if the MPI libary supports it and initialize accordingly.
   call MPI_Init_Thread(MPI_THREAD_FUNNELED,threadLevel,err_MPI)
@@ -74,11 +75,7 @@ subroutine parallelization_init()
   if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI init failed'
 #endif
 
-#if defined(DEBUG)
-  call PetscInitialize(PETSC_NULL_CHARACTER,err_PETSc)
-#else
   call PetscInitializeNoArguments(err_PETSc)
-#endif
   CHKERRQ(err_PETSc)
 
 #if defined(DEBUG) && defined(__INTEL_COMPILER)
@@ -88,6 +85,9 @@ subroutine parallelization_init()
 #endif
   CHKERRQ(err_PETSc)
 
+  call PetscOptionsClear(PETSC_NULL_OPTIONS,err_PETSc)
+  CHKERRQ(err_PETSc)
+
   call MPI_Comm_rank(MPI_COMM_WORLD,worldrank,err_MPI)
   if (err_MPI /= 0_MPI_INTEGER_KIND) &
     error stop 'Could not determine worldrank'
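
For reference, a minimal standalone sketch of the call order this patch converges on: PETSc is initialized once in parallelization_init, the global option database is cleared once right after, and each solver module later contributes its own options via PetscOptionsInsertString. This is not part of the patch; the program name, the '-snes_type newtonls' string, and the petscsys include are illustrative assumptions (compile as preprocessed .F90 against a PETSc installation).

! Hypothetical demo (not part of the patch): initialize PETSc once,
! clear the option database once, then insert solver-specific options later.
program petsc_options_order
#include <petsc/finclude/petscsys.h>
  use petscsys
  implicit none

  PetscErrorCode :: err_PETSc

  call PetscInitializeNoArguments(err_PETSc)             ! as in parallelization_init
  CHKERRQ(err_PETSc)

  call PetscOptionsClear(PETSC_NULL_OPTIONS,err_PETSc)   ! done once, centrally
  CHKERRQ(err_PETSc)

  ! later, a solver module adds its own options (string is illustrative only)
  call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-snes_type newtonls',err_PETSc)
  CHKERRQ(err_PETSc)

  call PetscFinalize(err_PETSc)
  CHKERRQ(err_PETSc)
end program petsc_options_order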