diff --git a/.gitlab-ci/default.yml b/.gitlab-ci/default.yml index 7c622a8f7d9f63dc5210dcf4dc0b7c4a3e3b9225..a2c18f1c7c6aca3a47748bd9e1396671be6af327 100644 --- a/.gitlab-ci/default.yml +++ b/.gitlab-ci/default.yml @@ -146,6 +146,7 @@ test cpp: variables: OMPI_ALLOW_RUN_AS_ROOT: 1 OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 + DUMUX_NUM_THREADS: 4 script: - | pushd build-cmake @@ -168,6 +169,7 @@ test python: variables: OMPI_ALLOW_RUN_AS_ROOT: 1 OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 + DUMUX_NUM_THREADS: 4 script: # restore Python virtual env from cache (job:configure artifacts) (Dune 2.9) - | diff --git a/CHANGELOG.md b/CHANGELOG.md index 2d780782266d70ad754924cf54da32d60ebe1951..838ba967d2d243a34794861b9bcc56bd05c27d24 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,20 @@ Differences Between DuMu<sup>x</sup> 3.5 and DuMu<sup>x</sup> 3.4 ### Improvements and Enhancements +- __ParallelFor and multithreaded assembly__: For single domain applications using the Box or CCTpfa discretization, + support for multithreaded (thread-parallel) assembly has been added. It uses the newly added + `Dumux::parallelFor` interface. This feature requires a backend to be available. + This can be one of the external backends (TBB, Kokkos) or a working C++ STL parallel algorithms setup. + The backend is selected automatically, if found. You can also specify the backend by setting the + compiler definition `DUMUX_MULTITHREADING_BACKEND=TBB,Cpp,Kokkos,Serial`, where Serial forces serial execution. + For the assembly, you can explicitly turn multithreading off by setting the runtime parameter `Assembly.Multithreading = false`. + If a backend is available and the discretization allows it, the default is multithreaded assembly. + When using TBB or Kokkos, it is required (and generally recommended) to use `Dumux::initialize` in the main file. + +- __initialize__: A new control function `Dumux::initialize` has been added which initializes shared and distributed + memory parallelism helpers. It is recommended (and may be required for multithreaded applications) to use `Dumux::initialize` + instead of manually initializing the `Dune::MPIHelper`. + +- __Discretization tags__: We introduced tags in the namespace `DiscretizationMethods` (with s) for each discretization method. These tags replace the `enum class DiscretizationMethod`. Tags have several advantages over the enum. Each tag is a named type (see `dumux/common/tag.hh`) so they can for example be used in tag dispatch. Moreover specializing with tags is extensible.
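The two changelog entries above translate into very little user-facing code. As a minimal sketch (the headers and function names are those introduced by this changeset; the loop body is illustrative only), a main file combining `Dumux::initialize` with a thread-parallel loop could look like this:

```cpp
#include <config.h>

#include <cstddef>
#include <vector>

#include <dumux/common/initialize.hh>     // Dumux::initialize
#include <dumux/parallel/parallel_for.hh> // Dumux::parallelFor

int main(int argc, char** argv)
{
    // initializes distributed memory parallelism (MPI) and the shared memory
    // backend; the thread count can be capped via the environment variable
    // DUMUX_NUM_THREADS (see the CI configuration above)
    Dumux::initialize(argc, argv);

    // executes the functor for each task index, possibly in parallel, using
    // the backend chosen at configure time via DUMUX_MULTITHREADING_BACKEND
    std::vector<double> data(1000);
    Dumux::parallelFor(data.size(), [&](const std::size_t i)
    { data[i] = 2.0*i; });

    return 0;
}
```

Multithreaded assembly itself needs no extra code: if a backend is available and the discretization supports coloring, `FVAssembler` enables it by default and honors the runtime parameter `Assembly.Multithreading`.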
diff --git a/CMakeLists.txt b/CMakeLists.txt index 45da9af9b5417da7cdbae7ae93b371348baee442..560508e4aadb5974eb1adae2a7091207af67a9f5 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -17,6 +17,7 @@ find_package(dune-common) list(APPEND CMAKE_MODULE_PATH ${dune-common_MODULE_PATH} "${PROJECT_SOURCE_DIR}/cmake/modules") + #include the dune macros include(DuneMacros) diff --git a/cmake/modules/AddKokkosFlags.cmake b/cmake/modules/AddKokkosFlags.cmake new file mode 100644 index 0000000000000000000000000000000000000000..38714cc17db956516f9072c22b11a55451e4b676 --- /dev/null +++ b/cmake/modules/AddKokkosFlags.cmake @@ -0,0 +1,12 @@ +include_guard(GLOBAL) + +# set variable for config.h +set(HAVE_KOKKOS ${Kokkos_FOUND}) + +# perform DUNE-specific setup tasks +if (Kokkos_FOUND) + dune_register_package_flags( + COMPILE_DEFINITIONS ENABLE_KOKKOS=1 + LIBRARIES Kokkos::kokkos + ) +endif() diff --git a/cmake/modules/AddOpenMPFlags.cmake b/cmake/modules/AddOpenMPFlags.cmake new file mode 100644 index 0000000000000000000000000000000000000000..c96c92b4c375ab0561af0358cda4280cda9e248c --- /dev/null +++ b/cmake/modules/AddOpenMPFlags.cmake @@ -0,0 +1,12 @@ +include_guard(GLOBAL) + +# set variable for config.h +set(HAVE_OPENMP ${OpenMP_FOUND}) + +# perform DUNE-specific setup tasks +if (OpenMP_FOUND) + dune_register_package_flags( + COMPILE_DEFINITIONS ENABLE_OPENMP=1 + LIBRARIES OpenMP::OpenMP_CXX + ) +endif() diff --git a/cmake/modules/DumuxMacros.cmake b/cmake/modules/DumuxMacros.cmake index 80738630ae6f5adfcc87cc8774d63458afe16c5e..21299d9aa5d90cd6946be2de9580ddcf4ee64533 100644 --- a/cmake/modules/DumuxMacros.cmake +++ b/cmake/modules/DumuxMacros.cmake @@ -13,3 +13,78 @@ find_package(NLOPT QUIET) find_package(PTScotch QUIET) include(AddPTScotchFlags) find_package(PVPython QUIET) + +find_package(Kokkos QUIET) +include(AddKokkosFlags) + +# possibly link against TBB +# even if an older version is found +# otherwise we get linker errors +# because of inconsistencies with +# dune-common's TBB setup +find_package(TBB) +include(AddTBBFlags) + +# in a second step make sure the +# minimum TBB version required is found +set(DUMUX_MIN_TBB_VERSION 2021) +if(TBB_FOUND) + if(TBB_VERSION_MAJOR VERSION_LESS DUMUX_MIN_TBB_VERSION) + find_package(TBB ${DUMUX_MIN_TBB_VERSION}) + # disable TBB manually if required version not found + if(NOT TBB_FOUND) + message(STATUS "Disabling TBB since version requirement not satisfied (>= ${DUMUX_MIN_TBB_VERSION}).") + set(ENABLE_TBB FALSE) + set(HAVE_TBB FALSE) + endif() + endif() +endif() + +find_package(OpenMP QUIET) +include(AddOpenMPFlags) + +# test if we can use parallel algorithms +include(CheckCXXSymbolExists) +check_cxx_symbol_exists( + "std::execution::par_unseq" + "execution" + DUMUX_HAVE_CXX_EXECUTION_POLICY +) + +if(DUMUX_HAVE_CXX_EXECUTION_POLICY) + set(HAVE_CPP_PARALLEL_ALGORITHMS TRUE) +endif() + +# setup multithreading backend +if(NOT DUMUX_MULTITHREADING_BACKEND) + if(TBB_FOUND) + set(DUMUX_MULTITHREADING_BACKEND "TBB" CACHE STRING "The multithreading backend") + message(STATUS "Dumux multithreading backend: TBB") + elseif(OpenMP_FOUND) + set(DUMUX_MULTITHREADING_BACKEND "OpenMP" CACHE STRING "The multithreading backend") + message(STATUS "Dumux multithreading backend: OpenMP") + elseif(Kokkos_FOUND) + set(DUMUX_MULTITHREADING_BACKEND "Kokkos" CACHE STRING "The multithreading backend") + message(STATUS "Dumux multithreading backend: Kokkos") + elseif(DUMUX_HAVE_CXX_EXECUTION_POLICY) + set(DUMUX_MULTITHREADING_BACKEND "Cpp" CACHE STRING "The multithreading backend") + message(STATUS "Dumux 
multithreading backed: Cpp") + else() + set(DUMUX_MULTITHREADING_BACKEND "Serial" CACHE STRING "The multithreading backend") + message(STATUS "Dumux multithreading backed: Serial") + endif() + +# abort if a multithreading backend has been manually selected +# but it is not available +else() + if(DUMUX_MULTITHREADING_BACKEND STREQUAL "TBB" AND NOT TBB_FOUND) + message(FATAL_ERROR "Selected TBB as Dumux multithreading backed but TBB has not been found") + elseif(DUMUX_MULTITHREADING_BACKEND STREQUAL "OpenMP" AND NOT OpenMP_FOUND) + message(FATAL_ERROR "Selected OpenMP as Dumux multithreading backed but OpenMP has not been found") + elseif(DUMUX_MULTITHREADING_BACKEND STREQUAL "Kokkos" AND NOT Kokkos_FOUND) + message(FATAL_ERROR "Selected Kokkos as Dumux multithreading backed but Kokkos has not been found") + elseif(DUMUX_MULTITHREADING_BACKEND STREQUAL "Cpp" AND NOT DUMUX_HAVE_CXX_EXECUTION_POLICY) + message(FATAL_ERROR "Selected Cpp as Dumux multithreading backed but your compiler does not implement parallel STL") + else() + message(STATUS "Dumux multithreading backed: ${DUMUX_MULTITHREADING_BACKEND}") + endif() +endif() diff --git a/config.h.cmake b/config.h.cmake index f7f2bc4b053db7fe4ce5ba41320f5226b48b11db..199c9f972df03c8fc8d5075cc45435c0e97635c4 100644 --- a/config.h.cmake +++ b/config.h.cmake @@ -70,6 +70,20 @@ /* Define to 1 if quadmath was found */ #cmakedefine HAVE_QUAD 1 +/* Set if Kokkos was found */ +#cmakedefine HAVE_KOKKOS ENABLE_KOKKOS + +/* Set if OpenMP was found */ +#cmakedefine HAVE_OPENMP ENABLE_OPENMP + +/* Set the DUMUX_MULTITHREADING_BACKEND */ +#ifndef DUMUX_MULTITHREADING_BACKEND +#define DUMUX_MULTITHREADING_BACKEND ${DUMUX_MULTITHREADING_BACKEND} +#endif + +/* Set HAVE_CPP_PARALLEL_ALGORITHMS if available */ +#cmakedefine HAVE_CPP_PARALLEL_ALGORITHMS 1 + /* end dumux Everything below here will be overwritten */ diff --git a/dumux/assembly/coloring.hh b/dumux/assembly/coloring.hh new file mode 100644 index 0000000000000000000000000000000000000000..409d102b28dc447dbe67061110077fdab34e2173 --- /dev/null +++ b/dumux/assembly/coloring.hh @@ -0,0 +1,252 @@ +// -*- mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- +// vi: set et ts=4 sw=4 sts=4: +/***************************************************************************** + * See the file COPYING for full copying permissions. * + * * + * This program is free software: you can redistribute it and/or modify * + * it under the terms of the GNU General Public License as published by * + * the Free Software Foundation, either version 3 of the License, or * + * (at your option) any later version. * + * * + * This program is distributed in the hope that it will be useful, * + * but WITHOUT ANY WARRANTY; without even the implied warranty of * + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * + * GNU General Public License for more details. * + * * + * You should have received a copy of the GNU General Public License * + * along with this program. If not, see <http://www.gnu.org/licenses/>. * + *****************************************************************************/ +/*! 
+ * \file + * \ingroup Assembly + * \brief Coloring schemes for shared-memory-parallel assembly + */ +#ifndef DUMUX_ASSEMBLY_COLORING_HH +#define DUMUX_ASSEMBLY_COLORING_HH + +#include <vector> +#include <deque> +#include <iostream> +#include <tuple> + +#include <dune/common/timer.hh> +#include <dune/common/exceptions.hh> + +#include <dumux/io/format.hh> +#include <dumux/discretization/method.hh> + +#ifndef DOXYGEN // hide from doxygen +namespace Dumux::Detail { + +//! Compute a map from dof indices to element indices (helper data for coloring algorithm) +template <class GridGeometry> +std::vector<std::vector<std::size_t>> +computeDofToElementMap(const GridGeometry& gg) +{ + std::vector<std::vector<std::size_t>> dofToElements; + + if constexpr (GridGeometry::discMethod == DiscretizationMethods::cctpfa) + { + dofToElements.resize(gg.gridView().size(0)); + const auto& eMapper = gg.elementMapper(); + for (const auto& element : elements(gg.gridView())) + { + const auto eIdx = eMapper.index(element); + for (const auto& intersection : intersections(gg.gridView(), element)) + if (intersection.neighbor()) + dofToElements[eMapper.index(intersection.outside())].push_back(eIdx); + } + } + + else if constexpr (GridGeometry::discMethod == DiscretizationMethods::box) + { + static constexpr int dim = GridGeometry::GridView::dimension; + dofToElements.resize(gg.gridView().size(dim)); + const auto& vMapper = gg.vertexMapper(); + for (const auto& element : elements(gg.gridView())) + { + const auto eIdx = gg.elementMapper().index(element); + for (int i = 0; i < element.subEntities(dim); i++) + dofToElements[vMapper.subIndex(element, i, dim)].push_back(eIdx); + } + } + + else + DUNE_THROW(Dune::NotImplemented, + "Missing coloring scheme implementation for this discretization method" + ); + + return dofToElements; +} + +/*! + * \brief Compute the colors of neighboring nodes in the dependency graph + * + * Neighboring nodes are those elements that manipulate the + * same data structures (e.g. 
system matrix, volvars, flux cache) in the same places + * + * \param gridGeometry the grid geometry + * \param element the element we want to color + * \param colors a vector of current colors for each element (not assigned: -1) + * \param dofToElement a map from dof indices to element indices + * \param neighborColors a vector to add the colors of neighbor nodes to + */ +template<class GridGeometry, class DofToElementMap> +void addNeighborColors(const GridGeometry& gg, + const typename GridGeometry::LocalView::Element& element, + const std::vector<int>& colors, + const DofToElementMap& dofToElement, + std::vector<int>& neighborColors) +{ + if constexpr (GridGeometry::discMethod == DiscretizationMethods::cctpfa) + { + // we modify neighbor elements during the assembly + // check who else modifies these neighbor elements + const auto& eMapper = gg.elementMapper(); + for (const auto& intersection : intersections(gg.gridView(), element)) + { + if (intersection.neighbor()) + { + // direct face neighbors + const auto nIdx = eMapper.index(intersection.outside()); + neighborColors.push_back(colors[nIdx]); + + // neighbor-neighbors + for (const auto nnIdx : dofToElement[eMapper.index(intersection.outside())]) + neighborColors.push_back(colors[nnIdx]); + } + } + } + + else if constexpr (GridGeometry::discMethod == DiscretizationMethods::box) + { + // we modify the vertex dofs of our element during the assembly + // check who else modifies these vertex dofs + const auto& vMapper = gg.vertexMapper(); + static constexpr int dim = GridGeometry::GridView::dimension; + // direct vertex neighbors + for (int i = 0; i < element.subEntities(dim); i++) + for (auto eIdx : dofToElement[vMapper.subIndex(element, i, dim)]) + neighborColors.push_back(colors[eIdx]); + } + + else + DUNE_THROW(Dune::NotImplemented, + "Missing coloring scheme implementation for this discretization method" + ); +} + +/*! + * \brief Find the smallest color (integer >= 0) _not_ present in the given list of colors + * \param colors list of colors which are already taken + * \param colorUsed helper container used to mark which colors are already taken (is resized as required) + */ +inline int smallestAvailableColor(const std::vector<int>& colors, + std::vector<bool>& colorUsed) +{ + const int numColors = colors.size(); + colorUsed.assign(numColors, false); + + // The worst case for e.g. numColors=3 is colors={0, 1, 2} + // in which case we return 3 as smallest available color + // That means, we only track candidates in the (half-open) interval [0, numColors) + // Mark candidate colors which are present in colors + for (int i = 0; i < numColors; i++) + if (colors[i] >= 0 && colors[i] < numColors) + colorUsed[colors[i]] = true; + + // return smallest color not in colors + for (int i = 0; i < numColors; i++) + if (!colorUsed[i]) + return i; + + return numColors; +} + +} // end namespace Dumux::Detail +#endif // DOXYGEN + +namespace Dumux { + +/*! + * \brief Compute iterable lists of element seeds partitioned by color + * + * Splits up the elements of a grid view into partitions such that + * all elements in one partition do not modify global data structures + * at the same place during assembly. This is used to allow for + * lock-free thread-parallel (shared memory) assembly routines.
+ * + * Implements a simple greedy graph coloring algorithm: + * For each node (element), assign the smallest available color + * not used by any of the neighboring nodes (elements with conflicting memory access) + * The greedy algorithm doesn't necessarily return the smallest + * possible number of colors (that's a hard problem) but is fast + * + * Returns a struct with access to the colors of each element (member colors) + * and a vector of element seed sets of the same color (member sets) + * + * \param gridGeometry the grid geometry + * \param verbosity the verbosity level + */ +template<class GridGeometry> +auto computeColoring(const GridGeometry& gg, int verbosity = 1) +{ + Dune::Timer timer; + + using ElementSeed = typename GridGeometry::GridView::Grid::template Codim<0>::EntitySeed; + struct Coloring + { + using Sets = std::deque<std::vector<ElementSeed>>; + using Colors = std::vector<int>; + + Coloring(std::size_t size) : sets{}, colors(size, -1) {} + + Sets sets; + Colors colors; + }; + + Coloring coloring(gg.gridView().size(0)); + + // pre-reserve some memory for helper arrays to avoid reallocation + std::vector<int> neighborColors; neighborColors.reserve(30); + std::vector<bool> colorUsed; colorUsed.reserve(30); + + // dof to element map to speed up neighbor search + const auto dofToElement = Detail::computeDofToElementMap(gg); + + for (const auto& element : elements(gg.gridView())) + { + // compute neighbor colors based on discretization-dependent stencil + neighborColors.clear(); + Detail::addNeighborColors(gg, element, coloring.colors, dofToElement, neighborColors); + + // find smallest color (non-negative integer) not in neighborColors + const auto color = Detail::smallestAvailableColor(neighborColors, colorUsed); + + // assign color to element + coloring.colors[gg.elementMapper().index(element)] = color; + + // add element to the set of elements with the same color + if (color < coloring.sets.size()) + coloring.sets[color].push_back(element.seed()); + else + coloring.sets.push_back(std::vector<ElementSeed>{ element.seed() }); + } + + if (verbosity > 0) + std::cout << Fmt::format("Colored {} elements with {} colors in {} seconds.\n", + gg.gridView().size(0), coloring.sets.size(), timer.elapsed()); + + return coloring; +} + +//!
Traits specifying if a given discretization tag supports coloring +template<class DiscretizationMethod> +struct SupportsColoring : public std::false_type {}; + +template<> struct SupportsColoring<DiscretizationMethods::CCTpfa> : public std::true_type {}; +template<> struct SupportsColoring<DiscretizationMethods::Box> : public std::true_type {}; + +} // end namespace Dumux + +#endif diff --git a/dumux/assembly/fvassembler.hh b/dumux/assembly/fvassembler.hh index e1a6614341bb27bd5395915b5825a435e12d8857..8522297f355792220fc74b8eb01bd3f884b8b2c6 100644 --- a/dumux/assembly/fvassembler.hh +++ b/dumux/assembly/fvassembler.hh @@ -24,7 +24,10 @@ #ifndef DUMUX_FV_ASSEMBLER_HH #define DUMUX_FV_ASSEMBLER_HH +#include <vector> +#include <deque> #include <type_traits> +#include <memory> #include <dune/istl/matrixindexset.hh> @@ -33,8 +36,13 @@ #include <dumux/discretization/method.hh> #include <dumux/linear/parallelhelpers.hh> -#include "jacobianpattern.hh" -#include "diffmethod.hh" +#include <dumux/assembly/coloring.hh> +#include <dumux/assembly/jacobianpattern.hh> +#include <dumux/assembly/diffmethod.hh> + +#include <dumux/parallel/multithreading.hh> +#include <dumux/parallel/parallel_for.hh> + #include "boxlocalassembler.hh" #include "cclocalassembler.hh" #include "fclocalassembler.hh" @@ -95,6 +103,7 @@ class FVAssembler using GridView = typename GridGeo::GridView; using LocalResidual = GetPropType<TypeTag, Properties::LocalResidual>; using Element = typename GridView::template Codim<0>::Entity; + using ElementSeed = typename GridView::Grid::template Codim<0>::EntitySeed; using TimeLoop = TimeLoopBase<GetPropType<TypeTag, Properties::Scalar>>; using SolutionVector = GetPropType<TypeTag, Properties::SolutionVector>; @@ -127,6 +136,11 @@ public: , isStationaryProblem_(true) { static_assert(isImplicit, "Explicit assembler for stationary problem doesn't make sense!"); + enableMultithreading_ = SupportsColoring<typename GridGeometry::DiscretizationMethod>::value + && !Multithreading::isSerial() + && getParam<bool>("Assembly.Multithreading", true); + + maybeComputeColors_(); } /*! @@ -145,7 +159,13 @@ public: , timeLoop_(timeLoop) , prevSol_(&prevSol) , isStationaryProblem_(!timeLoop) - {} + { + enableMultithreading_ = SupportsColoring<typename GridGeometry::DiscretizationMethod>::value + && !Multithreading::isSerial() + && getParam<bool>("Assembly.Multithreading", true); + + maybeComputeColors_(); + } /*! * \brief Assembles the global Jacobian of the residual @@ -258,8 +278,8 @@ public: else if (jacobian_->buildMode() != JacobianMatrix::BuildMode::random) DUNE_THROW(Dune::NotImplemented, "Only BCRS matrices with random build mode are supported at the moment"); - setJacobianPattern(); - setResidualSize(); + setResidualSize_(); + setJacobianPattern_(); } /*! @@ -272,13 +292,24 @@ public: jacobian_->setBuildMode(JacobianMatrix::random); residual_ = std::make_shared<SolutionVector>(); - setJacobianPattern(); - setResidualSize(); + setResidualSize_(); + setJacobianPattern_(); + } + + /*! + * \brief Resizes jacobian and residual and recomputes colors + */ + void updateAfterGridAdaption() + { + setResidualSize_(); + setJacobianPattern_(); + maybeComputeColors_(); } /*! * \brief Resizes the jacobian and sets the jacobian' sparsity pattern. */ + [[deprecated("Use updateAfterGridAdaption. 
Will be removed after release 3.5.")]] void setJacobianPattern() { // resize the jacobian and the residual @@ -290,9 +321,13 @@ public: // export pattern to jacobian occupationPattern.exportIdx(*jacobian_); + + // maybe recompute colors + maybeComputeColors_(); } //! Resizes the residual + [[deprecated("Use updateAfterGridAdaption. Will be removed after release 3.5.")]] void setResidualSize() { residual_->resize(numDofs()); } @@ -375,13 +410,40 @@ public: } private: + /*! + * \brief Resizes the jacobian and sets the jacobian's sparsity pattern. + */ + void setJacobianPattern_() + { + // resize the jacobian and the residual + const auto numDofs = this->numDofs(); + jacobian_->setSize(numDofs, numDofs); + + // create occupation pattern of the jacobian + const auto occupationPattern = getJacobianPattern<isImplicit>(gridGeometry()); + + // export pattern to jacobian + occupationPattern.exportIdx(*jacobian_); + } + + //! Resizes the residual + void setResidualSize_() + { residual_->resize(numDofs()); } + + //! Computes the element coloring for multithreaded assembly (if enabled) + void maybeComputeColors_() + { + if (enableMultithreading_) + elementSets_ = computeColoring(gridGeometry()).sets; + } + // reset the residual vector to 0.0 void resetResidual_() { if(!residual_) { residual_ = std::make_shared<SolutionVector>(); - setResidualSize(); + setResidualSize_(); } (*residual_) = 0.0; @@ -395,7 +457,7 @@ private: { jacobian_ = std::make_shared<JacobianMatrix>(); jacobian_->setBuildMode(JacobianMatrix::random); - setJacobianPattern(); + setJacobianPattern_(); } if (partialReassembler) @@ -425,9 +487,26 @@ private: // try assembling using the local assembly function try { - // let the local assembler add the element contributions - for (const auto& element : elements(gridView())) - assembleElement(element); + if (enableMultithreading_) + { + assert(elementSets_.size() > 0); + + // make this element loop run in parallel + // for this we have to color the elements so that we don't get + // race conditions when writing into the global matrix + // each color can be assembled using multiple threads + for (const auto& elements : elementSets_) + { + Dumux::parallelFor(elements.size(), [&](const std::size_t i) + { + const auto element = gridView().grid().entity(elements[i]); + assembleElement(element); + }); + } + } + else + for (const auto& element : elements(gridView())) + assembleElement(element); // if we get here, everything worked well on this process succeeded = true; @@ -495,6 +574,10 @@ private: //! shared pointers to the jacobian matrix and residual std::shared_ptr<JacobianMatrix> jacobian_; std::shared_ptr<SolutionVector> residual_; + + //!
element sets for parallel assembly + bool enableMultithreading_ = false; + std::deque<std::vector<ElementSeed>> elementSets_; }; } // namespace Dumux diff --git a/dumux/common/initialize.hh b/dumux/common/initialize.hh index c069c26148d40d0a4bc3d19a962b09f754bfa5d0..f3a4f35b70779d2ed7134aabfa9e71a323f4ff7e 100644 --- a/dumux/common/initialize.hh +++ b/dumux/common/initialize.hh @@ -24,8 +24,72 @@ #ifndef DUMUX_COMMON_INITIALIZE_HH #define DUMUX_COMMON_INITIALIZE_HH +#include <string> +#include <algorithm> +#include <cstdlib> + #include <dune/common/parallel/mpihelper.hh> +#if HAVE_TBB +#include <oneapi/tbb/info.h> +#include <oneapi/tbb/global_control.h> + +#ifndef DOXYGEN +namespace Dumux::Detail { + +class TBBGlobalControl +{ +public: + static oneapi::tbb::global_control& instance(int& argc, char* argv[]) + { + int maxNumThreads = oneapi::tbb::info::default_concurrency(); + if (const char* dumuxNumThreads = std::getenv("DUMUX_NUM_THREADS")) + maxNumThreads = std::max(1, std::stoi(std::string{ dumuxNumThreads })); + + static oneapi::tbb::global_control global_limit( + oneapi::tbb::global_control::max_allowed_parallelism, maxNumThreads + ); + + return global_limit; + } +}; + +} // namespace Dumux::Detail +#endif // DOXYGEN + +#endif // HAVE_TBB + + +#if HAVE_OPENMP +#include <omp.h> +#endif // HAVE_OPENMP + + +#if HAVE_KOKKOS +#include <Kokkos_Core.hpp> + +#ifndef DOXYGEN +namespace Dumux::Detail { + +class KokkosScopeGuard +{ +public: + static Kokkos::ScopeGuard& instance(int& argc, char* argv[]) + { + Kokkos::InitArguments arguments; + if (const char* dumuxNumThreads = std::getenv("DUMUX_NUM_THREADS")) + arguments.num_threads = std::max(1, std::stoi(std::string{ dumuxNumThreads })); + + static Kokkos::ScopeGuard guard(arguments); + return guard; + } +}; + +} // namespace Dumux::Detail +#endif // DOXYGEN + +#endif // HAVE_KOKKOS + namespace Dumux { void initialize(int& argc, char* argv[]) @@ -33,6 +97,23 @@ // initialize MPI if available // otherwise this will create a sequential (fake) helper Dune::MPIHelper::instance(argc, argv); + +#if HAVE_TBB + // initialize TBB and keep global control alive + Detail::TBBGlobalControl::instance(argc, argv); +#endif + +#if HAVE_OPENMP + if (const char* dumuxNumThreads = std::getenv("DUMUX_NUM_THREADS")) + omp_set_num_threads( + std::max(1, std::stoi(std::string{ dumuxNumThreads })) + ); +#endif + +#if HAVE_KOKKOS + // initialize Kokkos (command line / environment variable interface) + Detail::KokkosScopeGuard::instance(argc, argv); +#endif } } // end namespace Dumux diff --git a/dumux/common/loggingparametertree.hh b/dumux/common/loggingparametertree.hh index 3716ac7b1975ed3aaeb99021b26af75c0c6caba3..adbf7291cba228b15807614a996aaea27ea45496 100644 --- a/dumux/common/loggingparametertree.hh +++ b/dumux/common/loggingparametertree.hh @@ -27,6 +27,8 @@ #include <iomanip> #include <iostream> #include <string> +#include <memory> +#include <mutex> #include <dune/common/parametertree.hh> #include <dumux/common/exceptions.hh> @@ -50,7 +52,11 @@ public: * \brief Create LoggingParameterTree from ParameterTree */ LoggingParameterTree(const Dune::ParameterTree& params, const Dune::ParameterTree& defaultParams) - : params_(params), defaultParams_(defaultParams) {} + : params_(params) + , defaultParams_(defaultParams) + , usedRuntimeParams_(std::make_unique<Dune::ParameterTree>()) + , usedDefaultParams_(std::make_unique<Dune::ParameterTree>()) + {} /** \brief test for key * @@ -164,10 +170,10 @@ void
reportAll(std::ostream& stream = std::cout) const { stream << "\n# Runtime-specified parameters used:" << std::endl; - usedRuntimeParams_.report(stream); + usedRuntimeParams_->report(stream); stream << "\n# Global default parameters used:" << std::endl; - usedDefaultParams_.report(stream); + usedDefaultParams_->report(stream); const auto unusedParams = getUnusedKeys(); if (!unusedParams.empty()) @@ -229,7 +235,7 @@ public: { // log that we used this parameter const auto returnValue = params_[key]; - usedRuntimeParams_[key] = returnValue; + logUsedRuntimeParam_(key, returnValue); return returnValue; } @@ -262,7 +268,7 @@ public: { // log that we used this parameter const auto returnValue = params_[compoundKey]; - usedRuntimeParams_[compoundKey] = returnValue; + logUsedRuntimeParam_(compoundKey, returnValue); return returnValue; } @@ -272,7 +278,7 @@ public: { // log that we used this parameter const auto returnValue = params_[compoundKey]; - usedRuntimeParams_[compoundKey] = returnValue; + logUsedRuntimeParam_(compoundKey, returnValue); return returnValue; } @@ -337,7 +343,7 @@ public: if (params_.hasKey(key)) { // log that we used this parameter - usedRuntimeParams_[key] = params_[key]; + logUsedRuntimeParam_(key, params_[key]); return params_.template get<T>(key); } @@ -370,7 +376,7 @@ public: if (params_.hasKey(compoundKey)) { // log that we used this parameter - usedRuntimeParams_[compoundKey] = params_[compoundKey]; + logUsedRuntimeParam_(compoundKey, params_[compoundKey]); return params_.template get<T>(compoundKey); } @@ -379,7 +385,7 @@ public: if (compoundKey != "") { // log that we used this parameter - usedRuntimeParams_[compoundKey] = params_[compoundKey]; + logUsedRuntimeParam_(compoundKey, params_[compoundKey]); return params_.template get<T>(compoundKey); } @@ -403,14 +409,14 @@ public: if (params_.hasKey(key)) { // log that we used this parameter - usedRuntimeParams_[key] = params_[key]; + logUsedRuntimeParam_(key, params_[key]); return params_.template get<T>(key); } else if(defaultParams_.hasKey(key)) { // use the default - usedDefaultParams_[key] = defaultParams_[key]; + logUsedDefaultParam_(key, defaultParams_[key]); return defaultParams_.template get<T>(key); } @@ -441,7 +447,7 @@ public: if (params_.hasKey(compoundKey)) { // log that we used this parameter - usedRuntimeParams_[compoundKey] = params_[compoundKey]; + logUsedRuntimeParam_(compoundKey, params_[compoundKey]); return params_.template get<T>(compoundKey); } @@ -450,7 +456,7 @@ public: if (compoundKey != "") { // log that we used this parameter - usedRuntimeParams_[compoundKey] = params_[compoundKey]; + logUsedRuntimeParam_(compoundKey, params_[compoundKey]); return params_.template get<T>(compoundKey); } @@ -461,7 +467,7 @@ public: if (params_.hasKey(key)) { // log that we used this parameter - usedRuntimeParams_[key] = params_[key]; + logUsedRuntimeParam_(key, params_[key]); return params_.template get<T>(key); } @@ -469,7 +475,7 @@ public: else if(defaultParams_.hasKey(compoundKey)) { // use the default - usedDefaultParams_[compoundKey] = defaultParams_[compoundKey]; + logUsedDefaultParam_(compoundKey, defaultParams_[compoundKey]); return defaultParams_.template get<T>(compoundKey); } @@ -480,14 +486,14 @@ public: if (compoundKey != "") { // log that we used this parameter - usedDefaultParams_[compoundKey] = defaultParams_[compoundKey]; + logUsedDefaultParam_(compoundKey, defaultParams_[compoundKey]); return defaultParams_.template get<T>(compoundKey); } if(defaultParams_.hasKey(key)) { // use the default - 
usedDefaultParams_[key] = defaultParams_[key]; + logUsedDefaultParam_(key, defaultParams_[key]); return defaultParams_.template get<T>(key); } @@ -522,7 +528,7 @@ private: // store keys which were not accessed const auto& keys = tree.getValueKeys(); for (const auto& key : keys) - if (key != "ParameterFile" && !usedRuntimeParams_.hasKey(prefix + key)) + if (key != "ParameterFile" && !usedRuntimeParams_->hasKey(prefix + key)) unusedParams.push_back(prefix + key); // recursively loop over all subtrees @@ -531,12 +537,32 @@ findUnusedKeys_(tree.sub(key), unusedParams, prefix + key + "."); } + /** \brief Log the key-value pair as used runtime param + */ + void logUsedRuntimeParam_(const std::string& key, const std::string& value) const + { + std::scoped_lock lock{ usedRuntimeMutex_ }; + usedRuntimeParams_->operator[](key) = value; + } + + /** \brief Log the key-value pair as used default param + */ + void logUsedDefaultParam_(const std::string& key, const std::string& value) const + { + std::scoped_lock lock{ usedDefaultMutex_ }; + usedDefaultParams_->operator[](key) = value; + } + const Dune::ParameterTree& params_; const Dune::ParameterTree& defaultParams_; - // logging caches - mutable Dune::ParameterTree usedRuntimeParams_; - mutable Dune::ParameterTree usedDefaultParams_; + // logging caches (externally stored) + std::unique_ptr<Dune::ParameterTree> usedRuntimeParams_; + std::unique_ptr<Dune::ParameterTree> usedDefaultParams_; + + // access to the caches has to be protected for thread-safety + mutable std::mutex usedRuntimeMutex_; + mutable std::mutex usedDefaultMutex_; }; } // end namespace Dumux diff --git a/dumux/parallel/multithreading.hh b/dumux/parallel/multithreading.hh new file mode 100644 index 0000000000000000000000000000000000000000..b8bf23402e5cc4fa7ea589ae40bbebcdbb8f6a93 --- /dev/null +++ b/dumux/parallel/multithreading.hh @@ -0,0 +1,63 @@ +// -*- mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- +// vi: set et ts=4 sw=4 sts=4: +/***************************************************************************** + * See the file COPYING for full copying permissions. * + * * + * This program is free software: you can redistribute it and/or modify * + * it under the terms of the GNU General Public License as published by * + * the Free Software Foundation, either version 3 of the License, or * + * (at your option) any later version. * + * * + * This program is distributed in the hope that it will be useful, * + * but WITHOUT ANY WARRANTY; without even the implied warranty of * + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * + * GNU General Public License for more details. * + * * + * You should have received a copy of the GNU General Public License * + * along with this program. If not, see <http://www.gnu.org/licenses/>. * + *****************************************************************************/ + +/*!
+ * \file + * \ingroup Parallel + * \brief Multithreading in Dumux + */ +#ifndef DUMUX_PARALLEL_MULTITHREADING_HH +#define DUMUX_PARALLEL_MULTITHREADING_HH + +#include <type_traits> + +#ifndef DUMUX_MULTITHREADING_BACKEND +#define DUMUX_MULTITHREADING_BACKEND Serial +#endif + +namespace Dumux::Detail::Multithreading { + +namespace ExecutionBackends { + +struct Serial {}; +struct Cpp {}; +struct TBB {}; +struct Kokkos {}; +struct OpenMP {}; + +} // end namespace ExecutionBackends + +// set the execution backend type +using ExecutionBackend = ExecutionBackends::DUMUX_MULTITHREADING_BACKEND; + +} // end namespace Dumux::Detail::Multithreading + +namespace Dumux::Multithreading { + +/*! + * \ingroup Parallel + * \brief Checking whether the backend is serial + */ +inline constexpr bool isSerial() +{ + using namespace Dumux::Detail::Multithreading; + return std::is_same_v<ExecutionBackends::Serial, ExecutionBackend>; +} + +} // end namespace Dumux::Multithreading + +#endif diff --git a/dumux/parallel/parallel_for.hh b/dumux/parallel/parallel_for.hh new file mode 100644 index 0000000000000000000000000000000000000000..fc3b9b79fbf4f474f7e681d67deb6e5758bb2715 --- /dev/null +++ b/dumux/parallel/parallel_for.hh @@ -0,0 +1,182 @@ +// -*- mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- +// vi: set et ts=4 sw=4 sts=4: +/***************************************************************************** + * See the file COPYING for full copying permissions. * + * * + * This program is free software: you can redistribute it and/or modify * + * it under the terms of the GNU General Public License as published by * + * the Free Software Foundation, either version 3 of the License, or * + * (at your option) any later version. * + * * + * This program is distributed in the hope that it will be useful, * + * but WITHOUT ANY WARRANTY; without even the implied warranty of * + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * + * GNU General Public License for more details. * + * * + * You should have received a copy of the GNU General Public License * + * along with this program. If not, see <http://www.gnu.org/licenses/>. * + *****************************************************************************/ + +/*!
+ * \file + * \ingroup Parallel + * \brief Parallel for loop (multithreading) + */ + +#ifndef DUMUX_PARALLEL_PARALLEL_FOR_HH +#define DUMUX_PARALLEL_PARALLEL_FOR_HH + +#include <dumux/parallel/multithreading.hh> + +#if HAVE_CPP_PARALLEL_ALGORITHMS +#include <algorithm> +#include <execution> +#include <dune/common/rangeutilities.hh> +#endif + +#if HAVE_TBB +#include <tbb/parallel_for.h> +#endif + +#if HAVE_KOKKOS +#include <Kokkos_Core.hpp> +#endif + +// contents of the detail namespace might change +// any time without prior notice (do not use directly) +#ifndef DOXYGEN // hide from doxygen +namespace Dumux::Detail { + +// This should be specialized for different ExecutionBackends +template<class FunctorType, class ExecutionBackend> +class ParallelFor; + + +// Serial backend implementation +template<class FunctorType> +class ParallelFor<FunctorType, Multithreading::ExecutionBackends::Serial> +{ +public: + ParallelFor(const std::size_t count, const FunctorType& functor) + : functor_(functor), count_(count) {} + + void execute() const + { + for (std::size_t i = 0; i < count_; ++i) + functor_(i); + } + +private: + FunctorType functor_; + std::size_t count_; +}; + +#if HAVE_CPP_PARALLEL_ALGORITHMS +// C++ parallel algorithms backend implementation +template<class FunctorType> +class ParallelFor<FunctorType, Multithreading::ExecutionBackends::Cpp> +{ +public: + ParallelFor(const std::size_t count, const FunctorType& functor) + : functor_(functor), range_(count) {} + + void execute() const + { + std::for_each(std::execution::par_unseq, range_.begin(), range_.end(), functor_); + } + +private: + FunctorType functor_; + Dune::IntegralRange<std::size_t> range_; +}; +#endif + + +#if HAVE_TBB +// TBB backend implementation +template<class FunctorType> +class ParallelFor<FunctorType, Multithreading::ExecutionBackends::TBB> +{ +public: + ParallelFor(const std::size_t count, const FunctorType& functor) + : functor_(functor), count_(count) {} + + void execute() const + { + tbb::parallel_for(std::size_t{0}, count_, [&](const std::size_t i){ functor_(i); }); + } + +private: + FunctorType functor_; + std::size_t count_; +}; +#endif // HAVE_TBB + +#if HAVE_KOKKOS +// Kokkos backend implementation +template<class FunctorType> +class ParallelFor<FunctorType, Multithreading::ExecutionBackends::Kokkos> +{ +public: + ParallelFor(const std::size_t count, const FunctorType& functor) + : functor_(functor), count_(count) {} + + void execute() const + { + Kokkos::parallel_for(count_, [&](const std::size_t i){ functor_(i); }); + } + +private: + FunctorType functor_; + std::size_t count_; +}; +#endif // HAVE_KOKKOS + + +#if HAVE_OPENMP +// OpenMP backend implementation +template<class FunctorType> +class ParallelFor<FunctorType, Multithreading::ExecutionBackends::OpenMP> +{ +public: + ParallelFor(const std::size_t count, const FunctorType& functor) + : functor_(functor), count_(count) {} + + void execute() const + { + #pragma omp parallel for + for (std::size_t i = 0; i < count_; ++i) + functor_(i); + } + +private: + FunctorType functor_; + std::size_t count_; +}; +#endif // HAVE_OPENMP + + +} // end namespace Detail +#endif // DOXYGEN + + +namespace Dumux { + +/*! 
+ * \ingroup Parallel + * \brief A parallel for loop (multithreading) + * \param count the number of work tasks to perform + * \param functor functor executed for each task (receives the task number as argument) + */ +template<class FunctorType> +inline void parallelFor(const std::size_t count, const FunctorType& functor) +{ + using ExecutionBackend = Detail::Multithreading::ExecutionBackend; + Detail::ParallelFor<FunctorType, ExecutionBackend> action(count, functor); + action.execute(); +} + +} // end namespace Dumux + +#endif diff --git a/dumux/python/assembly/fvassembler.hh b/dumux/python/assembly/fvassembler.hh index 1ede28dc7574a7a7f20bfa754c90a5489fa86afe..23642efb0e1da4b03ca85356fe7ae29339d020cc 100644 --- a/dumux/python/assembly/fvassembler.hh +++ b/dumux/python/assembly/fvassembler.hh @@ -59,10 +59,6 @@ void registerFVAssembler(pybind11::handle scope, pybind11::class_<FVAssembler, o cls.def_property_readonly("isStationaryProblem", &FVAssembler::isStationaryProblem); cls.def_property_readonly("gridVariables", [](FVAssembler& self) { return self.gridVariables(); }); - cls.def("updateGridVariables", [](FVAssembler& self, const SolutionVector& curSol){ - self.updateGridVariables(curSol); - }); - cls.def("assembleResidual", [](FVAssembler& self, const SolutionVector& curSol){ self.assembleResidual(curSol); }); diff --git a/examples/1protationsymmetry/doc/main.md b/examples/1protationsymmetry/doc/main.md index 2d475ca49e09862011e5a0cd66b4f4b8511bf22e..4d0f5120793b7130f59120105cca2f678d13b53d 100644 --- a/examples/1protationsymmetry/doc/main.md +++ b/examples/1protationsymmetry/doc/main.md @@ -30,8 +30,8 @@ and compute the convergence rates. #include <config.h> #include <iostream> -#include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> // for GetPropType #include <dumux/common/parameters.hh> // for getParam #include <dumux/common/integrate.hh> // for integrateL2Error @@ -56,8 +56,8 @@ int main(int argc, char** argv) try { using namespace Dumux; - // We initialize MPI. Finalization is done automatically on exit. - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); // We parse the command line arguments. Parameters::init(argc, argv); @@ -173,13 +173,13 @@ in the input file. gridGeometry->update(gridManager.grid().leafGridView()); gridVariables->updateAfterGridAdaption(p); - p.resize(gridGeometry->numDofs()); - updateAnalyticalSolution(pExact); - // this recreates the linear system, i.e. the sizes of // the right hand side vector and the Jacobian matrix, // and its sparsity pattern. - assembler->setLinearSystem(); + assembler->updateAfterGridAdaption(); + + p.resize(gridGeometry->numDofs()); + updateAnalyticalSolution(pExact); // solve problem on refined grid solver.solve(p); diff --git a/examples/1protationsymmetry/main.cc b/examples/1protationsymmetry/main.cc index f75766827fcccc4536f59791b1f63692a490cb72..ebe5bbb58992a5b6a85fc4bb4bf8952c60eee1e5 100644 --- a/examples/1protationsymmetry/main.cc +++ b/examples/1protationsymmetry/main.cc @@ -27,8 +27,8 @@ #include <config.h> #include <iostream> -#include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> // for GetPropType #include <dumux/common/parameters.hh> // for getParam #include <dumux/common/integrate.hh> // for integrateL2Error @@ -51,8 +51,8 @@ int main(int argc, char** argv) try { using namespace Dumux; - // We initialize MPI. 
Finalization is done automatically on exit. - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); // We parse the command line arguments. Parameters::init(argc, argv); @@ -160,13 +160,13 @@ int main(int argc, char** argv) try gridGeometry->update(gridManager.grid().leafGridView()); gridVariables->updateAfterGridAdaption(p); - p.resize(gridGeometry->numDofs()); - updateAnalyticalSolution(pExact); - // this recreates the linear system, i.e. the sizes of // the right hand side vector and the Jacobian matrix, // and its sparsity pattern. - assembler->setLinearSystem(); + assembler->updateAfterGridAdaption(); + + p.resize(gridGeometry->numDofs()); + updateAnalyticalSolution(pExact); // solve problem on refined grid solver.solve(p); diff --git a/examples/1ptracer/doc/main.md b/examples/1ptracer/doc/main.md index 3d9741c64f1020bfde92f41c00e350df0be3c3ef..986760dad8e553b0861a0e2ce83d30ace0c1494f 100644 --- a/examples/1ptracer/doc/main.md +++ b/examples/1ptracer/doc/main.md @@ -26,11 +26,11 @@ The code documentation is structured as follows: ### Included header files <details><summary> Click to show includes</summary> -These are DUNE helper classes related to parallel computations, time measurements and file I/O +These headers provide time measurement utilities and parallel backend initialization ```cpp -#include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> ``` The following headers include functionality related to property definition or retrieval, as well as @@ -87,8 +87,8 @@ int main(int argc, char** argv) try { using namespace Dumux; - // The Dune MPIHelper must be instantiated for each program using Dune - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); // parse command line arguments and input file Parameters::init(argc, argv); diff --git a/examples/1ptracer/main.cc b/examples/1ptracer/main.cc index 4faeafcdbf54aaf8a012c11a71d50aba9669a515..94a15e82d62b3e136c5cbc20dc84a2831e07e82d 100644 --- a/examples/1ptracer/main.cc +++ b/examples/1ptracer/main.cc @@ -28,9 +28,9 @@ #include <iostream> // [[/exclude]] -// These are DUNE helper classes related to parallel computations, time measurements and file I/O -#include <dune/common/parallel/mpihelper.hh> +// These headers provide time measurement utilities and parallel backend initialization #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> // The following headers include functionality related to property definition or retrieval, as well as // the retrieval of input parameters specified in the input file or via the command line. @@ -69,8 +69,8 @@ int main(int argc, char** argv) try { using namespace Dumux; - // The Dune MPIHelper must be instantiated for each program using Dune - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); // parse command line arguments and input file Parameters::init(argc, argv); diff --git a/examples/2pinfiltration/doc/main.md b/examples/2pinfiltration/doc/main.md index 88b4655dff7451a33c4084126620d4babff3a083..b9e9d0a4d2ebd1391a7758992521e5d33ead1178 100644 --- a/examples/2pinfiltration/doc/main.md +++ b/examples/2pinfiltration/doc/main.md @@ -42,6 +42,7 @@ In Dumux, a property system is used to specify the model. 
For this, different pr ```cpp #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> +#include <dumux/common/initialize.hh> ``` We include the linear solver to be used to solve the linear system and the nonlinear Newton's method @@ -100,8 +101,9 @@ int main(int argc, char** argv) try // we define the type tag for this problem using TypeTag = Properties::TTag::PointSourceExample; - //We initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); //We parse command line arguments and input file Parameters::init(argc, argv); @@ -296,12 +298,10 @@ We start the time loop. In each time step before we start calculating a new solu { // We overwrite the old solution with the new (resized & interpolated) one xOld = x; - // We tell the assembler to resize the matrix and set pattern - assembler->setJacobianPattern(); - // We tell the assembler to resize the residual - assembler->setResidualSize(); - // We initialize the secondary variables to the new (and "new old") solution + // We initialize the secondary variables to the new (and "new old") solution gridVariables->updateAfterGridAdaption(x); + // We also need to update the assembler (sizes of residual and Jacobian change) + assembler->updateAfterGridAdaption(); // We update the point source map problem->computePointSourceMap(); } diff --git a/examples/2pinfiltration/main.cc b/examples/2pinfiltration/main.cc index 3f590b274546bebd1eba4a735fb43f429cde2cc7..6a182be9bd1182f7f6d5e460d4215d7b56c24334 100644 --- a/examples/2pinfiltration/main.cc +++ b/examples/2pinfiltration/main.cc @@ -35,6 +35,7 @@ // In Dumux, a property system is used to specify the model. For this, different properties are defined containing type definitions, values and methods. All properties are declared in the file properties.hh. Additionally, we include the parameter class, which manages the definition of input parameters by a default value, the inputfile or the command line. 
#include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> +#include <dumux/common/initialize.hh> //We include the linear solver to be used to solve the linear system and the nonlinear Newton's method #include <dumux/linear/amgbackend.hh> @@ -72,8 +73,9 @@ int main(int argc, char** argv) try // we define the type tag for this problem using TypeTag = Properties::TTag::PointSourceExample; - //We initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); //We parse command line arguments and input file Parameters::init(argc, argv); @@ -239,12 +241,10 @@ int main(int argc, char** argv) try { // We overwrite the old solution with the new (resized & interpolated) one xOld = x; - // We tell the assembler to resize the matrix and set pattern - assembler->setJacobianPattern(); - // We tell the assembler to resize the residual - assembler->setResidualSize(); - // We initialize the secondary variables to the new (and "new old") solution + // We initialize the secondary variables to the new (and "new old") solution gridVariables->updateAfterGridAdaption(x); + // We also need to update the assembler (sizes of residual and Jacobian change) + assembler->updateAfterGridAdaption(); // We update the point source map problem->computePointSourceMap(); } diff --git a/examples/biomineralization/doc/fluidmaterial.md b/examples/biomineralization/doc/fluidmaterial.md index a494709af8925e863207619f4a83d314daeaafa6..fffd89216f144fd0e65c7afdda7765991a03a914 100644 --- a/examples/biomineralization/doc/fluidmaterial.md +++ b/examples/biomineralization/doc/fluidmaterial.md @@ -62,7 +62,7 @@ The real work (creating the tables) is done by some external program by Span and ```cpp #include <assert.h> -#include <dumux/material/components/co2tablereader.hh> +#include <test/porousmediumflow/co2/co2tablereader.hh> namespace Dumux::ICP { #include "co2valueslaboratory.inc" }// end namespace Dumux::ICP @@ -114,7 +114,7 @@ public: // Based on a cell mass of 2.5e-16, the molar mass of cells would be 1.5e8 kg/mol, but such high molar masses would lead to numerical problems. static Scalar molarMass() { - Scalar molarMass = getParam<Scalar>("BioCoefficients.SuspendedBiomassMolarMass", 1); + static Scalar molarMass = getParam<Scalar>("BioCoefficients.SuspendedBiomassMolarMass", 1); return molarMass; } }; @@ -154,7 +154,6 @@ which are needed to model biomineralization. 
#include <dumux/material/components/co2.hh> #include <dumux/material/components/h2o.hh> #include <dumux/material/components/tabulatedcomponent.hh> -#include <dumux/material/components/co2tablereader.hh> #include <dumux/material/components/sodiumion.hh> #include <dumux/material/components/chlorideion.hh> #include <dumux/material/components/calciumion.hh> diff --git a/examples/biomineralization/doc/mainfile.md b/examples/biomineralization/doc/mainfile.md index 165c57104573c0b925fe6afe9479e5af5e92a109..017295283224685419dd03c440ab388e4a6df190 100644 --- a/examples/biomineralization/doc/mainfile.md +++ b/examples/biomineralization/doc/mainfile.md @@ -42,6 +42,7 @@ the retrieval of input parameters specified in the input file or via the command ```cpp #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> +#include <dumux/common/initialize.hh> ``` The following files contain the nonlinear Newton method, the linear solver and the assembler @@ -87,8 +88,9 @@ int main(int argc, char** argv) try { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // parse command line arguments and input file Parameters::init(argc, argv); diff --git a/examples/biomineralization/doc/solidmaterial.md b/examples/biomineralization/doc/solidmaterial.md index c39a8a4c227dc2ccd346d68be07ed7fc3c6c50de..3662b2fec8f7431b94dbd962e7bab82b21b64482 100644 --- a/examples/biomineralization/doc/solidmaterial.md +++ b/examples/biomineralization/doc/solidmaterial.md @@ -87,7 +87,7 @@ public: // Based on a cell mass of 2.5e-16, the molar mass of cells would be 1.5e8 kg/mol, but biofilms are more than just cells and such high molar masses would lead to numerical problems. static Scalar molarMass() { - Scalar molarMass = getParam<Scalar>("BioCoefficients.BiofilmMolarMass", 1); + static Scalar molarMass = getParam<Scalar>("BioCoefficients.BiofilmMolarMass", 1); return molarMass; } @@ -95,7 +95,7 @@ public: // It is typically highly variable for different biofilms, thus we read it from params.input or use the default value of 10 kg/m^3 static Scalar solidDensity(Scalar temperature) { - Scalar rho = getParam<Scalar>("BioCoefficients.BiofilmDensity", 10); + static Scalar rho = getParam<Scalar>("BioCoefficients.BiofilmDensity", 10); return rho; } }; diff --git a/examples/biomineralization/main.cc b/examples/biomineralization/main.cc index 45fa5a95e6cde4bf857163572cdb15e36ab7e2b1..8bccfdb60b5d2fbf5463edb64625324b351b72d5 100644 --- a/examples/biomineralization/main.cc +++ b/examples/biomineralization/main.cc @@ -37,6 +37,7 @@ // the retrieval of input parameters specified in the input file or via the command line. 
#include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> +#include <dumux/common/initialize.hh> // The following files contain the nonlinear Newton method, the linear solver and the assembler #include <dumux/nonlinear/newtonsolver.hh> @@ -68,8 +69,9 @@ int main(int argc, char** argv) try { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // parse command line arguments and input file Parameters::init(argc, argv); diff --git a/examples/biomineralization/material/co2tableslaboratory.hh b/examples/biomineralization/material/co2tableslaboratory.hh index f0ad811d3556273bb8fdd3dbff46ac8748ca7848..972dc17ffb4286a038760b862898f3d1cd0cde3e 100644 --- a/examples/biomineralization/material/co2tableslaboratory.hh +++ b/examples/biomineralization/material/co2tableslaboratory.hh @@ -30,7 +30,7 @@ // [[codeblock]] #include <assert.h> -#include <dumux/material/components/co2tablereader.hh> +#include <test/porousmediumflow/co2/co2tablereader.hh> namespace Dumux::ICP { #include "co2valueslaboratory.inc" }// end namespace Dumux::ICP diff --git a/examples/biomineralization/material/components/biofilm.hh b/examples/biomineralization/material/components/biofilm.hh index 7fe1505926a52ae5d62f44617cf337daa057b32b..7feb5604c1a0cfdaa7f6b838c90fac2073b69d34 100644 --- a/examples/biomineralization/material/components/biofilm.hh +++ b/examples/biomineralization/material/components/biofilm.hh @@ -57,7 +57,7 @@ public: // Based on a cell mass of 2.5e-16, the molar mass of cells would be 1.5e8 kg/mol, but biofilms are more than just cells and such high molar masses would lead to numerical problems. static Scalar molarMass() { - Scalar molarMass = getParam<Scalar>("BioCoefficients.BiofilmMolarMass", 1); + static Scalar molarMass = getParam<Scalar>("BioCoefficients.BiofilmMolarMass", 1); return molarMass; } @@ -65,7 +65,7 @@ public: // It is typically highly variable for different biofilms, thus we read it from params.input or use the default value of 10 kg/m^3 static Scalar solidDensity(Scalar temperature) { - Scalar rho = getParam<Scalar>("BioCoefficients.BiofilmDensity", 10); + static Scalar rho = getParam<Scalar>("BioCoefficients.BiofilmDensity", 10); return rho; } }; diff --git a/examples/biomineralization/material/components/suspendedbiomass.hh b/examples/biomineralization/material/components/suspendedbiomass.hh index ce5e2a5f227594926519ded73621a338222ba6f8..37399221dae0b3112f470ab839bab29a1c7b89ec 100644 --- a/examples/biomineralization/material/components/suspendedbiomass.hh +++ b/examples/biomineralization/material/components/suspendedbiomass.hh @@ -55,7 +55,7 @@ public: // Based on a cell mass of 2.5e-16, the molar mass of cells would be 1.5e8 kg/mol, but such high molar masses would lead to numerical problems. 
static Scalar molarMass() { - Scalar molarMass = getParam<Scalar>("BioCoefficients.SuspendedBiomassMolarMass", 1); + static Scalar molarMass = getParam<Scalar>("BioCoefficients.SuspendedBiomassMolarMass", 1); return molarMass; } }; diff --git a/examples/biomineralization/material/fluidsystems/biominsimplechemistry.hh b/examples/biomineralization/material/fluidsystems/biominsimplechemistry.hh index 006f1c7e1ad9d242823bf1930146fbe988dda04d..ee407715cb00975a772acd7467384a0920203f51 100644 --- a/examples/biomineralization/material/fluidsystems/biominsimplechemistry.hh +++ b/examples/biomineralization/material/fluidsystems/biominsimplechemistry.hh @@ -42,7 +42,6 @@ #include <dumux/material/components/co2.hh> #include <dumux/material/components/h2o.hh> #include <dumux/material/components/tabulatedcomponent.hh> -#include <dumux/material/components/co2tablereader.hh> #include <dumux/material/components/sodiumion.hh> #include <dumux/material/components/chlorideion.hh> #include <dumux/material/components/calciumion.hh> diff --git a/examples/freeflowchannel/README.md b/examples/freeflowchannel/README.md index 00c7fce8980b48aad9e61888912fa6f57794aa0a..e361dd3a4c3c21720c97a093f8ea5c33e2c770e0 100644 --- a/examples/freeflowchannel/README.md +++ b/examples/freeflowchannel/README.md @@ -375,6 +375,7 @@ the retrieval of input parameters specified in the input file or via the command ```cpp #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> +#include <dumux/common/initialize.hh> ``` The following files contain the non-linear Newton solver, the available linear solver backends and the assembler for the linear @@ -430,8 +431,9 @@ int main(int argc, char** argv) try { using namespace Dumux; - // The Dune MPIHelper must be instantiated for each program using Dune - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // parse command line arguments and input file Parameters::init(argc, argv); diff --git a/examples/freeflowchannel/main.cc b/examples/freeflowchannel/main.cc index abe4b4e163b70ecf368d6ef6acfd126ac9aa8f7a..eda6cc688f8857ca07a87186daa15195962960a1 100644 --- a/examples/freeflowchannel/main.cc +++ b/examples/freeflowchannel/main.cc @@ -37,6 +37,7 @@ // the retrieval of input parameters specified in the input file or via the command line. #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> +#include <dumux/common/initialize.hh> // The following files contain the non-linear Newton solver, the available linear solver backends and the assembler for the linear // systems arising from the staggered-grid discretization. @@ -75,8 +76,9 @@ int main(int argc, char** argv) try { using namespace Dumux; - // The Dune MPIHelper must be instantiated for each program using Dune - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // parse command line arguments and input file Parameters::init(argc, argv); diff --git a/examples/liddrivencavity/doc/problem.md b/examples/liddrivencavity/doc/problem.md index d8f4725da4de7f2713de27aeec68f777d9faba56..a76c6fcad560d22d40b0a471fd2d8e4611f5770e 100644 --- a/examples/liddrivencavity/doc/problem.md +++ b/examples/liddrivencavity/doc/problem.md @@ -158,7 +158,6 @@ conditions for the Navier-Stokes single-phase flow simulation. 
```cpp #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> -#include <dumux/common/numeqvector.hh> ``` Include the `NavierStokesProblem` class, the base @@ -191,8 +190,10 @@ class LidDrivenCavityExampleProblem : public NavierStokesProblem<TypeTag> using SubControlVolume = typename GridGeometry::SubControlVolume; using SubControlVolumeFace = typename GridGeometry::SubControlVolumeFace; using Indices = typename GetPropType<TypeTag, Properties::ModelTraits>::Indices; - using PrimaryVariables = typename ParentType::PrimaryVariables; - using NumEqVector = typename ParentType::NumEqVector; + using InitialValues = typename ParentType::InitialValues; + using Sources = typename ParentType::Sources; + using DirichletValues = typename ParentType::DirichletValues; + using BoundaryFluxes = typename ParentType::BoundaryFluxes; using Scalar = GetPropType<TypeTag, Properties::Scalar>; static constexpr auto dimWorld = GridGeometry::GridView::dimensionworld; @@ -235,9 +236,9 @@ The following function specifies the __values on Dirichlet boundaries__. We need to define values for the primary variables (velocity). ```cpp - PrimaryVariables dirichletAtPos(const GlobalPosition &globalPos) const + DirichletValues dirichletAtPos(const GlobalPosition &globalPos) const { - PrimaryVariables values(0.0); + DirichletValues values(0.0); if constexpr (ParentType::isMomentumProblem()) { @@ -254,13 +255,13 @@ We define a (zero) mass flux here. ```cpp template<class ElementVolumeVariables, class ElementFluxVariablesCache> - NumEqVector neumann(const Element& element, - const FVElementGeometry& fvGeometry, - const ElementVolumeVariables& elemVolVars, - const ElementFluxVariablesCache& elemFluxVarsCache, - const SubControlVolumeFace& scvf) const + BoundaryFluxes neumann(const Element& element, + const FVElementGeometry& fvGeometry, + const ElementVolumeVariables& elemVolVars, + const ElementFluxVariablesCache& elemFluxVarsCache, + const SubControlVolumeFace& scvf) const { - NumEqVector values(0.0); + BoundaryFluxes values(0.0); if constexpr (!ParentType::isMomentumProblem()) { @@ -286,9 +287,9 @@ constraint for pressure__ in a single cell. { return !ParentType::isMomentumProblem(); } // Set a fixed pressure at the lower-left cell. - std::bitset<PrimaryVariables::dimension> hasInternalDirichletConstraint(const Element& element, const SubControlVolume& scv) const + std::bitset<DirichletValues::dimension> hasInternalDirichletConstraint(const Element& element, const SubControlVolume& scv) const { - std::bitset<PrimaryVariables::dimension> values; + std::bitset<DirichletValues::dimension> values; if constexpr (!ParentType::isMomentumProblem()) { @@ -301,8 +302,8 @@ constraint for pressure__ in a single cell. } // Specify the pressure value in the internal Dirichlet cell. - PrimaryVariables internalDirichlet(const Element& element, const SubControlVolume& scv) const - { return PrimaryVariables(1.1e5); } + DirichletValues internalDirichlet(const Element& element, const SubControlVolume& scv) const + { return DirichletValues(1.1e5); } ``` Setting a __reference pressure__ can help to improve the Newton convergence rate by making the numerical derivatives more exact. @@ -318,9 +319,9 @@ This is related to floating point arithmetic as pressure values are usually much The following function defines the initial conditions.
```cpp - PrimaryVariables initialAtPos(const GlobalPosition &globalPos) const + InitialValues initialAtPos(const GlobalPosition &globalPos) const { - PrimaryVariables values(0.0); + InitialValues values(0.0); if constexpr (!ParentType::isMomentumProblem()) values[Indices::pressureIdx] = 1.0e+5; @@ -365,6 +366,7 @@ the retrieval of input parameters specified in the input file or via the command ```cpp #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> +#include <dumux/common/initialize.hh> ``` The following files contain the multi-domain Newton solver, the available linear solver backends and the assembler for the linear @@ -449,8 +451,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // The Dune MPIHelper must be instantiated for each program using Dune, it is finalized automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // parse command line arguments and input file Parameters::init(argc, argv); diff --git a/examples/liddrivencavity/main.cc b/examples/liddrivencavity/main.cc index b0e1d8ba2859a48615c4d3756a965e955e7dd1d7..9e99893bf10f884bd1d2a9d5cf4665d9294412fb 100644 --- a/examples/liddrivencavity/main.cc +++ b/examples/liddrivencavity/main.cc @@ -37,6 +37,7 @@ // the retrieval of input parameters specified in the input file or via the command line. #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> +#include <dumux/common/initialize.hh> // The following files contain the multi-domain Newton solver, the available linear solver backends and the assembler for the linear // systems arising from the staggered-grid discretization. @@ -106,8 +107,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // The Dune MPIHelper must be instantiated for each program using Dune, it is finalized automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // parse command line arguments and input file Parameters::init(argc, argv); diff --git a/examples/porenetwork_upscaling/doc/main.md b/examples/porenetwork_upscaling/doc/main.md index 4433880183bc7c5797fec360cdddf27f36ad13a7..7c85039daa388ce1d3efb57681d2ec576d0ce455 100644 --- a/examples/porenetwork_upscaling/doc/main.md +++ b/examples/porenetwork_upscaling/doc/main.md @@ -34,6 +34,7 @@ Pore-Network-Model to evaluate the upscaled Darcy permeability of a given networ #include <dumux/common/properties.hh> // for GetPropType #include <dumux/common/parameters.hh> // for getParam +#include <dumux/common/initialize.hh> #include <dumux/linear/seqsolverbackend.hh> // for ILU0BiCGSTABBackend #include <dumux/linear/pdesolver.hh> // for LinearPDESolver @@ -60,6 +61,9 @@ int main(int argc, char** argv) try { using namespace Dumux; + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + // We parse the command line arguments. 
Parameters::init(argc, argv); diff --git a/examples/porenetwork_upscaling/doc/problem.md b/examples/porenetwork_upscaling/doc/problem.md index 1c4289a2de558d498ce44906a1ab1b092f662bf2..7a5514983565fb2146b2ecdd2a5a9fb743550727 100644 --- a/examples/porenetwork_upscaling/doc/problem.md +++ b/examples/porenetwork_upscaling/doc/problem.md @@ -183,7 +183,6 @@ class UpscalingProblem : public PorousMediumFlowProblem<TypeTag> ``` </details> - ### The constructor of our problem. ```cpp diff --git a/examples/porenetwork_upscaling/main.cc b/examples/porenetwork_upscaling/main.cc index af9cb30c19e1b41649836347c1ea941b156a3325..2cedb4fdfbb97986b4475e8f816ea66d17fb2cd7 100644 --- a/examples/porenetwork_upscaling/main.cc +++ b/examples/porenetwork_upscaling/main.cc @@ -31,6 +31,7 @@ #include <dumux/common/properties.hh> // for GetPropType #include <dumux/common/parameters.hh> // for getParam +#include <dumux/common/initialize.hh> #include <dumux/linear/seqsolverbackend.hh> // for ILU0BiCGSTABBackend #include <dumux/linear/pdesolver.hh> // for LinearPDESolver @@ -55,6 +56,9 @@ int main(int argc, char** argv) try { using namespace Dumux; + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + // We parse the command line arguments. Parameters::init(argc, argv); diff --git a/examples/shallowwaterfriction/README.md b/examples/shallowwaterfriction/README.md index e5327b6a8b4b457244aa43525537b530af0ec8a3..2b98d77446b541d14c8e02fce6da44ceee21bbe2 100644 --- a/examples/shallowwaterfriction/README.md +++ b/examples/shallowwaterfriction/README.md @@ -73,27 +73,29 @@ where $`\mathbf{U}`$, $`\mathbf{F}`$ and $`\mathbf{G}`$ defined as $`h`$ the water depth, $`u`$ the velocity in x-direction and $`v`$ the velocity in y-direction, $`g`$ is the constant of gravity. -The source terms for bed slope $`\mathbf{S_b}`$ and bottom friction +The source terms for the bed slope $`\mathbf{S_b}`$ and friction $`\mathbf{S_f}`$ are given as ```math \mathbf{S_b} = \begin{bmatrix} 0 \\ -gh \frac{\partial z}{\partial x} \\ -gh \frac{\partial z}{\partial y}\end{bmatrix}, -\mathbf{S_f} = \begin{bmatrix} 0 \\ -\frac{\tau_x}{\rho} \\ -\frac{\tau_y}{\rho} \end{bmatrix}. +\mathbf{S_f} = \begin{bmatrix} 0 \\ -ghS_{fx} \\ -ghS_{fy} \end{bmatrix}. ``` -with the bed surface $`z`$. $`\rho`$ is the water density. $`\tau_x`$ and $`\tau_y`$ are the bottom shear stress components in x- an y-direction, respectively. -The bottom shear stress is calculated by Manning's law. +with the bed surface $`z`$. $`S_{fx}`$ and $`S_{fy}`$ are the bed shear stress +components in x- and y-direction, which are calculated by Manning's law. ### Manning's law -The empirical Manning model specifies the bottom shear stress by the following equation +The empirical Manning model specifies the bed shear stress by the following equations: ```math -\mathbf{\tau} = \frac{n^2 g\rho}{h^{1/3}} \sqrt{u^2 + v^2} \begin{bmatrix} u \\ v \end{bmatrix} +S_{fx} = \frac{n^2u}{R_{hy}^{4/3}} \sqrt{u^2 + v^2}, + +S_{fy} = \frac{n^2v}{R_{hy}^{4/3}} \sqrt{u^2 + v^2} ``` -$`n`$ is Manning's friction value. -In addition, the dumux shallow water model extends the water depth by a roughness hight to limit the friction for small water depth. +$`n`$ is Manning's friction value and $`R_{hy}`$ is the hydraulic radius, +which is assumed to be equal to the water depth $`h`$.
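A quick sanity check connects the two formulations above (a sketch of the algebra, using the stated assumption $`R_{hy} = h`$): substituting Manning's law into the friction source recovers the former shear-stress form, since
```math
ghS_{fx} = gh \frac{n^2 u}{h^{4/3}} \sqrt{u^2 + v^2} = \frac{n^2 g}{h^{1/3}} \sqrt{u^2 + v^2} \, u = \frac{\tau_x}{\rho},
```
and analogously for the y-component, so the two versions of $`\mathbf{S_f}`$ agree.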
### Analytical solution Since normal flow conditions are assumed, the analytic solution is calculated using the equation @@ -103,8 +105,7 @@ of Gauckler, Manning and Strickler: v_m = n^{-1} R_{hy}^{2/3} I_s^{1/2} ``` -$`R_{hy}`$ is the hydraulic radius, which is assumed to be equal to the water depth $`h`$. -The mean velocity $`v_m`$ is given as +where the mean velocity $`v_m`$ is given as ```math v_m = \frac{q}{h} diff --git a/examples/shallowwaterfriction/doc/main.md b/examples/shallowwaterfriction/doc/main.md index 6989c8bb8bb2e0c119fa21dc635f964797e854f4..24b19fc54cc9b8a3caab21bf81e3955c483cbb40 100644 --- a/examples/shallowwaterfriction/doc/main.md +++ b/examples/shallowwaterfriction/doc/main.md @@ -39,6 +39,7 @@ the retrieval of input parameters specified in the input file or via the command ```cpp #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> +#include <dumux/common/initialize.hh> ``` The following files contain the available linear solver backends, the non-linear Newton solver @@ -76,8 +77,8 @@ We include the header file specifying the properties of this example ### The main function We will now discuss the main program flow implemented within the `main` function. -At the beginning of each program using Dune, an instance of `Dune::MPIHelper` has to -be created. Moreover, we parse the run-time arguments from the command line and the +At the beginning of each program, we initialize potential parallel backends (e.g. MPI). +Moreover, we parse the run-time arguments from the command line and the input file: ```cpp @@ -85,8 +86,8 @@ int main(int argc, char** argv) try { using namespace Dumux; - // The Dune MPIHelper must be instantiated for each program using Dune - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); // We parse command line arguments and input file Parameters::init(argc, argv); diff --git a/examples/shallowwaterfriction/doc/swe.md b/examples/shallowwaterfriction/doc/swe.md index afa83a5b1441920069fe4c4b3e4d4213e1de255c..b69c8e01fd3a2a80230debaf9ecf91c39de4c2d4 100644 --- a/examples/shallowwaterfriction/doc/swe.md +++ b/examples/shallowwaterfriction/doc/swe.md @@ -298,20 +298,20 @@ Accordingly, the third entry of the `bottomFrictionSource` is equal to the secon ```cpp NumEqVector bottomFrictionSource(const Element& element, - const FVElementGeometry& fvGeometry, - const ElementVolumeVariables& elemVolVars, - const SubControlVolume &scv) const + const FVElementGeometry& fvGeometry, + const ElementVolumeVariables& elemVolVars, + const SubControlVolume &scv) const { NumEqVector bottomFrictionSource(0.0); const auto& volVars = elemVolVars[scv]; // bottom shear stress vector - Dune::FieldVector<Scalar, 2> bottomShearStress = this->spatialParams().frictionLaw(element, scv).shearStress(volVars); + Dune::FieldVector<Scalar, 2> bottomShearStress = this->spatialParams().frictionLaw(element, scv).bottomShearStress(volVars); // source term due to bottom friction bottomFrictionSource[0] = 0.0; - bottomFrictionSource[1] = bottomShearStress[0]; - bottomFrictionSource[2] = bottomShearStress[1]; + bottomFrictionSource[1] = -bottomShearStress[0] / volVars.density(); + bottomFrictionSource[2] = -bottomShearStress[1] / volVars.density(); return bottomFrictionSource; } @@ -461,11 +461,10 @@ surface has a non constant distribution. ### Include files -We include the basic spatial parameters file for free flow, from which we will inherit.
+We include the basic spatial parameters file for finite volumes, from which we will inherit. ```cpp #include <dumux/freeflow/spatialparams.hh> - ``` We include all friction laws. @@ -481,8 +480,9 @@ We include all friction laws. In the `RoughChannelSpatialParams` class, we define all functions needed to describe the rough channel for the shallow water problem. -We inherit from the `FreeFlowSpatialParams` class, which is the base class -for spatial parameters in the context of free-flow applications. +We inherit from the `FVSpatialParams` class, which is the base class +for spatial parameters in the context of +applications using finite volume discretization schemes. ```cpp namespace Dumux { @@ -490,7 +490,7 @@ namespace Dumux { template<class GridGeometry, class Scalar, class VolumeVariables> class RoughChannelSpatialParams : public FreeFlowSpatialParams<GridGeometry, Scalar, - RoughChannelSpatialParams<GridGeometry, Scalar, VolumeVariables>> + RoughChannelSpatialParams<GridGeometry, Scalar, VolumeVariables>> { // These convenience aliases will be used throughout this class using ThisType = RoughChannelSpatialParams<GridGeometry, Scalar, VolumeVariables>; diff --git a/examples/shallowwaterfriction/main.cc b/examples/shallowwaterfriction/main.cc index 5e6dfd4ed05d693e5fb967454ee84cc52c40d1e6..ea8c4649ee343d9f95170693fe616511fcbba027 100644 --- a/examples/shallowwaterfriction/main.cc +++ b/examples/shallowwaterfriction/main.cc @@ -35,6 +35,7 @@ // the retrieval of input parameters specified in the input file or via the command line. #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> +#include <dumux/common/initialize.hh> // The following files contain the available linear solver backends, the non-linear Newton solver // and the assembler for the linear systems arising from finite volume discretizations @@ -58,16 +59,16 @@ // ### The main function // We will now discuss the main program flow implemented within the `main` function. -// At the beginning of each program using Dune, an instance of `Dune::MPIHelper` has to -// be created. Moreover, we parse the run-time arguments from the command line and the +// At the beginning of each program, we initialize potential parallel backends (e.g. MPI).
+// Moreover, we parse the run-time arguments from the command line and the // input file: // [[codeblock]] int main(int argc, char** argv) try { using namespace Dumux; - // The Dune MPIHelper must be instantiated for each program using Dune - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); // We parse command line arguments and input file Parameters::init(argc, argv); diff --git a/python/dumux/assembly/__init__.py b/python/dumux/assembly/__init__.py index b9e9ddcd3a9c3fdedf863ddd3c9c2050974744cf..18f74dc55231e25c6845e025ab9c055a46d0ef77 100644 --- a/python/dumux/assembly/__init__.py +++ b/python/dumux/assembly/__init__.py @@ -1,3 +1,6 @@ +# pylint: skip-file +# until that decorator bit can be removed again + """Classes and function related to the assembly of linear systems""" from dune.generator.generator import SimpleGenerator @@ -5,6 +8,25 @@ from dune.common.hashit import hashIt from dumux.wrapping import cppWrapperCreator, cppWrapperClassAlias +def decoratePre(pre): + def wrappedPre(*args, **kwargs): + preamble = pre(*args, **kwargs) + newPreamble = "" + for line in preamble.split("\n"): + newPreamble += line + "\n" + if line.startswith("#include <config.h>"): + newPreamble += "#undef DUMUX_MULTITHREADING_BACKEND\n" + newPreamble += "#define DUMUX_MULTITHREADING_BACKEND Serial\n" + return newPreamble + + return wrappedPre + + +myAttributes = vars(SimpleGenerator).copy() +myAttributes["pre"] = decoratePre(myAttributes["pre"]) +MySimpleGenerator = type("MySimpleGenerator", (object,), myAttributes) + + @cppWrapperCreator def _createFVAssembler(*, problem, gridVariables, model, diffMethod="numeric", isImplicit=True): """ @@ -40,13 +62,20 @@ def _createFVAssembler(*, problem, gridVariables, model, diffMethod="numeric", i includes += ["dumux/python/assembly/fvassembler.hh"] moduleName = "fvassembler_" + hashIt(assemblerType) - generator = SimpleGenerator("FVAssembler", "Dumux::Python") + # remark: use SimpleGenerator again starting with dune 2.9 + generator = MySimpleGenerator("FVAssembler", "Dumux::Python") module = generator.load( includes, assemblerType, moduleName, holder="std::shared_ptr", preamble=model.cppHeader, + # make sure the assembler is compiled with the Serial backend + # as currently the assembly in combination with Python is not thread-safe + # the following is nicer but only works with dune > 2.8 + # extraCMake=[ + # "target_compile_definitions(TARGET PUBLIC DUMUX_MULTITHREADING_BACKEND=Serial)" + # ], ) return module.FVAssembler(problem, problem.gridGeometry(), gridVariables) diff --git a/python/dumux/common/CMakeLists.txt b/python/dumux/common/CMakeLists.txt index 65f0a6b1f590e515998bbf8fcb038b2c90ea9f88..21bad44ccf825f9063e6fdfac6fc8c2d58acfe09 100644 --- a/python/dumux/common/CMakeLists.txt +++ b/python/dumux/common/CMakeLists.txt @@ -2,7 +2,14 @@ add_python_targets(common __init__ properties ) -dune_add_pybind11_module(NAME _common) + +# currently the Python bindings only work with +# serial dumux code +dune_add_pybind11_module( + NAME _common + COMPILE_DEFINITIONS DUMUX_MULTITHREADING_BACKEND=Serial +) + set_property(TARGET _common PROPERTY LINK_LIBRARIES dunecommon dunegrid APPEND) if(SKBUILD) diff --git a/python/dumux/common/_common.cc b/python/dumux/common/_common.cc index b1a27adffabdbdc77f070a3fd367b565f7d32b0b..921a20b442b469f5565548ab0c1b69c63f4f0ae6 100644 --- a/python/dumux/common/_common.cc +++ b/python/dumux/common/_common.cc @@ -20,6 +20,7 @@ #include 
<dune/python/pybind11/pybind11.h> #include <dune/python/pybind11/stl.h> +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> #include <dumux/python/common/timeloop.hh> @@ -28,6 +29,11 @@ PYBIND11_MODULE(_common, module) using namespace Dumux; using pybind11::operator""_a; + // maybe initialize MPI and/or multithreading backend + int argc = 0; + char **argv = NULL; + Dumux::initialize(argc, argv); + // export time loop Python::registerTimeLoop<double>(module); diff --git a/test/common/functions/test_function_l2norm.cc b/test/common/functions/test_function_l2norm.cc index a649ffdf3ca912071218243b1e777abe8e4ca5d6..a04be0021dfb66a6945f82403c59e3829cb1bbde 100644 --- a/test/common/functions/test_function_l2norm.cc +++ b/test/common/functions/test_function_l2norm.cc @@ -10,6 +10,7 @@ #include <dune/functions/gridfunctions/discreteglobalbasisfunction.hh> #include <dumux/common/parameters.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/integrate.hh> #include <dumux/multidomain/glue.hh> #include <dumux/discretization/projection/projector.hh> @@ -19,8 +20,8 @@ int main (int argc, char *argv[]) { - // maybe initialize mpi - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); // initialize parameters Dumux::Parameters::init([](auto& p){ diff --git a/test/common/parameters/test_loggingparametertree.cc b/test/common/parameters/test_loggingparametertree.cc index 12b4054d486d1a5c97490d9f2d714afef6bb1355..62532a407ee9aed0f97cc002bab748f426598a92 100644 --- a/test/common/parameters/test_loggingparametertree.cc +++ b/test/common/parameters/test_loggingparametertree.cc @@ -1,17 +1,17 @@ #include <config.h> #include <iostream> -#include <dune/common/parallel/mpihelper.hh> #include <dune/common/exceptions.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> int main (int argc, char *argv[]) { using namespace Dumux; - // maybe initialize mpi - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); // initialize parameter tree Parameters::init(argc, argv, "params.input"); diff --git a/test/common/spline/test_cubicspline.cc b/test/common/spline/test_cubicspline.cc index e6fdb1094d4b2293f1e67880d35664ac493b4b26..cb5127e97686460e968b5aa3e179c2c72b7e7155 100644 --- a/test/common/spline/test_cubicspline.cc +++ b/test/common/spline/test_cubicspline.cc @@ -28,7 +28,7 @@ #include <functional> #include <dune/common/exceptions.hh> -#include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/math.hh> #include <dumux/common/cubicspline.hh> #include <dumux/io/gnuplotinterface.hh> @@ -43,7 +43,8 @@ std::vector<double> eval(const Function& f, const std::vector<double>& x) int main(int argc, char** argv) { - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); // we test the spline interpolation against a sample function const auto f = [](double x){ return 1.0 / ( 1.0 + x*x ); }; diff --git a/test/common/spline/test_monotonecubicspline.cc b/test/common/spline/test_monotonecubicspline.cc index 04596dcb17bd52bccaf3c96e8ed0c1c2a4842da8..3eaae6617fbc8a20342b6cad1eddef39393e6491 100644 --- a/test/common/spline/test_monotonecubicspline.cc +++ b/test/common/spline/test_monotonecubicspline.cc @@ -28,7 +28,7 @@ #include <functional> #include <dune/common/exceptions.hh> -#include 
<dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/math.hh> #include <dumux/common/monotonecubicspline.hh> #include <dumux/io/gnuplotinterface.hh> @@ -43,7 +43,8 @@ std::vector<double> eval(const Function& f, const std::vector<double>& x) int main(int argc, char** argv) { - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); const auto test = [](auto f, auto df, const auto& testPoints, const auto& samplePoints, const std::string& prefix) { diff --git a/test/common/timeloop/test_timeloop.cc b/test/common/timeloop/test_timeloop.cc index be9f5e923ae7ff0c573361f2c34b3efab57e8e0c..37e9d71828c3db1512617b1ed69b00ebf45ccbbc 100644 --- a/test/common/timeloop/test_timeloop.cc +++ b/test/common/timeloop/test_timeloop.cc @@ -10,11 +10,14 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/exceptions.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/timeloop.hh> int main(int argc, char* argv[]) { - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); //! Standard time loop double tStart = 0; double tEnd = 1; double dt = 0.1; diff --git a/test/discretization/box/test_boxfvgeometry.cc b/test/discretization/box/test_boxfvgeometry.cc index 1434154d32ef052190f9f230096f444e31f34290..98ae68f315d698a7415b334424143bbe38a233fa 100644 --- a/test/discretization/box/test_boxfvgeometry.cc +++ b/test/discretization/box/test_boxfvgeometry.cc @@ -31,6 +31,7 @@ #include <dune/grid/utility/structuredgridfactory.hh> #include <dune/grid/yaspgrid.hh> +#include <dumux/common/initialize.hh> #include <dumux/discretization/box/fvgridgeometry.hh> #ifndef DOXYGEN @@ -50,8 +51,8 @@ int main (int argc, char *argv[]) { using namespace Dumux; - // maybe initialize mpi - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); std::cout << "Checking the FVGeometries, SCVs and SCV faces" << std::endl; diff --git a/test/discretization/cellcentered/tpfa/test_tpfafvgeometry.cc b/test/discretization/cellcentered/tpfa/test_tpfafvgeometry.cc index 435fb6e8620e254971d64b8c23be53cc95b7137e..2c6d4b1d04dd59158ff3ad8a650202882213f49f 100644 --- a/test/discretization/cellcentered/tpfa/test_tpfafvgeometry.cc +++ b/test/discretization/cellcentered/tpfa/test_tpfafvgeometry.cc @@ -31,6 +31,7 @@ #include <dune/grid/utility/structuredgridfactory.hh> #include <dune/grid/yaspgrid.hh> +#include <dumux/common/initialize.hh> #include <dumux/discretization/cellcentered/tpfa/fvgridgeometry.hh> #ifndef DOXYGEN @@ -50,8 +51,8 @@ int main (int argc, char *argv[]) { using namespace Dumux; - // maybe initialize mpi - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); std::cout << "Checking the FVGeometries, SCVs and SCV faces" << std::endl; diff --git a/test/discretization/cellcentered/tpfa/test_tpfafvgeometry_nonconforming.cc b/test/discretization/cellcentered/tpfa/test_tpfafvgeometry_nonconforming.cc index 83fec504aba636d4d92882d47514910f7f17cc12..53cb0c725f54bd591d206b4ed6800a890b0c6b35 100644 --- a/test/discretization/cellcentered/tpfa/test_tpfafvgeometry_nonconforming.cc +++ b/test/discretization/cellcentered/tpfa/test_tpfafvgeometry_nonconforming.cc @@ -33,6 +33,7 @@ #include <dune/grid/utility/structuredgridfactory.hh> 
#include <dune/alugrid/grid.hh> +#include <dumux/common/initialize.hh> #include <dumux/adaptive/markelements.hh> #include <dumux/discretization/cellcentered/tpfa/fvgridgeometry.hh> @@ -111,8 +112,8 @@ int main (int argc, char *argv[]) using namespace Dumux; using namespace Dumux::Test; - // maybe initialize mpi - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); std::cout << "Checking the FVGeometries, SCVs and SCV faces on a non-conforming grid" << std::endl; diff --git a/test/discretization/facecentered/staggered/test_staggeredfvgeometry.cc b/test/discretization/facecentered/staggered/test_staggeredfvgeometry.cc index 50ca19c3fa1fae4c268d73bd2bfd9069ef218485..b5ecdc04a704607d86c555ca17fb25bfa38c6a1a 100644 --- a/test/discretization/facecentered/staggered/test_staggeredfvgeometry.cc +++ b/test/discretization/facecentered/staggered/test_staggeredfvgeometry.cc @@ -31,6 +31,7 @@ #include <dune/grid/utility/structuredgridfactory.hh> #include <dune/grid/yaspgrid.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> #include <dumux/common/intersectionmapper.hh> #include <dumux/common/defaultmappertraits.hh> @@ -57,8 +58,8 @@ int main (int argc, char *argv[]) { using namespace Dumux; - // maybe initialize mpi - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); // parse command line arguments and input file Parameters::init(argc, argv); diff --git a/test/discretization/projection/test_projection_2d1d.cc b/test/discretization/projection/test_projection_2d1d.cc index 5f54a9b558ab2421994440c6a4bc45fb99416079..0ec08137bd303a698dbf29e085abb093cd775457 100644 --- a/test/discretization/projection/test_projection_2d1d.cc +++ b/test/discretization/projection/test_projection_2d1d.cc @@ -41,6 +41,7 @@ #include <dumux/io/container.hh> #include <dumux/common/parameters.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/integrate.hh> #include <dumux/discretization/box/fvgridgeometry.hh> @@ -51,8 +52,8 @@ int main (int argc, char *argv[]) { - // maybe initialize mpi - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); // initialize parameter tree Dumux::Parameters::init(argc, argv); diff --git a/test/discretization/rotationsymmetry/test_rotationsymmetric_gridgeometry.cc b/test/discretization/rotationsymmetry/test_rotationsymmetric_gridgeometry.cc index b73501052217d9f88c71fa8e5b955d7933856aae..c761e86fa5e974cda207ac64a1a1f4925d529fbf 100644 --- a/test/discretization/rotationsymmetry/test_rotationsymmetric_gridgeometry.cc +++ b/test/discretization/rotationsymmetry/test_rotationsymmetric_gridgeometry.cc @@ -24,13 +24,14 @@ #include <iostream> -#include <dune/common/parallel/mpihelper.hh> #include <dune/common/exceptions.hh> #include <dune/common/float_cmp.hh> #include <dune/geometry/quadraturerules.hh> #include <dune/grid/utility/structuredgridfactory.hh> #include <dune/grid/yaspgrid.hh> + +#include <dumux/common/initialize.hh> #include <dumux/discretization/cellcentered/tpfa/fvgridgeometry.hh> #include <dumux/discretization/extrusion.hh> @@ -91,8 +92,8 @@ int main (int argc, char *argv[]) { using namespace Dumux; - // maybe initialize mpi - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); // test disc extrusion 1d->2d { diff --git 
a/test/discretization/staggered/test_staggered_free_flow_geometry.cc b/test/discretization/staggered/test_staggered_free_flow_geometry.cc index 87bd843022c4008a39ce68a945bbe0f4f8a20336..5a1871830c685223d4516d477c5d68983a7163f1 100644 --- a/test/discretization/staggered/test_staggered_free_flow_geometry.cc +++ b/test/discretization/staggered/test_staggered_free_flow_geometry.cc @@ -30,7 +30,9 @@ #include <dune/common/test/iteratortest.hh> #include <dune/grid/utility/structuredgridfactory.hh> #include <dune/grid/yaspgrid.hh> + +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> #include <dumux/common/intersectionmapper.hh> #include <dumux/common/defaultmappertraits.hh> @@ -60,8 +63,9 @@ int main (int argc, char *argv[]) { using namespace Dumux; - // maybe initialize mpi - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + std::cout << "Checking the FVGeometries, SCVs and SCV faces" << std::endl; // parse command line arguments and input file diff --git a/test/discretization/staggered/test_staggeredfvgeometry.cc b/test/discretization/staggered/test_staggeredfvgeometry.cc index bf1377d0ffba6f6d38293299b121649feb0453dc..1639c4d065e16697bbf62561a36b9f9b433a75a7 100644 --- a/test/discretization/staggered/test_staggeredfvgeometry.cc +++ b/test/discretization/staggered/test_staggeredfvgeometry.cc @@ -30,6 +30,8 @@ #include <dune/common/test/iteratortest.hh> #include <dune/grid/utility/structuredgridfactory.hh> #include <dune/grid/yaspgrid.hh> + +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> #include <dumux/common/intersectionmapper.hh> @@ -96,8 +98,8 @@ int main (int argc, char *argv[]) { using namespace Dumux; - // maybe initialize mpi - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); // parse command line arguments and input file Parameters::init(argc, argv); diff --git a/test/discretization/test_fvgridvariables.cc b/test/discretization/test_fvgridvariables.cc index 5b01b6411eb9383e1adaee10f8dfae751ab0f821..2931cbe8e5843c1bf33e3ef5a2677e346778fab6 100644 --- a/test/discretization/test_fvgridvariables.cc +++ b/test/discretization/test_fvgridvariables.cc @@ -30,6 +30,7 @@ // we use the 1p type tag here in order not to be obliged // to define grid flux vars cache & vol vars cache...
+#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/fvproblem.hh> #include <dumux/common/parameters.hh> @@ -95,10 +96,10 @@ public: int main (int argc, char *argv[]) { - Dune::MPIHelper::instance(argc, argv); - using namespace Dumux; - Dumux::Parameters::init(argc, argv); + + initialize(argc, argv); + Parameters::init(argc, argv); using TypeTag = Properties::TTag::GridVariablesTestBox; using Grid = GetPropType<TypeTag, Properties::Grid>; diff --git a/test/discretization/test_walldistance.cc b/test/discretization/test_walldistance.cc index afd9152c0227c654c9be8927d49382d6d3264c54..79775bcf62b20d21317a448b3430953c482f8654 100644 --- a/test/discretization/test_walldistance.cc +++ b/test/discretization/test_walldistance.cc @@ -25,10 +25,10 @@ #include <iostream> -#include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> #include <dune/grid/io/file/vtk.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> #include <dumux/io/grid/gridmanager.hh> #include <dumux/geometry/distancefield.hh> @@ -135,8 +135,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - Dune::MPIHelper::instance(argc, argv); + // initialize MPI and multithreading environment + // finalize is done automatically on exit + initialize(argc, argv); // initialize params Parameters::init(argc, argv); diff --git a/test/freeflow/navierstokes/angeli/main.cc b/test/freeflow/navierstokes/angeli/main.cc index 1911467ee65194bfaafc25d33a8063bfec24b1f9..6fc2168be6f4aa68245b569d1808ac5e0c28051c 100644 --- a/test/freeflow/navierstokes/angeli/main.cc +++ b/test/freeflow/navierstokes/angeli/main.cc @@ -32,6 +32,8 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> + +#include <dumux/common/initialize.hh> #include <dumux/common/dumuxmessage.hh> #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> @@ -59,9 +61,9 @@ int main(int argc, char** argv) using MomentumTypeTag = Properties::TTag::AngeliTestMomentum; using MassTypeTag = Properties::TTag::AngeliTestMass; - - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/freeflow/navierstokes/channel/1d/main.cc b/test/freeflow/navierstokes/channel/1d/main.cc index a6c9d6bd917cad99948a6d4cab0c3249a70a4619..3846f431363496c1b8af658eef803897c1567101 100644 --- a/test/freeflow/navierstokes/channel/1d/main.cc +++ b/test/freeflow/navierstokes/channel/1d/main.cc @@ -29,6 +29,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/dumuxmessage.hh> #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> @@ -56,8 +57,9 @@ int main(int argc, char** argv) using MomentumTypeTag = Properties::TTag::NavierStokesAnalyticMomentum; using MassTypeTag = Properties::TTag::NavierStokesAnalyticMass; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git
a/test/freeflow/navierstokes/channel/2d/main.cc b/test/freeflow/navierstokes/channel/2d/main.cc index bf11543f8f4b896f17b05138f950ceaaaafa44ec..990ff82dfc58453b85ef01123ab9d0512a8eb9a6 100644 --- a/test/freeflow/navierstokes/channel/2d/main.cc +++ b/test/freeflow/navierstokes/channel/2d/main.cc @@ -34,6 +34,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/dumuxmessage.hh> #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> @@ -60,8 +61,9 @@ int main(int argc, char** argv) using MomentumTypeTag = Properties::TTag::ChannelTestMomentum; using MassTypeTag = Properties::TTag::ChannelTestMass; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/freeflow/navierstokes/channel/3d/main.cc b/test/freeflow/navierstokes/channel/3d/main.cc index 4a3e6912a1fe19f6a4b254071bd1710b24645301..a01eb6b32212d62dd6580ab1a4811ca29bf75e1c 100644 --- a/test/freeflow/navierstokes/channel/3d/main.cc +++ b/test/freeflow/navierstokes/channel/3d/main.cc @@ -30,6 +30,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/dumuxmessage.hh> #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> @@ -56,8 +57,9 @@ int main(int argc, char** argv) using MomentumTypeTag = Properties::TTag::ThreeDChannelTestMomentum; using MassTypeTag = Properties::TTag::ThreeDChannelTestMass; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/freeflow/navierstokes/channel/pipe/main.cc b/test/freeflow/navierstokes/channel/pipe/main.cc index ca190e39c3e552c8492e7eb2fedd6037f5b0e879..5931fd3068b6b4df9a26fd08465784aeb8329c4a 100644 --- a/test/freeflow/navierstokes/channel/pipe/main.cc +++ b/test/freeflow/navierstokes/channel/pipe/main.cc @@ -24,6 +24,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/float_cmp.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/partial.hh> @@ -57,8 +58,9 @@ int main(int argc, char** argv) using MomentumTypeTag = Properties::TTag::PipeFlowMomentum; using MassTypeTag = Properties::TTag::PipeFlowMass; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // parse command line arguments and input file Parameters::init(argc, argv); diff --git a/test/freeflow/navierstokes/donea/main.cc b/test/freeflow/navierstokes/donea/main.cc index 7986ac5af9fe636e2ba6b1690114c82d4b3eb8b7..b73053a1f059c8d0a21d40dda37ccd85c29b384b 100644 --- a/test/freeflow/navierstokes/donea/main.cc +++ b/test/freeflow/navierstokes/donea/main.cc @@ -30,6 +30,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include 
<dumux/common/initialize.hh> #include <dumux/common/dumuxmessage.hh> #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> @@ -56,8 +57,9 @@ int main(int argc, char** argv) using MomentumTypeTag = Properties::TTag::DoneaTestMomentum; using MassTypeTag = Properties::TTag::DoneaTestMass; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/freeflow/navierstokes/donea/main_momentum.cc b/test/freeflow/navierstokes/donea/main_momentum.cc index c9248abf197f8e894692255268ae49bf1ef5f173..531a4d77b7c7240812ad5743488566d0ffd564e1 100644 --- a/test/freeflow/navierstokes/donea/main_momentum.cc +++ b/test/freeflow/navierstokes/donea/main_momentum.cc @@ -27,6 +27,7 @@ #include <iostream> #include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/dumuxmessage.hh> #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> @@ -88,7 +89,9 @@ int main(int argc, char** argv) using TypeTag = Properties::TTag::DoneaTestMomentum; - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); if (mpiHelper.rank() == 0) DumuxMessage::print(/*firstCall=*/true); diff --git a/test/freeflow/navierstokes/kovasznay/main.cc b/test/freeflow/navierstokes/kovasznay/main.cc index c8b054a9b24f5eb71dd9053ce6e84d5a46b0e545..bd183065ab9ba5e4c5371a5ec1f6025da5ff21e6 100644 --- a/test/freeflow/navierstokes/kovasznay/main.cc +++ b/test/freeflow/navierstokes/kovasznay/main.cc @@ -34,6 +34,7 @@ #include <dumux/assembly/staggeredfvassembler.hh> #include <dumux/assembly/diffmethod.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/dumuxmessage.hh> #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> @@ -56,8 +57,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::KovasznayTest; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/freeflow/navierstokes/periodic/main.cc b/test/freeflow/navierstokes/periodic/main.cc index aa562e9e9bebc63de1f9361dc5994f63c6cab1b3..5d16b04fb694fcbd1f0b3ce55357ad2cb9882159 100644 --- a/test/freeflow/navierstokes/periodic/main.cc +++ b/test/freeflow/navierstokes/periodic/main.cc @@ -30,6 +30,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/dumuxmessage.hh> #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> @@ -55,8 +56,9 @@ int main(int argc, char** argv) using MomentumTypeTag = Properties::TTag::PeriodicTestMomentum; using MassTypeTag = Properties::TTag::PeriodicTestMass; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = 
Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/freeflow/navierstokes/sincos/main.cc b/test/freeflow/navierstokes/sincos/main.cc index 563a6536b7d709506c0d7303d6a91ddec1773b70..199d984b1fc813598868c73e4fd2b40cac2063ec 100644 --- a/test/freeflow/navierstokes/sincos/main.cc +++ b/test/freeflow/navierstokes/sincos/main.cc @@ -33,6 +33,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/dumuxmessage.hh> #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> @@ -86,8 +87,9 @@ int main(int argc, char** argv) using MomentumTypeTag = Properties::TTag::SincosTestMomentum; using MassTypeTag = Properties::TTag::SincosTestMass; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/freeflow/navierstokesnc/channel/main.cc b/test/freeflow/navierstokesnc/channel/main.cc index c09c219c9b800fb8e04ce8f6cee73add9514ef7c..c60a13c491c7030e1c9df3d8883bb432f582dbb6 100644 --- a/test/freeflow/navierstokesnc/channel/main.cc +++ b/test/freeflow/navierstokesnc/channel/main.cc @@ -34,6 +34,7 @@ #include <dumux/assembly/staggeredfvassembler.hh> #include <dumux/assembly/diffmethod.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/dumuxmessage.hh> #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> @@ -51,8 +52,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::ChannelNCTest; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/freeflow/navierstokesnc/densitydrivenflow/main.cc b/test/freeflow/navierstokesnc/densitydrivenflow/main.cc index b67b928577ad5de1431fe344a7bbe3a06f083363..73b5156314c48ccc574f5cfbac541b63a6940148 100644 --- a/test/freeflow/navierstokesnc/densitydrivenflow/main.cc +++ b/test/freeflow/navierstokesnc/densitydrivenflow/main.cc @@ -34,6 +34,7 @@ #include <dumux/assembly/staggeredfvassembler.hh> #include <dumux/assembly/diffmethod.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/dumuxmessage.hh> #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> @@ -51,8 +52,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::DensityDrivenFlow; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/freeflow/navierstokesnc/maxwellstefan/main.cc b/test/freeflow/navierstokesnc/maxwellstefan/main.cc index 85c094f25abe4981e8db61e560b8eaf0fa255957..ab4d6a53f9cc35a214109de96ad65f19df3ccdf9 100644 --- a/test/freeflow/navierstokesnc/maxwellstefan/main.cc +++ b/test/freeflow/navierstokesnc/maxwellstefan/main.cc @@ -34,6 +34,7 @@ #include 
<dumux/assembly/staggeredfvassembler.hh> #include <dumux/assembly/diffmethod.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/dumuxmessage.hh> #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> @@ -51,8 +52,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::MaxwellStefanNCTest; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/freeflow/rans/main.cc b/test/freeflow/rans/main.cc index 39e2a52fa781efcef1cfd9b0185e21f19dbf5803..9db95a1dbc64d5825a078d704268a02851851be0 100644 --- a/test/freeflow/rans/main.cc +++ b/test/freeflow/rans/main.cc @@ -36,6 +36,7 @@ #include <dumux/assembly/staggeredfvassembler.hh> #include <dumux/assembly/diffmethod.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/dumuxmessage.hh> #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> @@ -75,8 +76,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/freeflow/ransnc/main.cc b/test/freeflow/ransnc/main.cc index 518732f24bd3e25bcf2f224e9e7b563fc2dd0b7f..130f2b7db37acaf9ecd3c43bd3e9467137d54a5b 100644 --- a/test/freeflow/ransnc/main.cc +++ b/test/freeflow/ransnc/main.cc @@ -34,6 +34,7 @@ #include <dumux/assembly/staggeredfvassembler.hh> #include <dumux/assembly/diffmethod.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/dumuxmessage.hh> #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> @@ -73,8 +74,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/freeflow/shallowwater/bowl/main.cc b/test/freeflow/shallowwater/bowl/main.cc index 205b955904654e16c091caaec4b75d5df69c0639..8e86b6819048f2cb6125e3643afe54d87b7762e1 100644 --- a/test/freeflow/shallowwater/bowl/main.cc +++ b/test/freeflow/shallowwater/bowl/main.cc @@ -31,6 +31,7 @@ #include <dune/grid/io/file/vtk.hh> #include <dumux/io/vtkoutputmodule.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -130,8 +131,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::Bowl; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if 
(mpiHelper.rank() == 0) diff --git a/test/freeflow/shallowwater/dambreak/main.cc b/test/freeflow/shallowwater/dambreak/main.cc index 3d9aa15439d37bf62db4a2c30c0b6e9e345c40ba..e875a5ccb8133062bbb20ca8c76827b8b50b9137 100644 --- a/test/freeflow/shallowwater/dambreak/main.cc +++ b/test/freeflow/shallowwater/dambreak/main.cc @@ -30,10 +30,9 @@ #include <dune/common/version.hh> #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> -#include <dune/grid/io/file/vtk.hh> #include <dumux/io/vtkoutputmodule.hh> -#include <dune/istl/io.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -64,8 +63,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::DamBreakWet; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/freeflow/shallowwater/poiseuilleflow/main.cc b/test/freeflow/shallowwater/poiseuilleflow/main.cc index 8d31944398240cff2e6c1c99ec6356c3a46262db..33c3d9154e400a645f8e87984f4eab76ae0eef9d 100644 --- a/test/freeflow/shallowwater/poiseuilleflow/main.cc +++ b/test/freeflow/shallowwater/poiseuilleflow/main.cc @@ -22,6 +22,7 @@ #include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -42,7 +43,9 @@ int main(int argc, char** argv) { using namespace Dumux; - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // parse command line arguments and input file Parameters::init(argc, argv); diff --git a/test/freeflow/shallowwater/roughchannel/main.cc b/test/freeflow/shallowwater/roughchannel/main.cc index a1c1201043ce336e3c33acf644b6967570a7d80c..37259eddcdc67a196c61de379dab6703bf317b12 100644 --- a/test/freeflow/shallowwater/roughchannel/main.cc +++ b/test/freeflow/shallowwater/roughchannel/main.cc @@ -31,6 +31,7 @@ #include <dune/grid/io/file/vtk.hh> #include <dumux/io/vtkoutputmodule.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -54,8 +55,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::RoughChannel; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/geomechanics/elastic/main.cc b/test/geomechanics/elastic/main.cc index c8404011e999ee4b21413c632e10e56a9bd2ca74..d97f25db3c4dc98b23fffdf54d883345d4e01e5e 100644 --- a/test/geomechanics/elastic/main.cc +++ b/test/geomechanics/elastic/main.cc @@ -32,6 +32,7 @@ #include "properties.hh" +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -58,8 +59,9 
@@ int main(int argc, char** argv) // stop time for the entire computation Dune::Timer timer; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/geomechanics/poroelastic/main.cc b/test/geomechanics/poroelastic/main.cc index a49d03f5d51328c63b4694beb7e4cb2ae46e4c11..2eb53bbb6d7bb1d53fb1413dd2c52239d0d9edde 100644 --- a/test/geomechanics/poroelastic/main.cc +++ b/test/geomechanics/poroelastic/main.cc @@ -33,6 +33,7 @@ #include "properties.hh" +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -100,8 +101,9 @@ int main(int argc, char** argv) // stop time for the entire computation Dune::Timer timer; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/geometry/test_0d1d_intersection.cc b/test/geometry/test_0d1d_intersection.cc index 942e06ad1ba35d54f3ade364fff66d67c3430560..391e21e902243804d5a4345cee94c215086b1f50 100644 --- a/test/geometry/test_0d1d_intersection.cc +++ b/test/geometry/test_0d1d_intersection.cc @@ -5,7 +5,6 @@ #include <initializer_list> #include <dune/common/exceptions.hh> -#include <dune/common/parallel/mpihelper.hh> #include <dune/common/fvector.hh> #include <dumux/geometry/intersectspointgeometry.hh> @@ -90,9 +89,6 @@ void testIntersections(std::vector<bool>& returns) int main (int argc, char *argv[]) { - // maybe initialize mpi - Dune::MPIHelper::instance(argc, argv); - // collect returns to determine exit code std::vector<bool> returns; diff --git a/test/geometry/test_1d1d_intersection.cc b/test/geometry/test_1d1d_intersection.cc index ef5590f642f22dec0eeee9eeea24d1a749b5aaed..2bb332c9e72ef47d212dc477333883900096e0e6 100644 --- a/test/geometry/test_1d1d_intersection.cc +++ b/test/geometry/test_1d1d_intersection.cc @@ -4,7 +4,6 @@ #include <algorithm> #include <dune/common/exceptions.hh> -#include <dune/common/parallel/mpihelper.hh> #include <dune/common/fvector.hh> #include <dumux/geometry/geometryintersection.hh> @@ -187,9 +186,6 @@ void testSegmentIntersections(std::vector<bool>& returns) int main (int argc, char *argv[]) { - // maybe initialize mpi - Dune::MPIHelper::instance(argc, argv); - // collect returns to determine exit code std::vector<bool> returns; diff --git a/test/geometry/test_1d2d_intersection.cc b/test/geometry/test_1d2d_intersection.cc index b98e22cc57f187ffd3609209067b2a0238fbcec6..920af49c612d8a715b0fab37319a532418340208 100644 --- a/test/geometry/test_1d2d_intersection.cc +++ b/test/geometry/test_1d2d_intersection.cc @@ -6,7 +6,6 @@ #include <type_traits> #include <dune/common/exceptions.hh> -#include <dune/common/parallel/mpihelper.hh> #include <dune/common/fvector.hh> #include <dune/geometry/multilineargeometry.hh> @@ -146,9 +145,6 @@ void performTests(std::vector<bool>& returns, const Quadrilateral& quad, const T int main (int argc, char *argv[]) { - // maybe initialize mpi - Dune::MPIHelper::instance(argc, argv); - // collect returns to determine exit code std::vector<bool> returns; 
diff --git a/test/geometry/test_intersectingentity_cartesiangrid.cc b/test/geometry/test_intersectingentity_cartesiangrid.cc index 8b97413dff0795647e37eb84ef6a3f373f7b85ba..7f2930de21809129aafd2a7e82e55ebe0977254e 100644 --- a/test/geometry/test_intersectingentity_cartesiangrid.cc +++ b/test/geometry/test_intersectingentity_cartesiangrid.cc @@ -1,10 +1,10 @@ #include <config.h> -#include <dune/common/parallel/mpihelper.hh> #include <dune/common/fvector.hh> #include <dune/common/exceptions.hh> #include <dune/grid/yaspgrid.hh> +#include <dumux/common/initialize.hh> #include <dumux/geometry/boundingboxtree.hh> #include <dumux/geometry/intersectingentities.hh> @@ -35,7 +35,9 @@ void testIntersectingEntityCartesianGrid() int main(int argc, char* argv[]) { - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + testIntersectingEntityCartesianGrid<1>(); testIntersectingEntityCartesianGrid<2>(); testIntersectingEntityCartesianGrid<3>(); diff --git a/test/geometry/test_intersectionentityset.cc b/test/geometry/test_intersectionentityset.cc index d75c22544a3045c758cb1f94b28ab39caf52997f..7e424356263a74a772080cfaf194fa836d4bb6c0 100644 --- a/test/geometry/test_intersectionentityset.cc +++ b/test/geometry/test_intersectionentityset.cc @@ -6,13 +6,13 @@ #include <dune/common/exceptions.hh> #include <dune/common/float_cmp.hh> -#include <dune/common/parallel/mpihelper.hh> #include <dune/geometry/type.hh> #include <dune/geometry/multilineargeometry.hh> #include <dune/grid/common/gridfactory.hh> #include <dune/grid/io/file/gmshreader.hh> #include <dune/alugrid/grid.hh> +#include <dumux/common/initialize.hh> #include <dumux/geometry/geometricentityset.hh> #include <dumux/geometry/intersectionentityset.hh> @@ -20,8 +20,8 @@ int main (int argc, char *argv[]) { using namespace Dumux; - // maybe initialize mpi - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); /////////////////////////////////////////////////////////////////////// // Extract the boundary of a given grid and intersect it with the grid diff --git a/test/io/gridmanager/test_gmshboundaryflag.cc b/test/io/gridmanager/test_gmshboundaryflag.cc index 41529064581e57b9b1ac6221006ff27298f5b441..6f824c137638e80fbb2816859ef1b0b49acb5d3b 100644 --- a/test/io/gridmanager/test_gmshboundaryflag.cc +++ b/test/io/gridmanager/test_gmshboundaryflag.cc @@ -26,11 +26,11 @@ #if HAVE_DUNE_ALUGRID #include <dune/alugrid/grid.hh> -#include <dune/common/parallel/mpihelper.hh> #include <dumux/common/parameters.hh> #include <dumux/common/boundaryflag.hh> #include <dumux/io/grid/gridmanager.hh> +#include <dumux/common/initialize.hh> #include <dumux/discretization/box.hh> #include <dumux/discretization/cctpfa.hh> @@ -64,7 +64,8 @@ int main(int argc, char** argv) { using namespace Dumux; - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); Parameters::init(argc, argv, "test_gmshboundaryflag.input"); diff --git a/test/io/gridmanager/test_gridmanager_cake.cc b/test/io/gridmanager/test_gridmanager_cake.cc index fbe0a4df4192594145cc77021a951d01b2705643..ea2b277f075aef9d52f02fb50113b5aa2bdf5e56 100644 --- a/test/io/gridmanager/test_gridmanager_cake.cc +++ b/test/io/gridmanager/test_gridmanager_cake.cc @@ -25,10 +25,10 @@ #include <string> #include <iostream> -#include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> #include 
<dune/grid/io/file/vtk.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> #include <dumux/io/grid/cakegridmanager.hh> @@ -76,8 +76,8 @@ void testCakeGridManager(const std::string& name) int main(int argc, char** argv) { - // initialize MPI, finalize is done automatically on exit - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); // first read parameters from input file Dumux::Parameters::init(argc, argv, "test_gridmanager_cake.input"); diff --git a/test/io/gridmanager/test_gridmanager_dgf.cc b/test/io/gridmanager/test_gridmanager_dgf.cc index b4e4e2fbe73863aec26183bbd33a156f5f98fb72..cc20787c90015d1fa035c1609e616634cb6311b0 100644 --- a/test/io/gridmanager/test_gridmanager_dgf.cc +++ b/test/io/gridmanager/test_gridmanager_dgf.cc @@ -22,14 +22,15 @@ #include <config.h> #include <iostream> -#include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> #include "gridmanagertests.hh" int main(int argc, char** argv) { - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); Dumux::Parameters::init(argc, argv, "test_gridmanager_dgf.input"); diff --git a/test/io/gridmanager/test_gridmanager_gmsh_3d.cc b/test/io/gridmanager/test_gridmanager_gmsh_3d.cc index 0303aa2d4782062ec71261e30b9d55e0a49b5da0..d89ce8b6cdbcca70a366b30af1d0132f4f56748a 100644 --- a/test/io/gridmanager/test_gridmanager_gmsh_3d.cc +++ b/test/io/gridmanager/test_gridmanager_gmsh_3d.cc @@ -22,7 +22,7 @@ #include <config.h> #include <iostream> -#include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> #include "gridmanagertests.hh" @@ -30,8 +30,8 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); Parameters::init(argc, argv, "test_gridmanager_gmsh_3d.input"); const auto name = getParam<std::string>("Problem.Name"); const auto refine = Dumux::getParam<bool>("Grid.Refine", true); diff --git a/test/io/gridmanager/test_gridmanager_gmsh_e_markers.cc b/test/io/gridmanager/test_gridmanager_gmsh_e_markers.cc index 4fd3826811c5705d68c02b9a64886399ef8b6f21..57b842a9a0f9f419103904a873ee29b9da7056a8 100644 --- a/test/io/gridmanager/test_gridmanager_gmsh_e_markers.cc +++ b/test/io/gridmanager/test_gridmanager_gmsh_e_markers.cc @@ -22,14 +22,15 @@ #include <config.h> #include <iostream> -#include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> #include "gridmanagertests.hh" int main(int argc, char** argv) { - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); Dumux::Parameters::init(argc, argv, "test_gridmanager_gmsh_e_markers.input"); diff --git a/test/io/gridmanager/test_gridmanager_mmesh.cc b/test/io/gridmanager/test_gridmanager_mmesh.cc index 4d6805ad63d440eb5558a3a94b8fdf68aa202d29..e273c59ad04a49195ad7b64b2a88cc3c2e3b3ff5 100644 --- a/test/io/gridmanager/test_gridmanager_mmesh.cc +++ b/test/io/gridmanager/test_gridmanager_mmesh.cc @@ -20,16 +20,16 @@ */ #include <config.h> -#include <dune/common/parallel/mpihelper.hh> #include <dune/grid/io/file/vtk.hh> +#include <dumux/common/initialize.hh> #include 
<dumux/io/grid/gridmanager_mmesh.hh> int main(int argc, char** argv) { using namespace Dumux; - // Initialize MPI, finalize is done automatically on exit. - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); // First set parameters Dumux::Parameters::init([](auto& params){ diff --git a/test/io/gridmanager/test_gridmanager_subgrid.cc b/test/io/gridmanager/test_gridmanager_subgrid.cc index 0426bc008b0faed96f5ad58f8147eca5a34e404a..283ad4ca17ea741509fd3bfd496339dfa6576eb2 100644 --- a/test/io/gridmanager/test_gridmanager_subgrid.cc +++ b/test/io/gridmanager/test_gridmanager_subgrid.cc @@ -23,7 +23,6 @@ #include <cmath> #include <string> -#include <dune/common/parallel/mpihelper.hh> #include <dune/common/fvector.hh> #include <dune/common/timer.hh> #include <dune/grid/io/file/vtk.hh> @@ -33,6 +32,7 @@ #endif #include <dumux/io/grid/gridmanager_sub.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> @@ -143,8 +143,8 @@ int main(int argc, char** argv) { using namespace Dumux; - // Initialize MPI, finalize is done automatically on exit. - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); // First read parameters from input file. Dumux::Parameters::init(argc, argv); diff --git a/test/io/gridmanager/test_gridmanager_vtk.cc b/test/io/gridmanager/test_gridmanager_vtk.cc index bde55e77b56a6878639c3e9121786dcc3fa30d40..b935dc363eb39edbf54ae4e1275c68c027fae2a6 100644 --- a/test/io/gridmanager/test_gridmanager_vtk.cc +++ b/test/io/gridmanager/test_gridmanager_vtk.cc @@ -24,9 +24,9 @@ #include <algorithm> #include <tuple> -#include <dune/common/parallel/mpihelper.hh> #include <dune/grid/io/file/vtk.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> #include <dumux/io/grid/gridmanager.hh> @@ -88,7 +88,8 @@ void testVTKReader(const std::string& gridName) int main(int argc, char** argv) { - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); Dumux::Parameters::init(argc, argv, "test_gridmanager_vtk.input"); diff --git a/test/io/gridmanager/test_pnm_gridmanager.cc b/test/io/gridmanager/test_pnm_gridmanager.cc index 7ed07af8f561fb68b1fb8189b64f91f70487d58f..b20fcd1baf36c11365d8b2e76402b0f65f448451 100644 --- a/test/io/gridmanager/test_pnm_gridmanager.cc +++ b/test/io/gridmanager/test_pnm_gridmanager.cc @@ -23,6 +23,7 @@ */ #include "config.h" +#include <dumux/common/initialize.hh> #include <dumux/common/dumuxmessage.hh> #include <dumux/common/parameters.hh> #include <dumux/io/grid/porenetwork/gridmanager.hh> @@ -134,8 +135,9 @@ int main(int argc, char** argv) try { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/io/gridmanager/test_pnm_subgriddata.cc b/test/io/gridmanager/test_pnm_subgriddata.cc index 5f3775f64cabbb31942a856f06c180b897efb283..10f632cebb1a5fbb9bce718d1277b1631cd64315 100644 --- a/test/io/gridmanager/test_pnm_subgriddata.cc +++ b/test/io/gridmanager/test_pnm_subgriddata.cc @@ -23,6 +23,7 @@ */ #include "config.h" +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> #include 
<dumux/io/grid/porenetwork/gridmanager.hh> #include <dumux/io/grid/gridmanager_sub.hh> @@ -35,8 +36,8 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); // parse command line arguments Parameters::init(argc, argv); diff --git a/test/io/vtk/test_vtk_staggeredfreeflowpvnames.cc b/test/io/vtk/test_vtk_staggeredfreeflowpvnames.cc index f502115f29f26edb4417d4ca58e7dac13431c1c8..e9b60a2f85c43e8e5f4b31fd6a6e45bd1049d95c 100644 --- a/test/io/vtk/test_vtk_staggeredfreeflowpvnames.cc +++ b/test/io/vtk/test_vtk_staggeredfreeflowpvnames.cc @@ -33,6 +33,7 @@ #include <dune/common/float_cmp.hh> #include <dune/grid/yaspgrid.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/dumuxmessage.hh> @@ -299,8 +300,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/io/vtk/test_vtkoutputmodule.cc b/test/io/vtk/test_vtkoutputmodule.cc index b7ae1d4758be87212ae2cb2287224744a5251ed8..de763412c4a7c59b792868d4391f944b4dd07a62 100644 --- a/test/io/vtk/test_vtkoutputmodule.cc +++ b/test/io/vtk/test_vtkoutputmodule.cc @@ -2,9 +2,9 @@ #include <array> -#include <dune/common/parallel/mpihelper.hh> #include <dune/grid/yaspgrid.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> #include <dumux/io/vtkoutputmodule.hh> #include <dumux/discretization/cellcentered/tpfa/fvgridgeometry.hh> @@ -13,7 +13,8 @@ int main(int argc, char** argv) { using namespace Dumux; - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + initialize(argc, argv); Parameters::init([](Dune::ParameterTree& params) { diff --git a/test/io/vtk/test_vtkreader.cc b/test/io/vtk/test_vtkreader.cc index fe05d7533ccdffdd3f5a39cce784f0cf179d7dea..adc90ec5787a37dac60704dffb6853c544e72a60 100644 --- a/test/io/vtk/test_vtkreader.cc +++ b/test/io/vtk/test_vtkreader.cc @@ -23,7 +23,6 @@ #include <iostream> #include <memory> -#include <dune/common/parallel/mpihelper.hh> #include <dune/grid/common/mcmgmapper.hh> #include <dune/grid/io/file/vtk/vtkwriter.hh> @@ -37,12 +36,14 @@ #include <dune/foamgrid/foamgrid.hh> #endif +#include <dumux/common/initialize.hh> #include <dumux/common/exceptions.hh> #include <dumux/io/vtk/vtkreader.hh> int main(int argc, char** argv) { - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); if (argc != 3) DUNE_THROW(Dune::IOError, "Needs two arguments, the vtk file name and an output file base name"); diff --git a/test/linear/test_linearsolver.cc b/test/linear/test_linearsolver.cc index 67e51b91115836d082cfcff7d01013ed334ec356..441d709893c1b2a72b2e029268eadcc7b941d263 100644 --- a/test/linear/test_linearsolver.cc +++ b/test/linear/test_linearsolver.cc @@ -17,6 +17,7 @@ #include <dune/istl/test/laplacian.hh> #include <dune/istl/paamg/test/anisotropic.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> #include <dumux/discretization/method.hh> #include <dumux/linear/linearsolvertraits.hh> @@ 
-55,7 +56,8 @@ int main(int argc, char* argv[]) { using namespace Dumux; - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); Parameters::init(argc, argv, "params.input"); diff --git a/test/multidomain/boundary/darcydarcy/1p_1p/main.cc b/test/multidomain/boundary/darcydarcy/1p_1p/main.cc index 1005c4dcca0f1b1cf35697455791cd2b63470465..01877d9066de1e8921c5806281479ce002ce4d17 100644 --- a/test/multidomain/boundary/darcydarcy/1p_1p/main.cc +++ b/test/multidomain/boundary/darcydarcy/1p_1p/main.cc @@ -34,6 +34,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -58,8 +59,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/boundary/darcydarcy/1p_2p/main.cc b/test/multidomain/boundary/darcydarcy/1p_2p/main.cc index 525c22f537ee4e7724bcd87f09c5f3c8a434520b..6850c29f4182fcc719a31b29d2ea75749f0ff3f1 100644 --- a/test/multidomain/boundary/darcydarcy/1p_2p/main.cc +++ b/test/multidomain/boundary/darcydarcy/1p_2p/main.cc @@ -30,6 +30,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -48,8 +49,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/boundary/freeflowporenetwork/1p_1p/main.cc b/test/multidomain/boundary/freeflowporenetwork/1p_1p/main.cc index 4f63c8496cc11ba7f358b436ad0cf4c5af911e40..ac7c258ee4343b9ab82a10e3d227f1fb92211045 100644 --- a/test/multidomain/boundary/freeflowporenetwork/1p_1p/main.cc +++ b/test/multidomain/boundary/freeflowporenetwork/1p_1p/main.cc @@ -30,6 +30,7 @@ #include <dune/common/timer.hh> #include <dumux/assembly/diffmethod.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/dumuxmessage.hh> #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> @@ -72,8 +73,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/boundary/freeflowporousmedium/1p_1p/convergence/main.cc b/test/multidomain/boundary/freeflowporousmedium/1p_1p/convergence/main.cc index 8a9a0044d80d02a2b5b3d8d08ee2301cf9edc6ae..b06352f32984befe3656e1184527a5d285f1a6d4 100644 --- a/test/multidomain/boundary/freeflowporousmedium/1p_1p/convergence/main.cc +++ 
b/test/multidomain/boundary/freeflowporousmedium/1p_1p/convergence/main.cc @@ -29,6 +29,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -156,8 +157,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/boundary/freeflowporousmedium/1p_1p/main.cc b/test/multidomain/boundary/freeflowporousmedium/1p_1p/main.cc index 17abbaac06ee5cc9eb75793adddd40c1e1433c10..9b06d2589312e4ae9543422498885d43761ffbf7 100644 --- a/test/multidomain/boundary/freeflowporousmedium/1p_1p/main.cc +++ b/test/multidomain/boundary/freeflowporousmedium/1p_1p/main.cc @@ -30,6 +30,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -50,8 +51,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/boundary/stokesdarcy/1p2c_1p2c/diffusionlawcomparison/main.cc b/test/multidomain/boundary/stokesdarcy/1p2c_1p2c/diffusionlawcomparison/main.cc index 9e6bc2159a13acf80ce22d27f136a7c2b77f06ca..5b24f8bf54873657f7f7ed9a13f760f9cdecdd62 100644 --- a/test/multidomain/boundary/stokesdarcy/1p2c_1p2c/diffusionlawcomparison/main.cc +++ b/test/multidomain/boundary/stokesdarcy/1p2c_1p2c/diffusionlawcomparison/main.cc @@ -31,6 +31,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/partial.hh> @@ -52,8 +53,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/boundary/stokesdarcy/1p2c_1p2c/main.cc b/test/multidomain/boundary/stokesdarcy/1p2c_1p2c/main.cc index 4c2ac5bfa201558e6a8a7e2d354032460e25e20c..34d962664057016761204bb324e513045e7a4919 100644 --- a/test/multidomain/boundary/stokesdarcy/1p2c_1p2c/main.cc +++ b/test/multidomain/boundary/stokesdarcy/1p2c_1p2c/main.cc @@ -30,6 +30,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/partial.hh> @@ -51,8 +52,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit 
- const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/boundary/stokesdarcy/1p2c_2p2c/main.cc b/test/multidomain/boundary/stokesdarcy/1p2c_2p2c/main.cc index eed0b9f2940823adf019fcd0b4f819454a21a41b..e74582aa5787de71d12fe3c9c5fbaa753c88014f 100644 --- a/test/multidomain/boundary/stokesdarcy/1p2c_2p2c/main.cc +++ b/test/multidomain/boundary/stokesdarcy/1p2c_2p2c/main.cc @@ -30,6 +30,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/partial.hh> @@ -53,8 +54,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/boundary/stokesdarcy/1p3c_1p3c/main.cc b/test/multidomain/boundary/stokesdarcy/1p3c_1p3c/main.cc index ed7fdbf2491cd25e252e2f230b2e4e7895b69b6e..407d8638cc88527e43e97c07c73ec8d21562d306 100644 --- a/test/multidomain/boundary/stokesdarcy/1p3c_1p3c/main.cc +++ b/test/multidomain/boundary/stokesdarcy/1p3c_1p3c/main.cc @@ -30,6 +30,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/partial.hh> @@ -51,8 +52,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/boundary/stokesdarcy/1p_1p/convergencetest/main.cc b/test/multidomain/boundary/stokesdarcy/1p_1p/convergencetest/main.cc index c1d66e50901d8ef2e7f756ae989d65637e0f0827..3eb28eaefcf1531f5bb7397eb3705bedeb4280f9 100644 --- a/test/multidomain/boundary/stokesdarcy/1p_1p/convergencetest/main.cc +++ b/test/multidomain/boundary/stokesdarcy/1p_1p/convergencetest/main.cc @@ -29,6 +29,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/partial.hh> @@ -155,8 +156,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/boundary/stokesdarcy/1p_1p/main.cc b/test/multidomain/boundary/stokesdarcy/1p_1p/main.cc index b4c3684eabe05da1a62f348f88b42355549ab3f0..cb2221f26613ecfef11eac99f934aa11366bb25d 100644 --- 
a/test/multidomain/boundary/stokesdarcy/1p_1p/main.cc +++ b/test/multidomain/boundary/stokesdarcy/1p_1p/main.cc @@ -31,6 +31,7 @@ #include <dune/common/timer.hh> #include <dune/istl/io.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/partial.hh> @@ -53,8 +54,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/boundary/stokesdarcy/1p_2p/main.cc b/test/multidomain/boundary/stokesdarcy/1p_2p/main.cc index 946fedec05185cb46c9c1a58b001e30c4f3eac85..7cc472f576c451c3330c8213ecb4b6bfdfb81640 100644 --- a/test/multidomain/boundary/stokesdarcy/1p_2p/main.cc +++ b/test/multidomain/boundary/stokesdarcy/1p_2p/main.cc @@ -30,6 +30,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -50,8 +51,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/embedded/1d3d/1p2c_richards2c/main.cc b/test/multidomain/embedded/1d3d/1p2c_richards2c/main.cc index 2bbddbda7f4f09df43ee32824a7df34f0d4d8c5b..b8fa50bd275f4d5f7c5a68d08e01c64102a017b7 100644 --- a/test/multidomain/embedded/1d3d/1p2c_richards2c/main.cc +++ b/test/multidomain/embedded/1d3d/1p2c_richards2c/main.cc @@ -31,6 +31,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -139,8 +140,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/embedded/1d3d/1p_1p/main.cc b/test/multidomain/embedded/1d3d/1p_1p/main.cc index 05c21c553786f49bc31bd2160b86069576c57fc4..7ed3cece2f7d24a491ef9f5e42164d7f44c49c04 100644 --- a/test/multidomain/embedded/1d3d/1p_1p/main.cc +++ b/test/multidomain/embedded/1d3d/1p_1p/main.cc @@ -31,6 +31,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -51,8 +52,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, 
argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/embedded/1d3d/1p_richards/main.cc b/test/multidomain/embedded/1d3d/1p_richards/main.cc index 941471f3a40b21079cbc28ff5de3b8b63aae7cfd..53dbfbb8c70173fe3e364ea3e8e2f7e8791003be 100644 --- a/test/multidomain/embedded/1d3d/1p_richards/main.cc +++ b/test/multidomain/embedded/1d3d/1p_richards/main.cc @@ -31,6 +31,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -58,8 +59,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/embedded/2d3d/1p_1p/main.cc b/test/multidomain/embedded/2d3d/1p_1p/main.cc index 9ed63755aa547cf809ac14258442ca729003c6d7..186ed3cc17a98dcfc011e176a2914f0b51ab84cd 100644 --- a/test/multidomain/embedded/2d3d/1p_1p/main.cc +++ b/test/multidomain/embedded/2d3d/1p_1p/main.cc @@ -31,6 +31,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -107,8 +108,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/facet/1p_1p/analytical/main.cc b/test/multidomain/facet/1p_1p/analytical/main.cc index 5961779774c4bf9d1f60bfc10bd0ed8b6b466a65..ee2973b7a7c761412143faf1df26cbb8234efd6e 100644 --- a/test/multidomain/facet/1p_1p/analytical/main.cc +++ b/test/multidomain/facet/1p_1p/analytical/main.cc @@ -29,6 +29,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/geometry/quadraturerules.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -155,8 +156,9 @@ int main(int argc, char** argv) ////////////////////////////////////////////////////// ////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/facet/1p_1p/gravity/main.cc b/test/multidomain/facet/1p_1p/gravity/main.cc index eae2efaf32feae92513d0a0cb1ded259f024a829..f88d2a81abcac90292f802d78cbad87c4c2e9e4e 100644 --- a/test/multidomain/facet/1p_1p/gravity/main.cc +++ b/test/multidomain/facet/1p_1p/gravity/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include 
<dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -52,8 +53,9 @@ int main(int argc, char** argv) ////////////////////////////////////////////////////// ////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/facet/1p_1p/linearprofile/main.cc b/test/multidomain/facet/1p_1p/linearprofile/main.cc index 938953174d2e1c134b8cf6af6f001b221790c95f..3590e1a813458175acb1bf0b6b344497c52bacef 100644 --- a/test/multidomain/facet/1p_1p/linearprofile/main.cc +++ b/test/multidomain/facet/1p_1p/linearprofile/main.cc @@ -27,6 +27,7 @@ #include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -53,8 +54,9 @@ int main(int argc, char** argv) ////////////////////////////////////////////////////// ////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/facet/1p_1p/threedomain/main.cc b/test/multidomain/facet/1p_1p/threedomain/main.cc index 837dbf3f9b958f80647f902d50884163e0c7e3b1..6d050b23553543125600cdc39a1de289b04d3f29 100644 --- a/test/multidomain/facet/1p_1p/threedomain/main.cc +++ b/test/multidomain/facet/1p_1p/threedomain/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -89,8 +90,9 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/facet/1pnc_1pnc/main.cc b/test/multidomain/facet/1pnc_1pnc/main.cc index fd905ba855adc00b939acde55def1ba4cc33f527..51517716c5776249137ebc6cc0c4c20bfb0c3f22 100644 --- a/test/multidomain/facet/1pnc_1pnc/main.cc +++ b/test/multidomain/facet/1pnc_1pnc/main.cc @@ -27,6 +27,7 @@ #include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -85,8 +86,9 @@ int main(int argc, char** argv) using BulkTypeTag = Properties::TTag::BULKTYPETAG; using FacetTypeTag = Properties::TTag::FACETTYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = 
Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/facet/test_facetcouplingmapper.cc b/test/multidomain/facet/test_facetcouplingmapper.cc index 9ff55156a135fd534201303c9e090f628b561e22..18ff376421108b594d8cd91883f7fd31f16a22b7 100644 --- a/test/multidomain/facet/test_facetcouplingmapper.cc +++ b/test/multidomain/facet/test_facetcouplingmapper.cc @@ -33,6 +33,7 @@ #include <dune/alugrid/grid.hh> #include <dune/foamgrid/foamgrid.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> #include <dumux/discretization/method.hh> #include <dumux/discretization/cellcentered/tpfa/fvgridgeometry.hh> @@ -128,8 +129,8 @@ auto makeBulkFVGridGeometry(const GridManager& gridManager, // main program int main (int argc, char *argv[]) { - // maybe initialize mpi - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); // parse command line argument parameters Dumux::Parameters::init(argc, argv); diff --git a/test/multidomain/facet/test_facetcouplingmapper_boundary.cc b/test/multidomain/facet/test_facetcouplingmapper_boundary.cc index 5d0bf9ac4535993d79875c2b210fb8a4912c8f69..efe11c18a7a0f725e2c545786c24ff5a9d8b2982 100644 --- a/test/multidomain/facet/test_facetcouplingmapper_boundary.cc +++ b/test/multidomain/facet/test_facetcouplingmapper_boundary.cc @@ -32,6 +32,7 @@ #include <dune/alugrid/grid.hh> #include <dune/foamgrid/foamgrid.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> #include <dumux/discretization/cellcentered/tpfa/fvgridgeometry.hh> #include <dumux/multidomain/facet/gridmanager.hh> @@ -52,8 +53,8 @@ bool checkEquality(const Pos1& p1, const Pos2& p2, typename Pos1::value_type eps int main (int argc, char *argv[]) { - // maybe initialize mpi - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); // initialize parameter tree Dumux::Parameters::init(argc, argv); diff --git a/test/multidomain/facet/test_gridmanager.cc b/test/multidomain/facet/test_gridmanager.cc index ff0fec97d075bdcb1ff5aff85e1d6a9e976c6337..29a17a7f30775a724c9cded7e1b80f341ae78d40 100644 --- a/test/multidomain/facet/test_gridmanager.cc +++ b/test/multidomain/facet/test_gridmanager.cc @@ -35,6 +35,7 @@ #include <dune/grid/io/file/vtk/vtkwriter.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> #include <dumux/io/vtk/function.hh> #include <dumux/multidomain/facet/gridmanager.hh> @@ -45,8 +46,8 @@ int main (int argc, char *argv[]) { - // maybe initialize mpi - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); // parse command line argument parameters Dumux::Parameters::init(argc, argv); diff --git a/test/multidomain/facet/test_vertexmapper.cc b/test/multidomain/facet/test_vertexmapper.cc index 2583e183e46a12d6069be52032dde834683b2121..5c210e026c280847d468700364c0b2615c01a9ff 100644 --- a/test/multidomain/facet/test_vertexmapper.cc +++ b/test/multidomain/facet/test_vertexmapper.cc @@ -31,6 +31,7 @@ #include <dune/grid/io/file/vtk/vtkwriter.hh> #include <dumux/io/vtk/function.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> #include <dumux/multidomain/facet/gridmanager.hh> #include <dumux/multidomain/facet/codimonegridadapter.hh> @@ -94,8 +95,8 @@ GlobalPosition getDisplacement(const GlobalPosition& pos) int main (int argc, char 
*argv[]) { - // initialize mpi - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); // initialize parameter tree Dumux::Parameters::init(argc, argv); diff --git a/test/multidomain/facet/tracer_tracer/main.cc b/test/multidomain/facet/tracer_tracer/main.cc index e64f9e1b5c1c5fda177ab2cdc3dc7e044b4f9cbc..e46a5722a20deecfed255c116b422f36217f49c4 100644 --- a/test/multidomain/facet/tracer_tracer/main.cc +++ b/test/multidomain/facet/tracer_tracer/main.cc @@ -29,6 +29,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/geometry/quadraturerules.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -155,8 +156,9 @@ int main(int argc, char** argv) ////////////////////////////////////////////////////// ////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/poromechanics/el1p/main.cc b/test/multidomain/poromechanics/el1p/main.cc index 1f2b8fe29b7bcf765ffc01a94387aaf6e8149074..384e8a6e5a752c35f77c8a27c55b361c4a2d6f14 100644 --- a/test/multidomain/poromechanics/el1p/main.cc +++ b/test/multidomain/poromechanics/el1p/main.cc @@ -27,6 +27,7 @@ #include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -48,8 +49,9 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/multidomain/poromechanics/el2p/main.cc b/test/multidomain/poromechanics/el2p/main.cc index e3ced45b3cb14124397fddc06fe1d3fcec675c59..462dcdf16f8e1bf9a59b29bb859287e425172d11 100644 --- a/test/multidomain/poromechanics/el2p/main.cc +++ b/test/multidomain/poromechanics/el2p/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -49,8 +50,9 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/nonlinear/newton/test_newton.cc b/test/nonlinear/newton/test_newton.cc index 23f631a92a7f389e22c2de1a3568d9e0ba2d9276..07bf3285a01968df560daac1b3e913bbc9484f38 100644 --- 
a/test/nonlinear/newton/test_newton.cc +++ b/test/nonlinear/newton/test_newton.cc @@ -7,8 +7,9 @@ #include <dune/common/exceptions.hh> #include <dune/common/float_cmp.hh> -#include <dune/common/parallel/mpihelper.hh> #include <dune/istl/bvector.hh> + +#include <dumux/common/initialize.hh> #include <dumux/nonlinear/newtonsolver.hh> /* @@ -78,8 +79,8 @@ int main(int argc, char* argv[]) { using namespace Dumux; - // maybe initialize MPI - Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); // use the Newton solver to find a solution to a scalar equation using Assembler = MockScalarAssembler; diff --git a/test/parallel/partition/main.cc b/test/parallel/partition/main.cc index eb7d7473044b9239cacc648253f0e978552ffedf..92c0571980c6f9038ac70fe241afe9f37ec57498 100644 --- a/test/parallel/partition/main.cc +++ b/test/parallel/partition/main.cc @@ -29,6 +29,7 @@ #include <dune/grid/io/file/gmshreader.hh> #include <dune/grid/io/file/vtk/vtkwriter.hh> +#include <dumux/common/initialize.hh> #include <dumux/parallel/scotchpartitioner.hh> template<class GV> @@ -114,7 +115,9 @@ void test3D(int rank, std::size_t numProcessors) int main (int argc , char **argv) { - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); test2D(mpiHelper.rank(), mpiHelper.size()); test3D(mpiHelper.rank(), mpiHelper.size()); diff --git a/test/porenetwork/1p/main.cc b/test/porenetwork/1p/main.cc index c7f99a228b606d496bb80e056e2fec50ec51f20c..9644d3bb8ccde6bbe42404d49923de79db315deb 100644 --- a/test/porenetwork/1p/main.cc +++ b/test/porenetwork/1p/main.cc @@ -32,6 +32,7 @@ #include <dune/grid/io/file/vtk.hh> #include <dumux/assembly/fvassembler.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -48,8 +49,9 @@ int main(int argc, char** argv) using TypeTag = Properties::TTag::PNMOnePProblem; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porenetwork/1p/noncreepingflow/main.cc b/test/porenetwork/1p/noncreepingflow/main.cc index 5547c15e6ed90ab152dc8142bb040cee90ffb658..40746fae1a1bc50150800110fb3002e9a9c82af5 100644 --- a/test/porenetwork/1p/noncreepingflow/main.cc +++ b/test/porenetwork/1p/noncreepingflow/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dumux/assembly/fvassembler.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -46,8 +47,9 @@ int main(int argc, char** argv) using TypeTag = Properties::TTag::PNMOnePNonCreepingProblem; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porenetwork/1p/nonisothermal/main.cc b/test/porenetwork/1p/nonisothermal/main.cc index
0aa985d3d488cc4aec8ce9c35b5e83f5386fff79..82aa340af35472c4dc1a4c946a9e4d09a5cccdd5 100644 --- a/test/porenetwork/1p/nonisothermal/main.cc +++ b/test/porenetwork/1p/nonisothermal/main.cc @@ -31,6 +31,7 @@ #include <dune/grid/io/file/vtk.hh> #include <dumux/assembly/fvassembler.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/dumuxmessage.hh> #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> @@ -49,8 +50,9 @@ int main(int argc, char** argv) using TypeTag = Properties::TTag::PNMOnePProblem; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porenetwork/1pnc/main.cc b/test/porenetwork/1pnc/main.cc index bb4b6ece946e18ceafb67fb35e27352b0659de03..5a9435615049a11c54f4f6c0cebbf7401433c869 100644 --- a/test/porenetwork/1pnc/main.cc +++ b/test/porenetwork/1pnc/main.cc @@ -31,6 +31,7 @@ #include <dune/grid/io/file/dgfparser/dgfexception.hh> #include <dumux/assembly/fvassembler.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/dumuxmessage.hh> #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> @@ -48,8 +49,9 @@ int main(int argc, char** argv) using TypeTag = Properties::TTag::PNMOnePTwoCProblem; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porenetwork/2p/main.cc b/test/porenetwork/2p/main.cc index 40b0c474cbceb5b30596764b31136c999f292e1a..4a017e43381ef293a04949aa0e70a2b7036a8969 100644 --- a/test/porenetwork/2p/main.cc +++ b/test/porenetwork/2p/main.cc @@ -32,6 +32,7 @@ #include <dune/grid/io/file/vtk.hh> #include <dumux/assembly/fvassembler.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -48,8 +49,9 @@ int main(int argc, char** argv) using TypeTag = Properties::TTag::DrainageProblem; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porenetwork/2p/static/main.cc b/test/porenetwork/2p/static/main.cc index 8b160aaf42f2a946f4cb44067a483d5177d4d39f..e88754fb8a9b0a727a00ddc76e7deef8d6a5d26a 100644 --- a/test/porenetwork/2p/static/main.cc +++ b/test/porenetwork/2p/static/main.cc @@ -32,6 +32,7 @@ #include <dune/grid/io/file/vtk.hh> #include <dune/grid/io/file/vtk/vtksequencewriter.hh> + #include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -78,8 +79,9 @@ int main(int argc, char** argv) using TypeTag = Properties::TTag::DrainageProblem; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); 
+ const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/1p/compressible/instationary/main.cc b/test/porousmediumflow/1p/compressible/instationary/main.cc index 905eab53d7612aec451200d310f2e725abc27c9c..9474eaf27e235d02057a7efbb3ce0bec746a0007 100644 --- a/test/porousmediumflow/1p/compressible/instationary/main.cc +++ b/test/porousmediumflow/1p/compressible/instationary/main.cc @@ -33,6 +33,7 @@ #include <dumux/discretization/method.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -57,8 +58,9 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/1p/compressible/instationary/main_experimental.cc b/test/porousmediumflow/1p/compressible/instationary/main_experimental.cc index 14c403ded6a3776f45d0b28da9cdc9df3e9097ce..4de5e09016005daf8f46f18af503ab01e3cfee47 100644 --- a/test/porousmediumflow/1p/compressible/instationary/main_experimental.cc +++ b/test/porousmediumflow/1p/compressible/instationary/main_experimental.cc @@ -32,6 +32,7 @@ #include <dumux/discretization/method.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -57,8 +58,9 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/1p/compressible/stationary/main.cc b/test/porousmediumflow/1p/compressible/stationary/main.cc index fa044c956be1985c5a91edfe64197f2751d55129..c26cbea65419ebe86032ea735212b862415f8b83 100644 --- a/test/porousmediumflow/1p/compressible/stationary/main.cc +++ b/test/porousmediumflow/1p/compressible/stationary/main.cc @@ -34,6 +34,7 @@ #include <dune/grid/io/file/vtk.hh> #include <dune/istl/io.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -56,8 +57,9 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/1p/convergence/analyticsolution/main.cc b/test/porousmediumflow/1p/convergence/analyticsolution/main.cc index 
93713a659ec7cef3e32b79d42c6c508b6802f206..a3a6ba4f9dd8d00151a6b577a01e4bc36bdb0f63 100644 --- a/test/porousmediumflow/1p/convergence/analyticsolution/main.cc +++ b/test/porousmediumflow/1p/convergence/analyticsolution/main.cc @@ -30,6 +30,7 @@ #include <dumux/nonlinear/newtonsolver.hh> #include <dumux/linear/seqsolverbackend.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -83,8 +84,9 @@ int main(int argc, char** argv) using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/1p/convergence/discretesolution/main.cc b/test/porousmediumflow/1p/convergence/discretesolution/main.cc index d3a77e7e13d8486c2563ebd35ebeeb41b7dbc788..1d893a02af238ef538cc47e98f592fd57cf17cf3 100644 --- a/test/porousmediumflow/1p/convergence/discretesolution/main.cc +++ b/test/porousmediumflow/1p/convergence/discretesolution/main.cc @@ -32,6 +32,7 @@ #include <dune/functions/gridfunctions/analyticgridviewfunction.hh> #include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> #include <dumux/common/properties.hh> #include <dumux/common/dumuxmessage.hh> @@ -52,8 +53,9 @@ int main(int argc, char** argv) static constexpr auto dm = GetPropType<TypeTag, Properties::GridGeometry>::discMethod; static constexpr bool isBox = dm == DiscretizationMethods::box; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/1p/fracture2d3d/main.cc b/test/porousmediumflow/1p/fracture2d3d/main.cc index 93a52887efa075922da14431f4f53b250d4e8b1d..f73e306d87a846d355566cd1e6161829c830ba36 100644 --- a/test/porousmediumflow/1p/fracture2d3d/main.cc +++ b/test/porousmediumflow/1p/fracture2d3d/main.cc @@ -34,6 +34,7 @@ #include "properties.hh" +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -49,30 +50,6 @@ #include <dumux/io/vtkoutputmodule.hh> #include <dumux/io/grid/gridmanager.hh> -/*! - * \brief Provides an interface for customizing error messages associated with - * reading in parameters. - * - * \param progName The name of the program, that was tried to be started. - * \param errorMsg The error message that was issued by the start function. - * Comprises the thing that went wrong and a general help message. 
- */ -void usage(const char *progName, const std::string &errorMsg) -{ - if (errorMsg.size() > 0) { - std::string errorMessageOut = "\nUsage: "; - errorMessageOut += progName; - errorMessageOut += " [options]\n"; - errorMessageOut += errorMsg; - errorMessageOut += "\n\nThe list of mandatory arguments for this program is:\n" - "\t-TimeManager.TEnd End of the simulation [s] \n" - "\t-TimeManager.DtInitial Initial timestep size [s] \n" - "\t-Grid.File The grid file\n"; - - std::cout << errorMessageOut - << "\n"; - } -} int main(int argc, char** argv) { @@ -82,15 +59,16 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) DumuxMessage::print(/*firstCall=*/true); // parse command line arguments and input file - Parameters::init(argc, argv, usage); + Parameters::init(argc, argv); // try to create a grid (from the given grid file or the input file) GridManager<GetPropType<TypeTag, Properties::Grid>> gridManager; diff --git a/test/porousmediumflow/1p/isothermal/main.cc b/test/porousmediumflow/1p/isothermal/main.cc index f432f51268556d81012454f406753c2d4dc9cb7c..b9aef46c4ebb1e827da1260d6b2e65a17aee1d58 100644 --- a/test/porousmediumflow/1p/isothermal/main.cc +++ b/test/porousmediumflow/1p/isothermal/main.cc @@ -34,6 +34,7 @@ #include "properties.hh" +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -50,37 +51,6 @@ #include <dumux/io/vtkoutputmodule.hh> #include <dumux/io/grid/gridmanager.hh> -/*! - * \brief Provides an interface for customizing error messages associated with - * reading in parameters. - * - * \param progName The name of the program, that was tried to be started. - * \param errorMsg The error message that was issued by the start function. - * Comprises the thing that went wrong and a general help message. 
- */ -void usage(const char *progName, const std::string &errorMsg) -{ - if (errorMsg.size() > 0) { - std::string errorMessageOut = "\nUsage: "; - errorMessageOut += progName; - errorMessageOut += " [options]\n"; - errorMessageOut += errorMsg; - errorMessageOut += "\n\nThe list of mandatory arguments for this program is:\n" - "\t-TimeManager.TEnd End of the simulation [s] \n" - "\t-TimeManager.DtInitial Initial timestep size [s] \n" - "\t-Grid.LowerLeft Lower left corner coordinates\n" - "\t-Grid.UpperRight Upper right corner coordinates\n" - "\t-Grid.Cells Number of cells in respective coordinate directions\n" - "\t-SpatialParams.LensLowerLeft coordinates of the lower left corner of the lens [m] \n" - "\t-SpatialParams.LensUpperRight coordinates of the upper right corner of the lens [m] \n" - "\t-SpatialParams.Permeability Permeability of the domain [m^2] \n" - "\t-SpatialParams.PermeabilityLens Permeability of the lens [m^2] \n"; - - std::cout << errorMessageOut - << "\n"; - } -} - int main(int argc, char** argv) { using namespace Dumux; @@ -88,15 +58,16 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) DumuxMessage::print(/*firstCall=*/true); // parse command line arguments and input file - Parameters::init(argc, argv, usage); + Parameters::init(argc, argv); // try to create a grid (from the given grid file or the input file) GridManager<GetPropType<TypeTag, Properties::Grid>> gridManager; diff --git a/test/porousmediumflow/1p/network1d3d/main.cc b/test/porousmediumflow/1p/network1d3d/main.cc index efa270bfc33532e2493970f0ff90b4effcce52d0..aee60d025d42070cdb8335d15c8271581e336bd8 100644 --- a/test/porousmediumflow/1p/network1d3d/main.cc +++ b/test/porousmediumflow/1p/network1d3d/main.cc @@ -34,6 +34,7 @@ #include "properties.hh" +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -49,30 +50,6 @@ #include <dumux/io/vtkoutputmodule.hh> #include <dumux/io/grid/gridmanager.hh> -/*! - * \brief Provides an interface for customizing error messages associated with - * reading in parameters. - * - * \param progName The name of the program, that was tried to be started. - * \param errorMsg The error message that was issued by the start function. - * Comprises the thing that went wrong and a general help message. 
- */ -void usage(const char *progName, const std::string &errorMsg) -{ - if (errorMsg.size() > 0) { - std::string errorMessageOut = "\nUsage: "; - errorMessageOut += progName; - errorMessageOut += " [options]\n"; - errorMessageOut += errorMsg; - errorMessageOut += "\n\nThe list of mandatory arguments for this program is:\n" - "\t-TimeManager.TEnd End of the simulation [s] \n" - "\t-TimeManager.DtInitial Initial timestep size [s] \n" - "\t-Grid.File The grid file\n"; - - std::cout << errorMessageOut - << "\n"; - } -} int main(int argc, char** argv) { @@ -82,15 +59,16 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) DumuxMessage::print(/*firstCall=*/true); // parse command line arguments and input file - Parameters::init(argc, argv, usage); + Parameters::init(argc, argv); // try to create a grid (from the given grid file or the input file) GridManager<GetPropType<TypeTag, Properties::Grid>> gridManager; diff --git a/test/porousmediumflow/1p/nonisothermal/main.cc b/test/porousmediumflow/1p/nonisothermal/main.cc index 509764ae409c481887dc2763c6e734c36067cdd2..3de7462b0db5a38df78c9b7e8a84ebfff0705ff6 100644 --- a/test/porousmediumflow/1p/nonisothermal/main.cc +++ b/test/porousmediumflow/1p/nonisothermal/main.cc @@ -29,12 +29,11 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> -#include <dune/grid/io/file/vtk.hh> -#include <dune/istl/io.hh> #include "properties_conduction.hh" #include "properties_convection.hh" +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -52,32 +51,6 @@ #include <dumux/io/grid/gridmanager.hh> #include <dumux/io/loadsolution.hh> -/*! - * \brief Provides an interface for customizing error messages associated with - * reading in parameters. - * - * \param progName The name of the program, that was tried to be started. - * \param errorMsg The error message that was issued by the start function. - * Comprises the thing that went wrong and a general help message. 
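Each file additionally gains the include <dumux/common/initialize.hh>, which provides this combined entry point. Its implementation is not part of this diff; the following is only a plausible sketch (an assumption, not the DuMux source) of what such a helper has to take care of: set up MPI exactly once, and additionally bring up a thread-parallel backend such as Kokkos when one is compiled in.

    #include <dune/common/parallel/mpihelper.hh>
    #if HAVE_KOKKOS // guard name assumed, mirroring the usual config.h convention
    #include <Kokkos_Core.hpp>
    #endif

    namespace DumuxSketch {

    // hypothetical stand-in for Dumux::initialize
    void initialize(int& argc, char** argv)
    {
        // initialize MPI; finalize is done automatically on exit
        Dune::MPIHelper::instance(argc, argv);

    #if HAVE_KOKKOS
        // multithreading backends need their own global setup
        if (!Kokkos::is_initialized())
            Kokkos::initialize(argc, argv);
    #endif
    }

    } // namespace DumuxSketch
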
- */ -void usage(const char *progName, const std::string &errorMsg) -{ - if (errorMsg.size() > 0) { - std::string errorMessageOut = "\nUsage: "; - errorMessageOut += progName; - errorMessageOut += " [options]\n"; - errorMessageOut += errorMsg; - errorMessageOut += "\n\nThe list of mandatory options for this program is:\n" - "\t-TimeManager.TEnd End of the simulation [s] \n" - "\t-TimeManager.DtInitial Initial timestep size [s] \n" - "\t-Grid.LowerLeft Lower left corner coordinates\n" - "\t-Grid.UpperRight Upper right corner coordinates\n" - "\t-Grid.Cells Number of cells in respective coordinate directions\n"; - std::cout << errorMessageOut - << "\n"; - } -} - int main(int argc, char** argv) { using namespace Dumux; @@ -85,15 +58,16 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) DumuxMessage::print(/*firstCall=*/true); // parse command line arguments and input file - Parameters::init(argc, argv, usage); + Parameters::init(argc, argv); // try to create a grid (from the given grid file or the input file) GridManager<GetPropType<TypeTag, Properties::Grid>> gridManager; diff --git a/test/porousmediumflow/1p/periodicbc/main.cc b/test/porousmediumflow/1p/periodicbc/main.cc index 2ee30d141bc7e1d27a0d458aafeafc6ab3e6bc13..851f03c91be8b6c5a54a60045e798e6d970dd7c8 100644 --- a/test/porousmediumflow/1p/periodicbc/main.cc +++ b/test/porousmediumflow/1p/periodicbc/main.cc @@ -24,16 +24,15 @@ #include <config.h> -#include <ctime> #include <iostream> #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> -#include <dune/grid/io/file/vtk.hh> #include <dumux/linear/amgbackend.hh> #include <dumux/linear/linearsolvertraits.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -51,8 +50,9 @@ int main(int argc, char** argv) using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/1p/pointsources/timedependent/main.cc b/test/porousmediumflow/1p/pointsources/timedependent/main.cc index f2705249998bdae50ffc716eb6be183c2a238f2f..855603ba927a3284d50d5083bfdaee7fb1f7b056 100644 --- a/test/porousmediumflow/1p/pointsources/timedependent/main.cc +++ b/test/porousmediumflow/1p/pointsources/timedependent/main.cc @@ -24,16 +24,14 @@ #include <config.h> -#include <ctime> #include <iostream> #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> -#include <dune/grid/io/file/vtk.hh> -#include <dune/istl/io.hh> #include "properties.hh" +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -57,8 +55,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize 
is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/1p/pointsources/timeindependent/main.cc b/test/porousmediumflow/1p/pointsources/timeindependent/main.cc index a9c7a570c418979d301c19a36303c4cfd3ce0739..bc6997ff072eb582a6cf00b44d7b216d0562eb5b 100644 --- a/test/porousmediumflow/1p/pointsources/timeindependent/main.cc +++ b/test/porousmediumflow/1p/pointsources/timeindependent/main.cc @@ -24,16 +24,14 @@ #include <config.h> -#include <ctime> #include <iostream> #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> -#include <dune/grid/io/file/vtk.hh> -#include <dune/istl/io.hh> #include "properties.hh" +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -57,8 +55,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/1p/rootbenchmark/main.cc b/test/porousmediumflow/1p/rootbenchmark/main.cc index 5a7b7c7d7549c18fb199356d19fae89e0b653480..493fbb7420aeb439ff954e54a0f0b5c3c3157671 100644 --- a/test/porousmediumflow/1p/rootbenchmark/main.cc +++ b/test/porousmediumflow/1p/rootbenchmark/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/linear/seqsolverbackend.hh> @@ -50,8 +51,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // parse command line arguments and input file Parameters::init(argc, argv); diff --git a/test/porousmediumflow/1pnc/1p2c/isothermal/main.cc b/test/porousmediumflow/1pnc/1p2c/isothermal/main.cc index 34223ee0bdf3ca660543569fe670c9120ce6a9b7..e56d874d44012e95b6033291aecc812947232c15 100644 --- a/test/porousmediumflow/1pnc/1p2c/isothermal/main.cc +++ b/test/porousmediumflow/1pnc/1p2c/isothermal/main.cc @@ -24,16 +24,14 @@ #include <config.h> -#include <ctime> #include <iostream> #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> -#include <dune/grid/io/file/vtk.hh> -#include <dune/istl/io.hh> #include <dumux/discretization/method.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -58,8 +56,9 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe
initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/1pnc/1p2c/isothermal/saltwaterintrusion/main.cc b/test/porousmediumflow/1pnc/1p2c/isothermal/saltwaterintrusion/main.cc index f6af56eb1f302a22b62191250e9cbc96c4ca7a51..d7b98e55702c417f40b087f2426b0f9d05588066 100644 --- a/test/porousmediumflow/1pnc/1p2c/isothermal/saltwaterintrusion/main.cc +++ b/test/porousmediumflow/1pnc/1p2c/isothermal/saltwaterintrusion/main.cc @@ -29,6 +29,7 @@ #include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -53,8 +54,9 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/1pnc/1p2c/nonisothermal/conduction/main.cc b/test/porousmediumflow/1pnc/1p2c/nonisothermal/conduction/main.cc index fba087eefb616b74f817c520552218f7440919f7..f950d211343a5275bc958f0ef114833e4e1796ba 100644 --- a/test/porousmediumflow/1pnc/1p2c/nonisothermal/conduction/main.cc +++ b/test/porousmediumflow/1pnc/1p2c/nonisothermal/conduction/main.cc @@ -26,16 +26,14 @@ #include "properties.hh" -#include <ctime> #include <iostream> #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> -#include <dune/grid/io/file/vtk.hh> -#include <dune/istl/io.hh> #include <dumux/discretization/method.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -58,8 +56,9 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/1pnc/1p2c/nonisothermal/convection/main.cc b/test/porousmediumflow/1pnc/1p2c/nonisothermal/convection/main.cc index 763261a94778e6f306e6f0a6aadcfcc42cc93edc..39411732f6517fafb53f3300a693636ee2510724 100644 --- a/test/porousmediumflow/1pnc/1p2c/nonisothermal/convection/main.cc +++ b/test/porousmediumflow/1pnc/1p2c/nonisothermal/convection/main.cc @@ -26,16 +26,14 @@ #include "properties.hh" -#include <ctime> #include <iostream> #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> -#include <dune/grid/io/file/vtk.hh> -#include <dune/istl/io.hh> #include <dumux/discretization/method.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -58,8 +56,9 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// 
//////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/1pnc/1p2c/nonisothermal/transientbc/main.cc b/test/porousmediumflow/1pnc/1p2c/nonisothermal/transientbc/main.cc index ee07edeb2d781acba886b9894d8cba752ab1174f..321cc5f991876d041a7992d60ff965819279bdf1 100644 --- a/test/porousmediumflow/1pnc/1p2c/nonisothermal/transientbc/main.cc +++ b/test/porousmediumflow/1pnc/1p2c/nonisothermal/transientbc/main.cc @@ -26,16 +26,14 @@ #include "properties.hh" -#include <ctime> #include <iostream> #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> -#include <dune/grid/io/file/vtk.hh> -#include <dune/istl/io.hh> #include <dumux/discretization/method.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -58,8 +56,9 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/1pnc/1p3c/main.cc b/test/porousmediumflow/1pnc/1p3c/main.cc index 96bb10e48760abf1dae0547626ec168f047fa947..36c97e84f344a8f9c77192b5441246fa41151948 100644 --- a/test/porousmediumflow/1pnc/1p3c/main.cc +++ b/test/porousmediumflow/1pnc/1p3c/main.cc @@ -24,20 +24,17 @@ #include <config.h> -#include <ctime> #include <iostream> #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> -#include <dune/grid/io/file/vtk.hh> -#include <dune/istl/io.hh> #include <dumux/discretization/method.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> -#include <dumux/common/defaultusagemessage.hh> #include <dumux/linear/seqsolverbackend.hh> #include <dumux/nonlinear/newtonsolver.hh> @@ -59,8 +56,9 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/1pnc/dispersion/main.cc b/test/porousmediumflow/1pnc/dispersion/main.cc index ba323a42c18ece9148afd86d93798b331f64aa1a..2c93183e28206b5cf43b63167f5ee04fe8b80026 100644 --- a/test/porousmediumflow/1pnc/dispersion/main.cc +++ b/test/porousmediumflow/1pnc/dispersion/main.cc @@ -29,7 +29,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> - +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include 
<dumux/common/dumuxmessage.hh> @@ -55,8 +55,9 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/1pnc/nonequilibrium/main.cc b/test/porousmediumflow/1pnc/nonequilibrium/main.cc index 293d4329ce68443db21522c7ae979719b2434989..69c8e50485452ddda9656702666701125eaf79c7 100644 --- a/test/porousmediumflow/1pnc/nonequilibrium/main.cc +++ b/test/porousmediumflow/1pnc/nonequilibrium/main.cc @@ -26,16 +26,14 @@ #include "properties.hh" -#include <ctime> #include <iostream> #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> -#include <dune/grid/io/file/vtk.hh> -#include <dune/istl/io.hh> #include <dumux/discretization/method.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -58,8 +56,9 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/1pncmin/nonisothermal/main.cc b/test/porousmediumflow/1pncmin/nonisothermal/main.cc index d68144baac03142ca3fda1c8cd64edbf189c57d7..e2fedb88262fcf7891c88eb654e1574074494633 100644 --- a/test/porousmediumflow/1pncmin/nonisothermal/main.cc +++ b/test/porousmediumflow/1pncmin/nonisothermal/main.cc @@ -26,6 +26,7 @@ #include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -40,41 +41,6 @@ #include "properties.hh" -/*! - * \brief Provides an interface for customizing error messages associated with - * reading in parameters. - * - * \param progName The name of the program, that was tried to be started. - * \param errorMsg The error message that was issued by the start function. - * Comprises the thing that went wrong and a general help message. 
- */ -void usage(const char *progName, const std::string &errorMsg) -{ - if (errorMsg.size() > 0) { - std::string errorMessageOut = "\nUsage: "; - errorMessageOut += progName; - errorMessageOut += " [options]\n"; - errorMessageOut += errorMsg; - errorMessageOut += "\n\nThe list of mandatory options for this program is:\n" - "\t-TimeLoop.TEnd End of the simulation [s] \n" - "\t-TimeLoop.DtInitial Initial timestep size [s] \n" - "\t-Grid.UpperRight Upper right corner coordinates\n" - "\t-Grid.Cells Number of cells in respective coordinate directions\n" - "\t-Problem.Name Name for the vtk files \n" - "\t-Problem.PressureInitial Initial Pressure [Pa] \n" - "\t-Problem.TemperatureInitial Initial Temperature [K] \n" - "\t-Problem.VaporInitial Initial vapor mole fraction [-] \n" - "\t-Problem.CaOInitial Initial volumefraction of CaO [-] \n" - "\t-Problem.CaO2H2Initial Initial volumefraction of Ca(OH)2 [-] \n" - "\t-Problem.BoundaryPressure Pressure at the boundary [Pa] \n" - "\t-Problem.BoundaryTemperature Temperature at the boundary [K] \n" - "\t-Problem.BoundaryMoleFraction Vapor molefraction at the boundary [K] \n"; - - std::cout << errorMessageOut - << "\n"; - } -} - int main(int argc, char** argv) { using namespace Dumux; @@ -85,15 +51,16 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) DumuxMessage::print(/*firstCall=*/true); // parse command line arguments and input file - Parameters::init(argc, argv, usage); + Parameters::init(argc, argv); ////////////////////////////////////////////////////////////////////// // try to create a grid (from the given grid file or the input file) diff --git a/test/porousmediumflow/2p/adaptive/main.cc b/test/porousmediumflow/2p/adaptive/main.cc index e207f80eda52c865ce1b3297795b298dd369927d..5ee75e1e5ad1d17facee8d2e9de6b8656b5ea508 100644 --- a/test/porousmediumflow/2p/adaptive/main.cc +++ b/test/porousmediumflow/2p/adaptive/main.cc @@ -24,14 +24,12 @@ #include <config.h> -#include <ctime> #include <iostream> #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> -#include <dune/grid/io/file/vtk.hh> -#include <dune/istl/io.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -68,8 +66,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) @@ -201,9 +200,8 @@ int main(int argc, char** argv) { // Note that if we were using point sources, we would have to update the map here as well xOld = x; //!< Overwrite the old solution with the new (resized & interpolated) one - assembler->setJacobianPattern(); //!< Tell the assembler to resize the matrix and set pattern - assembler->setResidualSize(); //!< Tell the assembler to resize 
the residual gridVariables->updateAfterGridAdaption(x); //!< Initialize the secondary variables to the new (and "new old") solution + assembler->updateAfterGridAdaption(); //!< Tell the assembler that the grid changed problem->computePointSourceMap(); //!< Update the point source map } } diff --git a/test/porousmediumflow/2p/boxdfm/main.cc b/test/porousmediumflow/2p/boxdfm/main.cc index 2e1d923b3ec1ee550da435cb9919cddc32daf810..80f6007a3955ff78a9ddfd9b1f34e76b34cf8f15 100644 --- a/test/porousmediumflow/2p/boxdfm/main.cc +++ b/test/porousmediumflow/2p/boxdfm/main.cc @@ -23,12 +23,12 @@ */ #include <config.h> -#include <ctime> #include <iostream> #include <dune/common/parallel/mpihelper.hh> #include <dune/foamgrid/foamgrid.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -54,8 +54,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TwoPIncompressibleBoxDfm; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/2p/cornerpoint/main.cc b/test/porousmediumflow/2p/cornerpoint/main.cc index c9489e2ea6a893f13db07e9875790ebfecc5a6b5..e356de7c3d231b2f8754f1b10a80bd1f899d96f2 100644 --- a/test/porousmediumflow/2p/cornerpoint/main.cc +++ b/test/porousmediumflow/2p/cornerpoint/main.cc @@ -35,6 +35,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -57,8 +58,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TwoPCornerPoint; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/2p/fracture/main.cc b/test/porousmediumflow/2p/fracture/main.cc index 7f4cc22c75999a864ad53fe2a0759c076e1443c4..cd6ae151659753460206d601be20cbbd8b7a7bf6 100644 --- a/test/porousmediumflow/2p/fracture/main.cc +++ b/test/porousmediumflow/2p/fracture/main.cc @@ -29,6 +29,7 @@ #include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -54,8 +55,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/2p/incompressible/main.cc b/test/porousmediumflow/2p/incompressible/main.cc index 95157002b249b4c7228d1062925182811cba1328..21010f8defd79d5e142d30f1d153266e0d0deafc 
100644 --- a/test/porousmediumflow/2p/incompressible/main.cc +++ b/test/porousmediumflow/2p/incompressible/main.cc @@ -29,6 +29,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -56,8 +57,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/2p/nonisothermal/main.cc b/test/porousmediumflow/2p/nonisothermal/main.cc index a39f81784eb3e2784f0c3bc7a8d8e9111e31305b..b62477c03f48e77c6626f4cdf23f9babb7cb4a5d 100644 --- a/test/porousmediumflow/2p/nonisothermal/main.cc +++ b/test/porousmediumflow/2p/nonisothermal/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -52,8 +53,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/2p/rotationsymmetry/main.cc b/test/porousmediumflow/2p/rotationsymmetry/main.cc index 23ba36e42ef4c2568712b1bbd8ecbcfb3cd7682c..48594263154614cdaa594168867b2fd5e21e4c9d 100644 --- a/test/porousmediumflow/2p/rotationsymmetry/main.cc +++ b/test/porousmediumflow/2p/rotationsymmetry/main.cc @@ -22,6 +22,7 @@ #include <iostream> #include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> @@ -39,8 +40,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // parse command line arguments and input file Parameters::init(argc, argv); diff --git a/test/porousmediumflow/2p1c/main.cc b/test/porousmediumflow/2p1c/main.cc index 885a6bc2670ac5476c79a68ff6e1455a0364a70e..e0a5bfac07ea73157480d1308c26be13a527303a 100644 --- a/test/porousmediumflow/2p1c/main.cc +++ b/test/porousmediumflow/2p1c/main.cc @@ -29,6 +29,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -53,8 +54,9 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = 
Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/2p2c/chemicalnonequilibrium/main.cc b/test/porousmediumflow/2p2c/chemicalnonequilibrium/main.cc index 06502ee31d7fe80f7e2371d0eecd4cbc13d167ae..ed4b9d6869b3b68b8046978a169db1e835083479 100644 --- a/test/porousmediumflow/2p2c/chemicalnonequilibrium/main.cc +++ b/test/porousmediumflow/2p2c/chemicalnonequilibrium/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -53,8 +54,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/2p2c/evaporation/main.cc b/test/porousmediumflow/2p2c/evaporation/main.cc index f5e35121cdf998e972e0af261f3a5c99f25369fd..9cc569510e41ac42e6dafc94e29c557b5869219f 100644 --- a/test/porousmediumflow/2p2c/evaporation/main.cc +++ b/test/porousmediumflow/2p2c/evaporation/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/exceptions.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -50,8 +51,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/2p2c/injection/main.cc b/test/porousmediumflow/2p2c/injection/main.cc index abf95823594aa8be3da6a9d3dcd12dae0e48858f..c54d287c4503d718d953f89eee24b7546c828045 100644 --- a/test/porousmediumflow/2p2c/injection/main.cc +++ b/test/porousmediumflow/2p2c/injection/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -51,8 +52,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/2p2c/mpnccomparison/main.cc b/test/porousmediumflow/2p2c/mpnccomparison/main.cc index 11074042ec55b63e50d5571e29bfee2fb60a243a..990a33cd3a697f0866683a9c35821e3ef4f5388a 100644 --- 
a/test/porousmediumflow/2p2c/mpnccomparison/main.cc +++ b/test/porousmediumflow/2p2c/mpnccomparison/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -53,8 +54,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/2p2c/waterair/main.cc b/test/porousmediumflow/2p2c/waterair/main.cc index f8a2539be4f0b05543ec534cf66e0e73aec979c2..0935e31031b39897f0914a7bc0e3157dfd366590 100644 --- a/test/porousmediumflow/2p2c/waterair/main.cc +++ b/test/porousmediumflow/2p2c/waterair/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/exceptions.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -50,8 +51,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/2pnc/diffusion/main.cc b/test/porousmediumflow/2pnc/diffusion/main.cc index 7523bc2afe862f011c8a646ea556890be11ee690..a9d9d50cfed93048f46abcf974391a1030eb7811 100644 --- a/test/porousmediumflow/2pnc/diffusion/main.cc +++ b/test/porousmediumflow/2pnc/diffusion/main.cc @@ -29,6 +29,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -51,8 +52,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TwoPNCDiffusionCC; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/2pnc/fuelcell/main.cc b/test/porousmediumflow/2pnc/fuelcell/main.cc index 1f4ebd28d5dc154f8fa72dd29f388e35ec150184..f48a35389648e46c796b03dd8f0191a491a835fb 100644 --- a/test/porousmediumflow/2pnc/fuelcell/main.cc +++ b/test/porousmediumflow/2pnc/fuelcell/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -50,8 +51,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, 
finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/2pncmin/isothermal/main.cc b/test/porousmediumflow/2pncmin/isothermal/main.cc index 3909bf55d8ddc8d6c877b99433ae4fe6677bea3d..547eb747aeb46c897a530a5a2969e9fe60360614 100644 --- a/test/porousmediumflow/2pncmin/isothermal/main.cc +++ b/test/porousmediumflow/2pncmin/isothermal/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -51,8 +52,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/2pncmin/nonisothermal/main.cc b/test/porousmediumflow/2pncmin/nonisothermal/main.cc index 5f23efa88ce05eb60b575a18d0fe0b6414a44948..25219dd2a68ae7f7464b9ebc2cc587b235ba8cae 100644 --- a/test/porousmediumflow/2pncmin/nonisothermal/main.cc +++ b/test/porousmediumflow/2pncmin/nonisothermal/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -51,8 +52,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/3p/conduction/main.cc b/test/porousmediumflow/3p/conduction/main.cc index 1f194cb8645a2ff216279eeabe560a5140fe2a3e..f072187abfaeb9f8650ead277a040e41c8d81587 100644 --- a/test/porousmediumflow/3p/conduction/main.cc +++ b/test/porousmediumflow/3p/conduction/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -53,8 +54,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/3p/convection/main.cc b/test/porousmediumflow/3p/convection/main.cc index 
fe31fa06b9c9b7518dc17e5f68b03e55c131b7ec..57e3c6231b45746dabd19bcaf3965839f35a3ada 100644 --- a/test/porousmediumflow/3p/convection/main.cc +++ b/test/porousmediumflow/3p/convection/main.cc @@ -27,6 +27,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -52,8 +53,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/3p/infiltration/main.cc b/test/porousmediumflow/3p/infiltration/main.cc index abec3c84fb1369b4748fe5c4b9cba75727e2b0c3..3319c5211e081b1fe7205195b0338fe825d28686 100644 --- a/test/porousmediumflow/3p/infiltration/main.cc +++ b/test/porousmediumflow/3p/infiltration/main.cc @@ -22,15 +22,15 @@ * \brief Test for the three-phase isothermal NAPL infiltration model. */ #include <config.h> + #include "properties.hh" -#include <ctime> + #include <iostream> #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> -#include <dune/grid/io/file/vtk.hh> -#include <dune/istl/io.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -47,38 +47,6 @@ #include <dumux/io/vtkoutputmodule.hh> #include <dumux/io/grid/gridmanager.hh> -/*! - * \brief Provides an interface for customizing error messages associated with - * reading in parameters. - * - * \param progName The name of the program, that was tried to be started. - * \param errorMsg The error message that was issued by the start function. - * Comprises the thing that went wrong and a general help message. 
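One functional change hides among these mechanical ones: in test/porousmediumflow/2p/adaptive/main.cc above, the separate post-adaptation calls assembler->setJacobianPattern() and assembler->setResidualSize() are folded into a single assembler->updateAfterGridAdaption(). A sketch of the update sequence after grid adaptation as that hunk leaves it (written as a function template over the types used in the test, for illustration only):

    // sketch: the post-adaptation update order from 2p/adaptive/main.cc
    template<class Problem, class Assembler, class GridVariables, class SolutionVector>
    void afterGridAdaption(Problem& problem, Assembler& assembler,
                           GridVariables& gridVariables,
                           SolutionVector& x, SolutionVector& xOld)
    {
        // overwrite the old solution with the new (resized & interpolated) one
        xOld = x;
        // initialize the secondary variables to the new (and "new old") solution
        gridVariables.updateAfterGridAdaption(x);
        // one call now resizes the residual and the Jacobian and resets its pattern
        assembler.updateAfterGridAdaption();
        // update the point source map
        problem.computePointSourceMap();
    }
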
- */ -void usage(const char *progName, const std::string &errorMsg) -{ - if (errorMsg.size() > 0) { - std::string errorMessageOut = "\nUsage: "; - errorMessageOut += progName; - errorMessageOut += " [options]\n"; - errorMessageOut += errorMsg; - errorMessageOut += "\n\nThe list of mandatory arguments for this program is:\n" - "\t-TimeManager.TEnd End of the simulation [s] \n" - "\t-TimeManager.DtInitial Initial timestep size [s] \n" - "\t-Grid.LowerLeft Lower left corner coordinates\n" - "\t-Grid.UpperRight Upper right corner coordinates\n" - "\t-Grid.Cells Number of cells in respective coordinate directions\n" - "\t definition in DGF format\n" - "\t-SpatialParams.LensLowerLeft coordinates of the lower left corner of the lens [m] \n" - "\t-SpatialParams.LensUpperRight coordinates of the upper right corner of the lens [m] \n" - "\t-SpatialParams.Permeability Permeability of the domain [m^2] \n" - "\t-SpatialParams.PermeabilityLens Permeability of the lens [m^2] \n"; - - std::cout << errorMessageOut - << "\n"; - } -} - int main(int argc, char** argv) { using namespace Dumux; @@ -86,15 +54,16 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) DumuxMessage::print(/*firstCall=*/true); // parse command line arguments and input file - Parameters::init(argc, argv, usage); + Parameters::init(argc, argv); // try to create a grid (from the given grid file or the input file) GridManager<GetPropType<TypeTag, Properties::Grid>> gridManager; diff --git a/test/porousmediumflow/3p3c/columnxylol/main.cc b/test/porousmediumflow/3p3c/columnxylol/main.cc index 7302386ae2bf751faadb73582e51f6636847e725..61175782c25f57e08ea3b25cfd488c5e567b93fe 100644 --- a/test/porousmediumflow/3p3c/columnxylol/main.cc +++ b/test/porousmediumflow/3p3c/columnxylol/main.cc @@ -27,6 +27,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -53,8 +54,9 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/3p3c/kuevette/main.cc b/test/porousmediumflow/3p3c/kuevette/main.cc index 315a81e35e25b1a287531df17288c4347fbd6b73..f0bd103f9102aaced7d359e219fa1caaba35166d 100644 --- a/test/porousmediumflow/3p3c/kuevette/main.cc +++ b/test/porousmediumflow/3p3c/kuevette/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -54,8 +55,9 @@ int main(int argc, char** argv) 
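With the hand-written usage() callbacks removed, parameter handling is uniform across the tests: Parameters::init(argc, argv) parses the command line and the input file, and a missing mandatory parameter is reported through the generic error path instead of a per-test help text. A short sketch (the parameter name is illustrative only):

    #include <dumux/common/initialize.hh>
    #include <dumux/common/parameters.hh>

    int main(int argc, char** argv)
    {
        // maybe initialize MPI and/or multithreading backend
        Dumux::initialize(argc, argv);

        // parse command line arguments and input file; no usage callback needed
        Dumux::Parameters::init(argc, argv);

        // reading a parameter stays unchanged; if a mandatory parameter is
        // missing, the thrown exception names it
        const auto tEnd = Dumux::getParam<double>("TimeLoop.TEnd");
        return tEnd > 0.0 ? 0 : 1;
    }
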
//////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/3pwateroil/main.cc b/test/porousmediumflow/3pwateroil/main.cc index 19809cef28695982c95c9bb8acf5c7607b643111..de4674b9ff66e6b807b3763d4d4364c9f9142aa7 100644 --- a/test/porousmediumflow/3pwateroil/main.cc +++ b/test/porousmediumflow/3pwateroil/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -55,8 +56,9 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/co2/main.cc b/test/porousmediumflow/co2/main.cc index 4708e1242b84d504d16bf3a3b26a6c7d22b61c6a..2801e7aab0a782d5a849c81546eeef452fb70bf3 100644 --- a/test/porousmediumflow/co2/main.cc +++ b/test/porousmediumflow/co2/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -50,8 +51,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/mpnc/2p2ccomparison/main.cc b/test/porousmediumflow/mpnc/2p2ccomparison/main.cc index 1a4e76ab553dd9b6f9d6088971a53803e9f0f280..0f781955d98ae994a4957d2d8a1c0dfc8cf4c4b9 100644 --- a/test/porousmediumflow/mpnc/2p2ccomparison/main.cc +++ b/test/porousmediumflow/mpnc/2p2ccomparison/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -53,8 +54,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/mpnc/kinetic/main.cc 
b/test/porousmediumflow/mpnc/kinetic/main.cc index 73a16d1f8e943e269ab4e72a5529f37e0bcef91b..72f3ebf770a83c776e57a3f7f7feb78139638c51 100644 --- a/test/porousmediumflow/mpnc/kinetic/main.cc +++ b/test/porousmediumflow/mpnc/kinetic/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -57,8 +58,9 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/mpnc/obstacle/main.cc b/test/porousmediumflow/mpnc/obstacle/main.cc index 1a4e76ab553dd9b6f9d6088971a53803e9f0f280..0f781955d98ae994a4957d2d8a1c0dfc8cf4c4b9 100644 --- a/test/porousmediumflow/mpnc/obstacle/main.cc +++ b/test/porousmediumflow/mpnc/obstacle/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -53,8 +54,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/mpnc/thermalnonequilibrium/main.cc b/test/porousmediumflow/mpnc/thermalnonequilibrium/main.cc index 5876283978ca543e840fda38e653edcaea1e353b..0af1e4c019cfae3a4c25a7023380aa2accf7779d 100644 --- a/test/porousmediumflow/mpnc/thermalnonequilibrium/main.cc +++ b/test/porousmediumflow/mpnc/thermalnonequilibrium/main.cc @@ -28,6 +28,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -57,8 +58,9 @@ int main(int argc, char** argv) //////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////// - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/richards/analytical/main.cc b/test/porousmediumflow/richards/analytical/main.cc index 2c170d9fc54244ffb1d515d37d182236604929b5..07c2a6d85d24409a6401398a7ad01309ba49504f 100644 --- a/test/porousmediumflow/richards/analytical/main.cc +++ b/test/porousmediumflow/richards/analytical/main.cc @@ -29,6 +29,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> 
#include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -53,8 +54,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::RichardsAnalyticalCC; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/richards/benchmarks/main.cc b/test/porousmediumflow/richards/benchmarks/main.cc index 605edf7fa9c15cb127f44addf6411b982054701a..8eb769ce7ff21983a99ef65e238ae8c411c0760d 100644 --- a/test/porousmediumflow/richards/benchmarks/main.cc +++ b/test/porousmediumflow/richards/benchmarks/main.cc @@ -35,6 +35,7 @@ #include <dune/common/parallel/mpihelper.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/parameters.hh> #include <dumux/common/math.hh> @@ -56,8 +57,9 @@ int main(int argc, char** argv) { using namespace Dumux; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // parse command line arguments and input file Parameters::init(argc, argv); diff --git a/test/porousmediumflow/richards/lens/main.cc b/test/porousmediumflow/richards/lens/main.cc index 6d174cc1bbdf0570f8f5e391324ead3f51d1411a..07a35d47446b09f935d62161292d43ec7931f6d0 100644 --- a/test/porousmediumflow/richards/lens/main.cc +++ b/test/porousmediumflow/richards/lens/main.cc @@ -24,14 +24,11 @@ #include <config.h> -#include <ctime> #include <iostream> #include <dune/common/version.hh> #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> -#include <dune/grid/io/file/vtk.hh> -#include <dune/istl/io.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> diff --git a/test/porousmediumflow/richards/nonisothermal/conduction/main.cc b/test/porousmediumflow/richards/nonisothermal/conduction/main.cc index 49c9927efa14854a1e748f191be648fd477892a7..e9b8b4c8d7130b344b6729596379d72cc1ab4805 100644 --- a/test/porousmediumflow/richards/nonisothermal/conduction/main.cc +++ b/test/porousmediumflow/richards/nonisothermal/conduction/main.cc @@ -29,6 +29,7 @@ #include <dune/common/parallel/mpihelper.hh> #include <dune/common/timer.hh> +#include <dumux/common/initialize.hh> #include <dumux/common/properties.hh> #include <dumux/common/parameters.hh> #include <dumux/common/dumuxmessage.hh> @@ -53,8 +54,9 @@ int main(int argc, char** argv) // define the type tag for this problem using TypeTag = Properties::TTag::TYPETAG; - // initialize MPI, finalize is done automatically on exit - const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv); + // maybe initialize MPI and/or multithreading backend + Dumux::initialize(argc, argv); + const auto& mpiHelper = Dune::MPIHelper::instance(); // print dumux start message if (mpiHelper.rank() == 0) diff --git a/test/porousmediumflow/richards/nonisothermal/convection/main.cc b/test/porousmediumflow/richards/nonisothermal/convection/main.cc index 49c9927efa14854a1e748f191be648fd477892a7..e9b8b4c8d7130b344b6729596379d72cc1ab4805 100644 --- a/test/porousmediumflow/richards/nonisothermal/convection/main.cc +++ 
diff --git a/test/porousmediumflow/richards/nonisothermal/convection/main.cc b/test/porousmediumflow/richards/nonisothermal/convection/main.cc
index 49c9927efa14854a1e748f191be648fd477892a7..e9b8b4c8d7130b344b6729596379d72cc1ab4805 100644
--- a/test/porousmediumflow/richards/nonisothermal/convection/main.cc
+++ b/test/porousmediumflow/richards/nonisothermal/convection/main.cc
@@ -29,6 +29,7 @@
 #include <dune/common/parallel/mpihelper.hh>
 #include <dune/common/timer.hh>
 
+#include <dumux/common/initialize.hh>
 #include <dumux/common/properties.hh>
 #include <dumux/common/parameters.hh>
 #include <dumux/common/dumuxmessage.hh>
@@ -53,8 +54,9 @@ int main(int argc, char** argv)
     // define the type tag for this problem
     using TypeTag = Properties::TTag::TYPETAG;
 
-    // initialize MPI, finalize is done automatically on exit
-    const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv);
+    // maybe initialize MPI and/or multithreading backend
+    Dumux::initialize(argc, argv);
+    const auto& mpiHelper = Dune::MPIHelper::instance();
 
     // print dumux start message
     if (mpiHelper.rank() == 0)
diff --git a/test/porousmediumflow/richards/nonisothermal/evaporation/main.cc b/test/porousmediumflow/richards/nonisothermal/evaporation/main.cc
index 74c0f70eb2993f8797fb088288f0c67cc34393d1..257a6e8c900210ca372eb93c75e680f1729609a3 100644
--- a/test/porousmediumflow/richards/nonisothermal/evaporation/main.cc
+++ b/test/porousmediumflow/richards/nonisothermal/evaporation/main.cc
@@ -26,14 +26,12 @@
 #include "properties.hh"
 
-#include <ctime>
 #include <iostream>
 
 #include <dune/common/parallel/mpihelper.hh>
 #include <dune/common/timer.hh>
-#include <dune/grid/io/file/vtk.hh>
-#include <dune/istl/io.hh>
 
+#include <dumux/common/initialize.hh>
 #include <dumux/common/properties.hh>
 #include <dumux/common/parameters.hh>
 #include <dumux/common/dumuxmessage.hh>
@@ -56,8 +54,9 @@ int main(int argc, char** argv)
     // define the type tag for this problem
     using TypeTag = Properties::TTag::TYPETAG;
 
-    // initialize MPI, finalize is done automatically on exit
-    const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv);
+    // maybe initialize MPI and/or multithreading backend
+    Dumux::initialize(argc, argv);
+    const auto& mpiHelper = Dune::MPIHelper::instance();
 
     // print dumux start message
     if (mpiHelper.rank() == 0)
diff --git a/test/porousmediumflow/richardsnc/main.cc b/test/porousmediumflow/richardsnc/main.cc
index 4b8995a779986f43015514968927ae3c4e264c93..1c554e5ce8a31894cc73c80152696443552ad016 100644
--- a/test/porousmediumflow/richardsnc/main.cc
+++ b/test/porousmediumflow/richardsnc/main.cc
@@ -29,6 +29,7 @@
 #include <dune/common/parallel/mpihelper.hh>
 #include <dune/common/timer.hh>
 
+#include <dumux/common/initialize.hh>
 #include <dumux/common/properties.hh>
 #include <dumux/common/parameters.hh>
 #include <dumux/common/dumuxmessage.hh>
@@ -53,8 +54,9 @@ int main(int argc, char** argv)
     // define the type tag for this problem
     using TypeTag = Properties::TTag::TYPETAG;
 
-    // initialize MPI, finalize is done automatically on exit
-    const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv);
+    // maybe initialize MPI and/or multithreading backend
+    Dumux::initialize(argc, argv);
+    const auto& mpiHelper = Dune::MPIHelper::instance();
 
     // print dumux start message
     if (mpiHelper.rank() == 0)
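As a rough mental model only — this is a hypothetical sketch, not the contents of dumux/common/initialize.hh — the new call can be thought of as forwarding argc/argv to Dune's MPI helper once and then setting up whatever multithreading backend was configured:

```cpp
#include <dune/common/parallel/mpihelper.hh>

// Hypothetical sketch only — the real implementation additionally
// wires up the selected multithreading backend (e.g. TBB or Kokkos)
// when one is available.
namespace DumuxSketch {

void initialize(int& argc, char* argv[])
{
    // forward argc/argv to Dune's MPI helper exactly once, so that
    // later argument-free MPIHelper::instance() calls are valid
    Dune::MPIHelper::instance(argc, argv);

    // <backend-specific thread setup would go here>
}

} // namespace DumuxSketch
```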
diff --git a/test/porousmediumflow/solidenergy/main.cc b/test/porousmediumflow/solidenergy/main.cc
index f809e8e70aa83228b67a506cee01386eba2b1d9c..c76522c00c7db9f584fe037790051ba186232f72 100644
--- a/test/porousmediumflow/solidenergy/main.cc
+++ b/test/porousmediumflow/solidenergy/main.cc
@@ -28,6 +28,7 @@
 #include <dune/common/parallel/mpihelper.hh>
 #include <dune/common/timer.hh>
 
+#include <dumux/common/initialize.hh>
 #include <dumux/common/properties.hh>
 #include <dumux/common/parameters.hh>
 #include <dumux/common/dumuxmessage.hh>
@@ -51,8 +52,9 @@ int main(int argc, char** argv)
     // define the type tag for this problem
     using TypeTag = Properties::TTag::SolidEnergyTest;
 
-    // initialize MPI, finalize is done automatically on exit
-    const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv);
+    // maybe initialize MPI and/or multithreading backend
+    Dumux::initialize(argc, argv);
+    const auto& mpiHelper = Dune::MPIHelper::instance();
 
     // print dumux start message
     if (mpiHelper.rank() == 0)
diff --git a/test/porousmediumflow/tracer/2ptracer/main.cc b/test/porousmediumflow/tracer/2ptracer/main.cc
index 75374081c0a04a5cdee2fef6e9bb2e1a203cc409..e539e0afac63aea2038ded9fdca8a1cf66a7ef4c 100644
--- a/test/porousmediumflow/tracer/2ptracer/main.cc
+++ b/test/porousmediumflow/tracer/2ptracer/main.cc
@@ -28,8 +28,8 @@
 #include <dune/common/parallel/mpihelper.hh>
 #include <dune/common/timer.hh>
-#include <dune/grid/io/file/vtk.hh>
 
+#include <dumux/common/initialize.hh>
 #include <dumux/common/properties.hh>
 #include <dumux/common/parameters.hh>
 #include <dumux/common/dumuxmessage.hh>
@@ -56,8 +56,10 @@ int main(int argc, char** argv)
     //! define the type tags for this problem
     using TwoPTypeTag = Properties::TTag::TwoPIncompressibleTpfa;
     using TracerTypeTag = Properties::TTag::TwoPTracerTestTpfa;
-    //! initialize MPI, finalize is done automatically on exit
-    const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv);
+
+    // maybe initialize MPI and/or multithreading backend
+    Dumux::initialize(argc, argv);
+    const auto& mpiHelper = Dune::MPIHelper::instance();
 
     //! print dumux start message
     if (mpiHelper.rank() == 0)
diff --git a/test/porousmediumflow/tracer/constvel/main.cc b/test/porousmediumflow/tracer/constvel/main.cc
index c0f214a1aa985b34e422f06788beed58af782162..037e080c0fb4eee564b896845d6d986c5926b15c 100644
--- a/test/porousmediumflow/tracer/constvel/main.cc
+++ b/test/porousmediumflow/tracer/constvel/main.cc
@@ -30,6 +30,7 @@
 #include <dune/common/timer.hh>
 #include <dune/grid/io/file/vtk/vtksequencewriter.hh>
 
+#include <dumux/common/initialize.hh>
 #include <dumux/common/properties.hh>
 #include <dumux/common/parameters.hh>
 #include <dumux/common/dumuxmessage.hh>
@@ -50,8 +51,9 @@ int main(int argc, char** argv)
     //! define the type tag for this problem
     using TypeTag = Properties::TTag::TYPETAG;
 
-    //! initialize MPI, finalize is done automatically on exit
-    const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv);
+    // maybe initialize MPI and/or multithreading backend
+    Dumux::initialize(argc, argv);
+    const auto& mpiHelper = Dune::MPIHelper::instance();
 
     //! print dumux start message
     if (mpiHelper.rank() == 0)
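The 2ptracer main above drives two models from one program, yet initializes only once. A sketch with hypothetical stand-in functions for the two solver stages:

```cpp
#include <config.h>

#include <dune/common/parallel/mpihelper.hh>

#include <dumux/common/initialize.hh>

// hypothetical stand-ins for the two solver stages of the 2ptracer main
void runTwoPhaseFlow() {}
void runTracerTransport() {}

int main(int argc, char** argv)
{
    // a single initialize call covers both model runs in this process
    Dumux::initialize(argc, argv);
    const auto& mpiHelper = Dune::MPIHelper::instance();

    runTwoPhaseFlow();     // first stage: immiscible two-phase flow
    runTracerTransport();  // second stage: tracer transport on the result

    if (mpiHelper.rank() == 0) { /* final report */ }

    return 0;
}
```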
diff --git a/test/porousmediumflow/tracer/multiphase/main.cc b/test/porousmediumflow/tracer/multiphase/main.cc
index 85ad2836fb349bab581e92fa298d2b5fe4eb0b2d..a1989b15d952fe646c18845d02cc75b778a784fd 100644
--- a/test/porousmediumflow/tracer/multiphase/main.cc
+++ b/test/porousmediumflow/tracer/multiphase/main.cc
@@ -30,6 +30,7 @@
 #include <dune/common/timer.hh>
 #include <dune/grid/io/file/vtk/vtksequencewriter.hh>
 
+#include <dumux/common/initialize.hh>
 #include <dumux/common/properties.hh>
 #include <dumux/common/parameters.hh>
 #include <dumux/common/dumuxmessage.hh>
@@ -51,8 +52,9 @@ int main(int argc, char** argv)
     //! define the type tag for this problem
     using TypeTag = Properties::TTag::TYPETAG;
 
-    //! initialize MPI, finalize is done automatically on exit
-    const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv);
+    // maybe initialize MPI and/or multithreading backend
+    Dumux::initialize(argc, argv);
+    const auto& mpiHelper = Dune::MPIHelper::instance();
 
     //! print dumux start message
     if (mpiHelper.rank() == 0)
diff --git a/test/python/test_1p.py b/test/python/test_1p.py
index 80ff3088c16cd59e2ac085371dcc5716d01dd257..89b14accbcd953db2a4c40e8c1115856e78446ae 100755
--- a/test/python/test_1p.py
+++ b/test/python/test_1p.py
@@ -129,12 +129,11 @@ problem = Problem()
 model["Problem"] = Property.fromInstance(problem)
 
 # Initialize the GridVariables and the Assembler
+sol = blockVector(gridGeometry.numDofs)
 gridVars = GridVariables(problem=problem, model=model)
-assembler = FVAssembler(problem=problem, gridVariables=gridVars, model=model, diffMethod=diffMethod)
-sol = blockVector(assembler.numDofs)
 gridVars.init(sol)
-assembler.updateGridVariables(sol)
-print("numdofs", assembler.numDofs)
+assembler = FVAssembler(problem=problem, gridVariables=gridVars, model=model, diffMethod=diffMethod)
+print("num dofs: ", assembler.numDofs)
 
 # Assemble the Jacobian and the residual
 assembler.assembleJacobianAndResidual(sol)
diff --git a/test/timestepping/test_timestepmethods.cc b/test/timestepping/test_timestepmethods.cc
index 145b79aaf9c3c615803aa8a2c4378e8864128439..8e7bc733426d7648c1ea494fc1c179d43e1fa2be 100644
--- a/test/timestepping/test_timestepmethods.cc
+++ b/test/timestepping/test_timestepmethods.cc
@@ -10,9 +10,9 @@
 #include <dune/common/float_cmp.hh>
 #include <dune/common/exceptions.hh>
-#include <dune/common/parallel/mpihelper.hh>
 
 #include <dumux/io/format.hh>
+#include <dumux/common/initialize.hh>
 #include <dumux/common/variables.hh>
 #include <dumux/nonlinear/newtonsolver.hh>
@@ -123,8 +123,8 @@ int main(int argc, char* argv[])
 {
     using namespace Dumux;
 
-    // maybe initialize MPI
-    Dune::MPIHelper::instance(argc, argv);
+    // maybe initialize MPI and/or multithreading backend
+    Dumux::initialize(argc, argv);
 
     using Assembler = ScalarAssembler;
     using LinearSolver = ScalarLinearSolver;
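When a test never queries the MPI rank, the helper reference can be dropped entirely, as test_timestepmethods.cc above now does. A minimal sketch of that variant:

```cpp
#include <config.h>

#include <dumux/common/initialize.hh>

int main(int argc, char* argv[])
{
    // maybe initialize MPI and/or multithreading backend;
    // no MPIHelper reference is kept because the rank is never used
    Dumux::initialize(argc, argv);

    // ... test body ...

    return 0;
}
```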