diff --git a/dumux/common/timeloop.hh b/dumux/common/timeloop.hh
index e354c704308ea82d87849eff1b03d61bfb9d0d04..5e531b2673716b754267a480634bcbc8ef156585 100644
--- a/dumux/common/timeloop.hh
+++ b/dumux/common/timeloop.hh
@@ -156,7 +156,7 @@ public:
     {
         verbose_ =
             verbose &&
-            Dune::MPIHelper::getCollectiveCommunication().rank() == 0;
+            Dune::MPIHelper::getCommunication().rank() == 0;
 
         time_ = startTime;
         endTime_ = tEnd;
@@ -359,8 +359,8 @@ public:
     /*!
      * \brief Print final status and stops tracking the time.
      */
-    template< class Communicator = Dune::CollectiveCommunication<typename Dune::MPIHelper::MPICommunicator> >
-    void finalize(const Communicator& comm = Dune::MPIHelper::getCollectiveCommunication())
+    template< class Communicator = Dune::Communication<typename Dune::MPIHelper::MPICommunicator> >
+    void finalize(const Communicator& comm = Dune::MPIHelper::getCommunication())
     {
         auto cpuTime = timer_.stop();
 
diff --git a/dumux/discretization/cellcentered/mpfa/helper.hh b/dumux/discretization/cellcentered/mpfa/helper.hh
index f1b78cd9827fabe5f9d14bb04ca9588c976f066a..1d772bc190aac5f0796e62f2d4ae73b79222a79a 100644
--- a/dumux/discretization/cellcentered/mpfa/helper.hh
+++ b/dumux/discretization/cellcentered/mpfa/helper.hh
@@ -550,7 +550,7 @@ public:
         std::vector<bool> ghostVertices(gridView.size(dim), false);
 
         // if not run in parallel, skip the rest
-        if (Dune::MPIHelper::getCollectiveCommunication().size() == 1)
+        if (Dune::MPIHelper::getCommunication().size() == 1)
             return ghostVertices;
 
         // mpfa methods cannot yet handle ghost cells
diff --git a/dumux/io/grid/cpgridmanager.hh b/dumux/io/grid/cpgridmanager.hh
index dbaf27a9f183af9e113f9ef6c0ce39292c3fd369..1fba301fbe5150be3689a9f93a1ab9e67cf20e17 100644
--- a/dumux/io/grid/cpgridmanager.hh
+++ b/dumux/io/grid/cpgridmanager.hh
@@ -82,7 +82,7 @@ public:
      */
     void loadBalance()
     {
-        if (Dune::MPIHelper::getCollectiveCommunication().size() > 1)
+        if (Dune::MPIHelper::getCommunication().size() > 1)
             grid_->loadBalance();
     }
 
diff --git a/dumux/io/grid/gmshgriddatahandle.hh b/dumux/io/grid/gmshgriddatahandle.hh
index b8af221b7cc80ffa4dc5d429693836a91fca2412..bef8ef4cfa3b93718f6a87ebbf8487492cd6521b 100644
--- a/dumux/io/grid/gmshgriddatahandle.hh
+++ b/dumux/io/grid/gmshgriddatahandle.hh
@@ -152,13 +152,13 @@ struct GmshGridDataHandle<Dune::UGGrid<dimgrid>, GridFactory, Data>
         // all processes of the boundary markers vector is zero. If yes, assume
         // that the root process contains all markers and broadcast them.
         auto bmSizeMin = boundaryMarkers_.size();
-        Dune::MPIHelper::getCollectiveCommunication().min(&bmSizeMin, 1);
+        Dune::MPIHelper::getCommunication().min(&bmSizeMin, 1);
         if (bmSizeMin == 0)
         {
             auto bmSize = boundaryMarkers_.size();
-            Dune::MPIHelper::getCollectiveCommunication().broadcast(&bmSize, 1, 0);
+            Dune::MPIHelper::getCommunication().broadcast(&bmSize, 1, 0);
             boundaryMarkers_.resize(bmSize);
-            Dune::MPIHelper::getCollectiveCommunication().broadcast(&boundaryMarkers_.front(), bmSize, 0);
+            Dune::MPIHelper::getCommunication().broadcast(&boundaryMarkers_.front(), bmSize, 0);
         }
     }
 
diff --git a/dumux/io/grid/gridmanager_alu.hh b/dumux/io/grid/gridmanager_alu.hh
index 3d0d16a036da1efb8348ebb70eba6a08e465f5b8..95db21e6d07d3c202f1b3b1f2ee51d42223249a0 100644
--- a/dumux/io/grid/gridmanager_alu.hh
+++ b/dumux/io/grid/gridmanager_alu.hh
@@ -82,7 +82,7 @@ public:
                 std::cerr << "Warning: You are using a deprecated restart mechanism. The usage will change in the future.\n";
             }
 
-            const int rank = Dune::MPIHelper::getCollectiveCommunication().rank();
+            const int rank = Dune::MPIHelper::getCommunication().rank();
             const std::string name = getParamFromGroup<std::string>(modelParamGroup, "Problem.Name");
             std::ostringstream oss;
             oss << name << "_time=" << restartTime << "_rank=" << rank << ".grs";
@@ -187,7 +187,7 @@ public:
             {
                 auto gridFactory = std::make_unique<Dune::GridFactory<Grid>>();
 
-                if (Dune::MPIHelper::getCollectiveCommunication().rank() == 0)
+                if (Dune::MPIHelper::getCommunication().rank() == 0)
                 Dune::GmshReader<Grid>::read(*gridFactory, fileName, verbose, boundarySegments);
 
                 ParentType::gridPtr() = std::shared_ptr<Grid>(gridFactory->createGrid());
diff --git a/dumux/io/grid/gridmanager_base.hh b/dumux/io/grid/gridmanager_base.hh
index 2e79542caad81b907fd34f1dde797857f06993cd..9d90cbc6e10680d58d52c09737283dfca6c2e29e 100644
--- a/dumux/io/grid/gridmanager_base.hh
+++ b/dumux/io/grid/gridmanager_base.hh
@@ -95,7 +95,7 @@ public:
      */
     void loadBalance()
     {
-        if (Dune::MPIHelper::getCollectiveCommunication().size() > 1)
+        if (Dune::MPIHelper::getCommunication().size() > 1)
         {
             // if we may have dgf parameters use load balancing of the dgf pointer
             if(enableDgfGridPointer_)
@@ -230,7 +230,7 @@ protected:
         // VTK file formats for unstructured grids
         else if (extension == "vtu" || extension == "vtp")
         {
-            if (Dune::MPIHelper::getCollectiveCommunication().size() > 1)
+            if (Dune::MPIHelper::getCommunication().size() > 1)
                 DUNE_THROW(Dune::NotImplemented, "Reading grids in parallel from VTK file formats is currently not supported!");
 
             VTKReader vtkReader(fileName);
diff --git a/dumux/io/grid/gridmanager_sub.hh b/dumux/io/grid/gridmanager_sub.hh
index fadeff1335996969a8b93ade27db946d086d0f39..b0d2ef6ccce398af0d6b61bdcdf23ca657e5e305 100644
--- a/dumux/io/grid/gridmanager_sub.hh
+++ b/dumux/io/grid/gridmanager_sub.hh
@@ -107,7 +107,7 @@ public:
      */
     void loadBalance()
     {
-        if (Dune::MPIHelper::getCollectiveCommunication().size() > 1)
+        if (Dune::MPIHelper::getCommunication().size() > 1)
             this->grid().loadBalance();
     }
 
diff --git a/dumux/io/grid/gridmanager_ug.hh b/dumux/io/grid/gridmanager_ug.hh
index ab9b043c5bf8f1e98d6506d38825cbde4c53fba1..d624f1499ef91f212a178e7f3876a5148e27a050 100644
--- a/dumux/io/grid/gridmanager_ug.hh
+++ b/dumux/io/grid/gridmanager_ug.hh
@@ -114,7 +114,7 @@ public:
      */
     void loadBalance()
     {
-        if (Dune::MPIHelper::getCollectiveCommunication().size() > 1)
+        if (Dune::MPIHelper::getCommunication().size() > 1)
         {
             // if we may have dgf parameters use load balancing of the dgf pointer
             if(ParentType::enableDgfGridPointer_)
diff --git a/dumux/io/grid/porenetwork/gridmanager.hh b/dumux/io/grid/porenetwork/gridmanager.hh
index 83aaf35a90b7a1fe3db888a4d6f29084bc394554..2b0d1fe3486d4883eb8a4944255f4f44f62ef904 100644
--- a/dumux/io/grid/porenetwork/gridmanager.hh
+++ b/dumux/io/grid/porenetwork/gridmanager.hh
@@ -168,7 +168,7 @@ public:
      */
     void loadBalance()
     {
-        if (Dune::MPIHelper::getCollectiveCommunication().size() > 1)
+        if (Dune::MPIHelper::getCommunication().size() > 1)
         {
             // if we may have dgf parameters use load balancing of the dgf pointer
             if (enableDgfGridPointer_)
diff --git a/dumux/io/grid/porenetwork/structuredlatticegridcreator.hh b/dumux/io/grid/porenetwork/structuredlatticegridcreator.hh
index 4e1024fd6733d8c473e6d49e6f46882e0fedfee2..a6e81a102b97ef444ee8518925e8010591860f9c 100644
--- a/dumux/io/grid/porenetwork/structuredlatticegridcreator.hh
+++ b/dumux/io/grid/porenetwork/structuredlatticegridcreator.hh
@@ -127,7 +127,7 @@ public:
      */
     void loadBalance()
     {
-        if (Dune::MPIHelper::getCollectiveCommunication().size() > 1)
+        if (Dune::MPIHelper::getCommunication().size() > 1)
             gridPtr_->loadBalance();
     }
 
diff --git a/dumux/io/vtk/intersectionwriter.hh b/dumux/io/vtk/intersectionwriter.hh
index d87a63899f0acb369a81c92e7110500b62eae2e1..b944226578cf1459af605117de7514d68d55bd2e 100644
--- a/dumux/io/vtk/intersectionwriter.hh
+++ b/dumux/io/vtk/intersectionwriter.hh
@@ -130,7 +130,7 @@ public:
     using Point = Corner;
     using PointIterator = CornerIterator;
     using ConnectivityWriter = Dune::VTK::NonConformingConnectivityWriter<Cell>;
-    using CollectiveCommunication = typename GridView::CollectiveCommunication;
+    using Communication = typename GridView::CollectiveCommunication;
 
     explicit NonConformingIntersectionIteratorFactory(const GridView& gv)
     : gridView_(gv) {}
@@ -156,7 +156,7 @@ public:
     ConnectivityWriter makeConnectivity() const
     { return ConnectivityWriter(); }
 
-    const CollectiveCommunication& comm() const
+    const Communication& comm() const
     { return gridView_.comm(); }
 
 private:
diff --git a/dumux/io/vtk/vtkreader.hh b/dumux/io/vtk/vtkreader.hh
index ea65e39f3e4347aed7adfcc93c3c384f76c18a4b..db797d60ba2b900693218addbab053ac8eb41f6e 100644
--- a/dumux/io/vtk/vtkreader.hh
+++ b/dumux/io/vtk/vtkreader.hh
@@ -64,7 +64,7 @@ public:
     explicit VTKReader(const std::string& fileName)
     {
         using namespace tinyxml2;
-        fileName_ = Dune::MPIHelper::getCollectiveCommunication().size() > 1 ?
+        fileName_ = Dune::MPIHelper::getCommunication().size() > 1 ?
                         getProcessFileName_(fileName) : fileName;
 
         const auto eResult = doc_.LoadFile(fileName_.c_str());
@@ -193,7 +193,7 @@ private:
 
         // get the first piece node
         const XMLElement* pieceNode = getPieceNode_(pDoc, pvtkFileName);
-        const auto myrank = Dune::MPIHelper::getCollectiveCommunication().rank();
+        const auto myrank = Dune::MPIHelper::getCommunication().rank();
         for (int rank = 0; rank < myrank; ++rank)
         {
             pieceNode = pieceNode->NextSiblingElement("Piece");
diff --git a/dumux/linear/amgbackend.hh b/dumux/linear/amgbackend.hh
index d83b7533cbdd33fa4c442e3b42cb57cc323be8bb..468d4f7dd941eb746d0bf9508b3dc2bafacd3a97 100644
--- a/dumux/linear/amgbackend.hh
+++ b/dumux/linear/amgbackend.hh
@@ -57,7 +57,7 @@ public:
      */
     AMGBiCGSTABBackend(const std::string& paramGroup = "")
     : LinearSolver(paramGroup)
-    , isParallel_(Dune::MPIHelper::getCollectiveCommunication().size() > 1)
+    , isParallel_(Dune::MPIHelper::getCommunication().size() > 1)
     {
         if (isParallel_)
             DUNE_THROW(Dune::InvalidStateException, "Using sequential constructor for parallel run. Use signature with gridView and dofMapper!");
@@ -77,7 +77,7 @@ public:
                        const std::string& paramGroup = "")
     : LinearSolver(paramGroup)
 #if HAVE_MPI
-    , isParallel_(Dune::MPIHelper::getCollectiveCommunication().size() > 1)
+    , isParallel_(Dune::MPIHelper::getCommunication().size() > 1)
 #endif
     {
 #if HAVE_MPI
diff --git a/dumux/linear/istlsolverfactorybackend.hh b/dumux/linear/istlsolverfactorybackend.hh
index baff4827d0eda08db34576a67084a1f209786d2e..9c90f0f83e1e5a90242d406f92296a558260fb1c 100644
--- a/dumux/linear/istlsolverfactorybackend.hh
+++ b/dumux/linear/istlsolverfactorybackend.hh
@@ -126,7 +126,7 @@ public:
      */
     IstlSolverFactoryBackend(const std::string& paramGroup = "")
     : paramGroup_(paramGroup)
-    , isParallel_(Dune::MPIHelper::getCollectiveCommunication().size() > 1)
+    , isParallel_(Dune::MPIHelper::getCommunication().size() > 1)
     {
         if (isParallel_)
             DUNE_THROW(Dune::InvalidStateException, "Using sequential constructor for parallel run. Use signature with gridView and dofMapper!");
@@ -147,7 +147,7 @@ public:
                              const std::string& paramGroup = "")
     : paramGroup_(paramGroup)
 #if HAVE_MPI
-    , isParallel_(Dune::MPIHelper::getCollectiveCommunication().size() > 1)
+    , isParallel_(Dune::MPIHelper::getCommunication().size() > 1)
 #endif
     {
         firstCall_ = true;
diff --git a/dumux/linear/pdesolver.hh b/dumux/linear/pdesolver.hh
index 2303b8cbc50c07c68a42fe65f979641d5ca193ca..b4cdc6294aea2ff5482f055337ec2c0ea302a096 100644
--- a/dumux/linear/pdesolver.hh
+++ b/dumux/linear/pdesolver.hh
@@ -327,7 +327,7 @@ private:
     //! initialize the parameters by reading from the parameter tree
     void initParams_(const std::string& group = "")
     {
-        verbose_ = (Dune::MPIHelper::getCollectiveCommunication().rank() == 0);
+        verbose_ = (Dune::MPIHelper::getCommunication().rank() == 0);
     }
 
     //! switches on/off verbosity
diff --git a/dumux/multidomain/newtonsolver.hh b/dumux/multidomain/newtonsolver.hh
index 360413d737fb8ea4a58bef55f391f69bafef9af4..7f2f93da0bf58676a89c0fe0b580b0e0d184e9fc 100644
--- a/dumux/multidomain/newtonsolver.hh
+++ b/dumux/multidomain/newtonsolver.hh
@@ -46,7 +46,7 @@ using GetPVSwitchMultiDomain = Dune::Std::detected_or<int, DetectPVSwitchMultiDo
  */
 template <class Assembler, class LinearSolver, class CouplingManager,
           class Reassembler = DefaultPartialReassembler,
-          class Comm = Dune::CollectiveCommunication<Dune::MPIHelper::MPICommunicator> >
+          class Comm = Dune::Communication<Dune::MPIHelper::MPICommunicator> >
 class MultiDomainNewtonSolver: public NewtonSolver<Assembler, LinearSolver, Reassembler, Comm>
 {
     using ParentType = NewtonSolver<Assembler, LinearSolver, Reassembler, Comm>;
@@ -74,7 +74,7 @@ public:
     MultiDomainNewtonSolver(std::shared_ptr<Assembler> assembler,
                             std::shared_ptr<LinearSolver> linearSolver,
                             std::shared_ptr<CouplingManager> couplingManager,
-                            const Comm& comm = Dune::MPIHelper::getCollectiveCommunication(),
+                            const Comm& comm = Dune::MPIHelper::getCommunication(),
                             const std::string& paramGroup = "")
     : ParentType(assembler, linearSolver, comm, paramGroup)
     , couplingManager_(couplingManager)
diff --git a/dumux/nonlinear/newtonsolver.hh b/dumux/nonlinear/newtonsolver.hh
index 0b00ce33dce582526401cf89c4b6a2580adc41dd..0701f314f95fb3d7df67242fffa8dfd3e1352e5d 100644
--- a/dumux/nonlinear/newtonsolver.hh
+++ b/dumux/nonlinear/newtonsolver.hh
@@ -211,7 +211,7 @@ using BlockType = typename BlockTypeHelper<SolutionVector, Dune::IsNumber<Soluti
  */
 template <class Assembler, class LinearSolver,
           class Reassembler = PartialReassembler<Assembler>,
-          class Comm = Dune::CollectiveCommunication<Dune::MPIHelper::MPICommunicator> >
+          class Comm = Dune::Communication<Dune::MPIHelper::MPICommunicator> >
 class NewtonSolver : public PDESolver<Assembler, LinearSolver>
 {
     using ParentType = PDESolver<Assembler, LinearSolver>;
@@ -244,7 +244,7 @@ public:
      */
     NewtonSolver(std::shared_ptr<Assembler> assembler,
                  std::shared_ptr<LinearSolver> linearSolver,
-                 const Communication& comm = Dune::MPIHelper::getCollectiveCommunication(),
+                 const Communication& comm = Dune::MPIHelper::getCommunication(),
                  const std::string& paramGroup = "")
     : ParentType(assembler, linearSolver)
     , endIterMsgStream_(std::ostringstream::out)
diff --git a/examples/1ptracer/doc/main.md b/examples/1ptracer/doc/main.md
index 045bfc3e38c43c60943f7dd3b217c5f9125288bb..3d9741c64f1020bfde92f41c00e350df0be3c3ef 100644
--- a/examples/1ptracer/doc/main.md
+++ b/examples/1ptracer/doc/main.md
@@ -199,7 +199,7 @@ problem defined in `problem_1p.hh`. Let us now write this solution to a VTK file
 
     // print overall CPU time required for assembling and solving the 1p problem.
     timer.stop();
-    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
+    const auto& comm = Dune::MPIHelper::getCommunication();
     std::cout << "Simulation took " << timer.elapsed() << " seconds on "
               << comm.size() << " processes.\n"
               << "The cumulative CPU time was " << timer.elapsed()*comm.size() << " seconds.\n";
diff --git a/examples/1ptracer/main.cc b/examples/1ptracer/main.cc
index 21569bc52d9f5fd532aa140a8be8f529de6573b5..4faeafcdbf54aaf8a012c11a71d50aba9669a515 100644
--- a/examples/1ptracer/main.cc
+++ b/examples/1ptracer/main.cc
@@ -161,7 +161,7 @@ int main(int argc, char** argv) try
 
     // print overall CPU time required for assembling and solving the 1p problem.
     timer.stop();
-    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
+    const auto& comm = Dune::MPIHelper::getCommunication();
     std::cout << "Simulation took " << timer.elapsed() << " seconds on "
               << comm.size() << " processes.\n"
               << "The cumulative CPU time was " << timer.elapsed()*comm.size() << " seconds.\n";
diff --git a/test/freeflow/navierstokes/channel/1d/main.cc b/test/freeflow/navierstokes/channel/1d/main.cc
index 6a30b5fa83680656cf1bd1fdc120161cd0913427..a6c9d6bd917cad99948a6d4cab0c3249a70a4619 100644
--- a/test/freeflow/navierstokes/channel/1d/main.cc
+++ b/test/freeflow/navierstokes/channel/1d/main.cc
@@ -163,7 +163,7 @@ int main(int argc, char** argv)
 
     timer.stop();
 
-    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
+    const auto& comm = Dune::MPIHelper::getCommunication();
     std::cout << "Simulation took " << timer.elapsed() << " seconds on "
               << comm.size() << " processes.\n"
               << "The cumulative CPU time was " << timer.elapsed()*comm.size() << " seconds.\n";
diff --git a/test/freeflow/navierstokes/channel/3d/main.cc b/test/freeflow/navierstokes/channel/3d/main.cc
index 40df458c2b91d7bb9eaeaad5bfd6cd977d3f9fe8..2ba127d34d836c842abf182306e09bfc635bb747 100644
--- a/test/freeflow/navierstokes/channel/3d/main.cc
+++ b/test/freeflow/navierstokes/channel/3d/main.cc
@@ -238,7 +238,7 @@ flux.addSurface("middle", p0middle, p1middle);
 
     std::cout << "analyticalFlux: " << problem->analyticalFlux() << std::endl;
 
-    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
+    const auto& comm = Dune::MPIHelper::getCommunication();
     std::cout << "Simulation took " << timer.elapsed() << " seconds on "
               << comm.size() << " processes.\n"
               << "The cumulative CPU time was " << timer.elapsed()*comm.size() << " seconds.\n";
diff --git a/test/freeflow/navierstokes/donea/main.cc b/test/freeflow/navierstokes/donea/main.cc
index 9909f86cf7bbc5d4f676ec2e05aa28660c975d8f..7986ac5af9fe636e2ba6b1690114c82d4b3eb8b7 100644
--- a/test/freeflow/navierstokes/donea/main.cc
+++ b/test/freeflow/navierstokes/donea/main.cc
@@ -167,7 +167,7 @@ int main(int argc, char** argv)
 
     timer.stop();
 
-    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
+    const auto& comm = Dune::MPIHelper::getCommunication();
     std::cout << "Simulation took " << timer.elapsed() << " seconds on "
               << comm.size() << " processes.\n"
               << "The cumulative CPU time was " << timer.elapsed()*comm.size() << " seconds.\n";
diff --git a/test/freeflow/navierstokes/kovasznay/main.cc b/test/freeflow/navierstokes/kovasznay/main.cc
index 544559710b55976116919ec060f4d6b0f77e78cf..c8b054a9b24f5eb71dd9053ce6e84d5a46b0e545 100644
--- a/test/freeflow/navierstokes/kovasznay/main.cc
+++ b/test/freeflow/navierstokes/kovasznay/main.cc
@@ -172,7 +172,7 @@ int main(int argc, char** argv)
 
     timer.stop();
 
-    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
+    const auto& comm = Dune::MPIHelper::getCommunication();
     std::cout << "Simulation took " << timer.elapsed() << " seconds on "
               << comm.size() << " processes.\n"
               << "The cumulative CPU time was " << timer.elapsed()*comm.size() << " seconds.\n";
diff --git a/test/freeflow/navierstokes/periodic/main.cc b/test/freeflow/navierstokes/periodic/main.cc
index a3b0f416bdd971d5e2c6341d462bd133dde1922e..aa562e9e9bebc63de1f9361dc5994f63c6cab1b3 100644
--- a/test/freeflow/navierstokes/periodic/main.cc
+++ b/test/freeflow/navierstokes/periodic/main.cc
@@ -165,7 +165,7 @@ int main(int argc, char** argv)
     vtkWriter.write(1.0);
     timer.stop();
 
-    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
+    const auto& comm = Dune::MPIHelper::getCommunication();
     std::cout << "Simulation took " << timer.elapsed() << " seconds on "
               << comm.size() << " processes.\n"
               << "The cumulative CPU time was " << timer.elapsed()*comm.size() << " seconds.\n";
diff --git a/test/freeflow/navierstokes/sincos/main.cc b/test/freeflow/navierstokes/sincos/main.cc
index 510cba6fac6874f410ac665e4b0c5bc69139a91d..563a6536b7d709506c0d7303d6a91ddec1773b70 100644
--- a/test/freeflow/navierstokes/sincos/main.cc
+++ b/test/freeflow/navierstokes/sincos/main.cc
@@ -226,7 +226,7 @@ int main(int argc, char** argv)
 
         timer.stop();
 
-        const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
+        const auto& comm = Dune::MPIHelper::getCommunication();
         std::cout << "Simulation took " << timer.elapsed() << " seconds on "
                 << comm.size() << " processes.\n"
                 << "The cumulative CPU time was " << timer.elapsed()*comm.size() << " seconds.\n";
diff --git a/test/geomechanics/elastic/main.cc b/test/geomechanics/elastic/main.cc
index a6b49e382ffef415c5859b13a31a0c25b61e9e9f..68e5b12b234c2b5470ede07afa21073be440c3a0 100644
--- a/test/geomechanics/elastic/main.cc
+++ b/test/geomechanics/elastic/main.cc
@@ -132,7 +132,7 @@ int main(int argc, char** argv)
     vtkWriter.write(1.0);
 
     // print time and say goodbye
-    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
+    const auto& comm = Dune::MPIHelper::getCommunication();
     if (mpiHelper.rank() == 0)
         std::cout << "Simulation took " << timer.elapsed() << " seconds on "
                   << comm.size() << " processes.\n"
diff --git a/test/geomechanics/poroelastic/main.cc b/test/geomechanics/poroelastic/main.cc
index 02e81a968a09b9acbc74e570af5ea96f5b8dd185..8f5587c2ac7066315fd0c7043aa20cecc469d60f 100644
--- a/test/geomechanics/poroelastic/main.cc
+++ b/test/geomechanics/poroelastic/main.cc
@@ -201,7 +201,7 @@ int main(int argc, char** argv)
     vtkWriter.write(1.0);
 
     // print time and say goodbye
-    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
+    const auto& comm = Dune::MPIHelper::getCommunication();
     if (mpiHelper.rank() == 0)
         std::cout << "Simulation took " << timer.elapsed() << " seconds on "
                   << comm.size() << " processes.\n"
diff --git a/test/multidomain/boundary/darcydarcy/1p_1p/main.cc b/test/multidomain/boundary/darcydarcy/1p_1p/main.cc
index 1cbab4d60ea4727aea4c5935d655b4adf9c97263..1005c4dcca0f1b1cf35697455791cd2b63470465 100644
--- a/test/multidomain/boundary/darcydarcy/1p_1p/main.cc
+++ b/test/multidomain/boundary/darcydarcy/1p_1p/main.cc
@@ -236,7 +236,7 @@ int main(int argc, char** argv)
         timeLoop->setTimeStepSize(nonLinearSolver.suggestTimeStepSize(timeLoop->timeStepSize()));
     }
 
-    timeLoop->finalize(mpiHelper.getCollectiveCommunication());
+    timeLoop->finalize(mpiHelper.getCommunication());
 
     //////////////////////////////////////////////////////////////////////////
     // write out a combined vtu for vtu comparison in the testing framework
diff --git a/test/multidomain/boundary/darcydarcy/1p_2p/main.cc b/test/multidomain/boundary/darcydarcy/1p_2p/main.cc
index 385d63c44812ee55f494ce3cc7fd6b730f902860..525c22f537ee4e7724bcd87f09c5f3c8a434520b 100644
--- a/test/multidomain/boundary/darcydarcy/1p_2p/main.cc
+++ b/test/multidomain/boundary/darcydarcy/1p_2p/main.cc
@@ -205,7 +205,7 @@ int main(int argc, char** argv)
         timeLoop->setTimeStepSize(nonLinearSolver.suggestTimeStepSize(timeLoop->timeStepSize()));
     }
 
-    timeLoop->finalize(mpiHelper.getCollectiveCommunication());
+    timeLoop->finalize(mpiHelper.getCommunication());
 
     ////////////////////////////////////////////////////////////
     // finalize, print dumux message to say goodbye
diff --git a/test/multidomain/boundary/freeflowporenetwork/1p_1p/CMakeLists.txt b/test/multidomain/boundary/freeflowporenetwork/1p_1p/CMakeLists.txt
index afa6e0e818ced924abc53f6651f4c2b1a645d9e6..417fa71f90d95c0f3b52ef7288b98eaa26242267 100644
--- a/test/multidomain/boundary/freeflowporenetwork/1p_1p/CMakeLists.txt
+++ b/test/multidomain/boundary/freeflowporenetwork/1p_1p/CMakeLists.txt
@@ -3,7 +3,7 @@ add_input_file_links()
 dumux_add_test(NAME test_md_boundary_ff1p_pnm1p
               LABELS multidomain multidomain_boundary freeflowpnm 1p navierstokes
               SOURCES main.cc
-              CMAKE_GUARD HAVE_UMFPACK
+              CMAKE_GUARD "( HAVE_UMFPACK AND dune-foamgrid_FOUND )"
               COMMAND ${CMAKE_SOURCE_DIR}/bin/testing/runtest.py
               CMD_ARGS  --script fuzzy
                         --files ${CMAKE_SOURCE_DIR}/test/references/test_md_boundary_ff1p_pnm1p_ff-reference.vtu
diff --git a/test/multidomain/embedded/1d3d/1p2c_richards2c/main.cc b/test/multidomain/embedded/1d3d/1p2c_richards2c/main.cc
index a6f440007a9be537d0ac9875809fb2e897b0d859..2bbddbda7f4f09df43ee32824a7df34f0d4d8c5b 100644
--- a/test/multidomain/embedded/1d3d/1p2c_richards2c/main.cc
+++ b/test/multidomain/embedded/1d3d/1p2c_richards2c/main.cc
@@ -408,7 +408,7 @@ int main(int argc, char** argv)
 
     outFile.close();
 
-    timeLoop->finalize(mpiHelper.getCollectiveCommunication());
+    timeLoop->finalize(mpiHelper.getCommunication());
 
     ////////////////////////////////////////////////////////////
     // finalize, print dumux message to say goodbye
diff --git a/test/multidomain/embedded/1d3d/1p_richards/main.cc b/test/multidomain/embedded/1d3d/1p_richards/main.cc
index 79357b586c54602b067bacbdd65b8dc027d90c47..941471f3a40b21079cbc28ff5de3b8b63aae7cfd 100644
--- a/test/multidomain/embedded/1d3d/1p_richards/main.cc
+++ b/test/multidomain/embedded/1d3d/1p_richards/main.cc
@@ -210,7 +210,7 @@ int main(int argc, char** argv)
         timeLoop->setTimeStepSize(nonLinearSolver.suggestTimeStepSize(timeLoop->timeStepSize()));
     }
 
-    timeLoop->finalize(mpiHelper.getCollectiveCommunication());
+    timeLoop->finalize(mpiHelper.getCommunication());
 
     ////////////////////////////////////////////////////////////
     // finalize, print dumux message to say goodbye
diff --git a/test/porenetwork/1p/main.cc b/test/porenetwork/1p/main.cc
index c0004b776b3e13bd6c75f9800855f0a432971534..41155ab8cbb3627c98733e9596a2dfb610f62178 100644
--- a/test/porenetwork/1p/main.cc
+++ b/test/porenetwork/1p/main.cc
@@ -145,7 +145,7 @@ int main(int argc, char** argv)
 
     timer.stop();
 
-    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
+    const auto& comm = Dune::MPIHelper::getCommunication();
     if (mpiHelper.rank() == 0)
         std::cout << "Simulation took " << timer.elapsed() << " seconds on "
                   << comm.size() << " processes.\n"
diff --git a/test/porousmediumflow/1p/compressible/stationary/main.cc b/test/porousmediumflow/1p/compressible/stationary/main.cc
index 9141d2337452f843622e404e5e2d1116f0700e6d..e587eab9b8c9a8fb8f6e61474b6582bb6af12dc2 100644
--- a/test/porousmediumflow/1p/compressible/stationary/main.cc
+++ b/test/porousmediumflow/1p/compressible/stationary/main.cc
@@ -126,7 +126,7 @@ int main(int argc, char** argv)
 
     timer.stop();
 
-    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
+    const auto& comm = Dune::MPIHelper::getCommunication();
     std::cout << "Simulation took " << timer.elapsed() << " seconds on "
               << comm.size() << " processes.\n"
               << "The cumulative CPU time was " << timer.elapsed()*comm.size() << " seconds.\n";
diff --git a/test/porousmediumflow/1p/incompressible/main.cc b/test/porousmediumflow/1p/incompressible/main.cc
index 6c7ab9617520b0def839023894448556f37ceee9..2bf4beeae80dd2a5ee84c2dfc2181d0ba16472b6 100644
--- a/test/porousmediumflow/1p/incompressible/main.cc
+++ b/test/porousmediumflow/1p/incompressible/main.cc
@@ -197,7 +197,7 @@ int main(int argc, char** argv)
         }
     }
 
-    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
+    const auto& comm = Dune::MPIHelper::getCommunication();
     if (mpiHelper.rank() == 0)
         std::cout << "Simulation took " << timer.elapsed() << " seconds on "
                   << comm.size() << " processes.\n"
diff --git a/test/porousmediumflow/1p/periodicbc/main.cc b/test/porousmediumflow/1p/periodicbc/main.cc
index cad884638e17005c6f2aaee8933218af82da35be..2ee30d141bc7e1d27a0d458aafeafc6ab3e6bc13 100644
--- a/test/porousmediumflow/1p/periodicbc/main.cc
+++ b/test/porousmediumflow/1p/periodicbc/main.cc
@@ -143,7 +143,7 @@ int main(int argc, char** argv)
 
     timer.stop();
 
-    const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
+    const auto& comm = Dune::MPIHelper::getCommunication();
     if (mpiHelper.rank() == 0)
         std::cout << "Simulation took " << timer.elapsed() << " seconds on "
                   << comm.size() << " processes.\n"
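
For reference, a minimal standalone sketch of the rename this patch applies throughout, assuming dune-common >= 2.7 (where Dune::CollectiveCommunication and MPIHelper::getCollectiveCommunication() are deprecated in favour of Dune::Communication and MPIHelper::getCommunication()). The file name check_comm.cc is illustrative only and not part of the patch above:

// check_comm.cc -- illustrative sketch, assumes dune-common >= 2.7
#include <iostream>
#include <type_traits>
#include <dune/common/parallel/mpihelper.hh>

int main(int argc, char** argv)
{
    // set up MPI if available (a no-op in sequential builds)
    Dune::MPIHelper::instance(argc, argv);

    // deprecated spelling:
    //   const auto& comm = Dune::MPIHelper::getCollectiveCommunication();
    // current spelling:
    const auto& comm = Dune::MPIHelper::getCommunication();

    // the communication type is renamed analogously; this alias mirrors the
    // new default template argument used for NewtonSolver in this patch
    using Comm = Dune::Communication<Dune::MPIHelper::MPICommunicator>;
    static_assert(std::is_same_v<std::decay_t<decltype(comm)>, Comm>);

    if (comm.rank() == 0)
        std::cout << "running on " << comm.size() << " process(es)\n";

    return 0;
}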