From b263389623b6fd998534c9cbbaaabe419d74a410 Mon Sep 17 00:00:00 2001
From: Kilian Weishaupt <kilian.weishaupt@iws.uni-stuttgart.de>
Date: Tue, 14 Apr 2020 09:49:48 +0200
Subject: [PATCH] [sequential] Use new vector handles to replace deprecated
 vertexhandles

---
 .../2p/sequential/impes/gridadaptionindicator.hh          | 4 ++--
 .../2p2c/sequential/fv2dtransportadaptive.hh              | 4 ++--
 .../2p2c/sequential/fv3dpressureadaptive.hh               | 3 ++-
 .../2p2c/sequential/fv3dtransportadaptive.hh              | 4 ++--
 .../fvmpfal3dinteractionvolumecontaineradaptive.hh        | 1 -
 .../2p2c/sequential/fvpressuremultiphysics.hh             | 4 ++--
 dumux/porousmediumflow/2p2c/sequential/fvtransport.hh     | 8 ++++----
 .../2p2c/sequential/fvtransportmultiphysics.hh            | 4 ++--
 .../porousmediumflow/sequential/cellcentered/transport.hh | 8 ++++----
 .../porousmediumflow/sequential/variableclassadaptive.hh  | 4 ++--
 10 files changed, 22 insertions(+), 22 deletions(-)

diff --git a/dumux/porousmediumflow/2p/sequential/impes/gridadaptionindicator.hh b/dumux/porousmediumflow/2p/sequential/impes/gridadaptionindicator.hh
index 60fa0c7054..b960ae7c9e 100644
--- a/dumux/porousmediumflow/2p/sequential/impes/gridadaptionindicator.hh
+++ b/dumux/porousmediumflow/2p/sequential/impes/gridadaptionindicator.hh
@@ -26,7 +26,7 @@
 
 #include <dumux/porousmediumflow/sequential/impetproperties.hh>
 #include <dumux/porousmediumflow/2p/sequential/properties.hh>
-#include <dumux/linear/vectorexchange.hh>
+#include <dumux/parallel/vectorcommdatahandle.hh>
 
 namespace Dumux {
 
@@ -144,7 +144,7 @@ public:
 
 #if HAVE_MPI
     // communicate updated values
-    using DataHandle = VectorExchange<ElementMapper, ScalarSolutionType>;
+    using DataHandle = VectorCommDataHandleEqual<ElementMapper, ScalarSolutionType, 0/*elementCodim*/>;
     DataHandle dataHandle(problem_.elementMapper(), indicatorVector_);
     problem_.gridView().template communicate<DataHandle>(dataHandle,
                                                          Dune::InteriorBorder_All_Interface,
diff --git a/dumux/porousmediumflow/2p2c/sequential/fv2dtransportadaptive.hh b/dumux/porousmediumflow/2p2c/sequential/fv2dtransportadaptive.hh
index fd19773d84..b7937fb722 100644
--- a/dumux/porousmediumflow/2p2c/sequential/fv2dtransportadaptive.hh
+++ b/dumux/porousmediumflow/2p2c/sequential/fv2dtransportadaptive.hh
@@ -28,7 +28,7 @@
 #include <dune/common/float_cmp.hh>
 
 #include <dumux/common/math.hh>
-#include <dumux/linear/vectorexchange.hh>
+#include <dumux/parallel/vectorcommdatahandle.hh>
 
 #include "adaptiveproperties.hh"
 #include "fvtransport.hh"
@@ -305,7 +305,7 @@ void FV2dTransport2P2CAdaptive<TypeTag>::update(const Scalar t, Scalar& dt, Tran
     // communicate updated values
     using SolutionTypes = GetProp<TypeTag, Properties::SolutionTypes>;
     using ElementMapper = typename SolutionTypes::ElementMapper;
-    using DataHandle = VectorExchange<ElementMapper, Dune::BlockVector<Dune::FieldVector<Scalar, 1> > >;
+    using DataHandle = VectorCommDataHandleEqual<ElementMapper, Dune::BlockVector<Dune::FieldVector<Scalar, 1> >, 0/*elementCodim*/>;
     for (int i = 0; i < updateVec.size(); i++)
     {
         DataHandle dataHandle(problem_.variables().elementMapper(), updateVec[i]);
diff --git a/dumux/porousmediumflow/2p2c/sequential/fv3dpressureadaptive.hh b/dumux/porousmediumflow/2p2c/sequential/fv3dpressureadaptive.hh
index 98ec017f9f..e4972f7d46 100644
--- a/dumux/porousmediumflow/2p2c/sequential/fv3dpressureadaptive.hh
+++ b/dumux/porousmediumflow/2p2c/sequential/fv3dpressureadaptive.hh
@@ -35,6 +35,7 @@
 #include <dumux/common/math.hh>
 #include <dumux/io/vtkmultiwriter.hh>
 #include <dumux/porousmediumflow/2p2c/sequential/adaptiveproperties.hh>
+#include <dumux/parallel/vectorcommdatahandle.hh>
 
 // include pressure model from Markus
 #include <dumux/porousmediumflow/sequential/cellcentered/mpfa/properties.hh>
@@ -233,7 +234,7 @@ public:
     using SolutionTypes = GetProp<TypeTag, Properties::SolutionTypes>;
     using ElementMapper = typename SolutionTypes::ElementMapper;
     using PressureSolution = GetPropType<TypeTag, Properties::PressureSolutionVector>;
-    using DataHandle = VectorExchange<ElementMapper, PressureSolution>;
+    using DataHandle = VectorCommDataHandleEqual<ElementMapper, PressureSolution, 0/*elementCodim*/>;
 
         DataHandle dataHandle(problem().variables().elementMapper(), this->pressure());
         problem().gridView().template communicate<DataHandle>(dataHandle,
diff --git a/dumux/porousmediumflow/2p2c/sequential/fv3dtransportadaptive.hh b/dumux/porousmediumflow/2p2c/sequential/fv3dtransportadaptive.hh
index 0a711471e1..0d62ab64ac 100644
--- a/dumux/porousmediumflow/2p2c/sequential/fv3dtransportadaptive.hh
+++ b/dumux/porousmediumflow/2p2c/sequential/fv3dtransportadaptive.hh
@@ -28,7 +28,7 @@
 #include <dune/common/float_cmp.hh>
 
 #include <dumux/common/math.hh>
-#include <dumux/linear/vectorexchange.hh>
+#include <dumux/parallel/vectorcommdatahandle.hh>
 
 #include "adaptiveproperties.hh"
 #include "fvtransport.hh"
@@ -303,7 +303,7 @@ void FV3dTransport2P2CAdaptive<TypeTag>::update(const Scalar t, Scalar& dt,
     // communicate updated values
     using SolutionTypes = GetProp<TypeTag, Properties::SolutionTypes>;
     using ElementMapper = typename SolutionTypes::ElementMapper;
-    using DataHandle = VectorExchange<ElementMapper, Dune::BlockVector<Dune::FieldVector<Scalar, 1> > >;
+    using DataHandle = VectorCommDataHandleEqual<ElementMapper, Dune::BlockVector<Dune::FieldVector<Scalar, 1> >, 0/*elementCodim*/>;
     for (int i = 0; i < updateVec.size(); i++)
     {
         DataHandle dataHandle(problem().variables().elementMapper(), updateVec[i]);
diff --git a/dumux/porousmediumflow/2p2c/sequential/fvmpfal3dinteractionvolumecontaineradaptive.hh b/dumux/porousmediumflow/2p2c/sequential/fvmpfal3dinteractionvolumecontaineradaptive.hh
index f5c1c2a69c..c6cccc8f62 100644
--- a/dumux/porousmediumflow/2p2c/sequential/fvmpfal3dinteractionvolumecontaineradaptive.hh
+++ b/dumux/porousmediumflow/2p2c/sequential/fvmpfal3dinteractionvolumecontaineradaptive.hh
@@ -26,7 +26,6 @@
 
 // dumux environment
 #include <dumux/porousmediumflow/2p/sequential/diffusion/mpfa/lmethod/3dinteractionvolumecontaineradaptive.hh>
-#include <dumux/linear/vectorexchange.hh>
 
 namespace Dumux {
 
diff --git a/dumux/porousmediumflow/2p2c/sequential/fvpressuremultiphysics.hh b/dumux/porousmediumflow/2p2c/sequential/fvpressuremultiphysics.hh
index 0d95a94d09..8c49192faf 100644
--- a/dumux/porousmediumflow/2p2c/sequential/fvpressuremultiphysics.hh
+++ b/dumux/porousmediumflow/2p2c/sequential/fvpressuremultiphysics.hh
@@ -28,7 +28,7 @@
 
 // dumux environment
 #include <dumux/porousmediumflow/2p2c/sequential/fvpressure.hh>
-#include <dumux/linear/vectorexchange.hh>
+#include <dumux/parallel/vectorcommdatahandle.hh>
 #include <dumux/material/constraintsolvers/compositionalflash.hh>
 
 namespace Dumux {
@@ -223,7 +223,7 @@ public:
 protected:
     #if HAVE_MPI
         using ElementMapper = typename SolutionTypes::ElementMapper;
-        using DataHandle = VectorExchange<ElementMapper, Dune::BlockVector<Dune::FieldVector<int, 1> > >;
+        using DataHandle = VectorCommDataHandleEqual<ElementMapper, Dune::BlockVector<Dune::FieldVector<int, 1> >, 0/*elementCodim*/>;
     #endif
 
     // subdomain map
diff --git a/dumux/porousmediumflow/2p2c/sequential/fvtransport.hh b/dumux/porousmediumflow/2p2c/sequential/fvtransport.hh
index ca31d80701..96df1dd835 100644
--- a/dumux/porousmediumflow/2p2c/sequential/fvtransport.hh
+++ b/dumux/porousmediumflow/2p2c/sequential/fvtransport.hh
@@ -33,7 +33,7 @@
 #include <dumux/porousmediumflow/2p2c/sequential/properties.hh>
 #include <dumux/material/constraintsolvers/compositionalflash.hh>
 #include <dumux/common/math.hh>
-#include <dumux/linear/vectorexchange.hh>
+#include <dumux/parallel/vectorcommdatahandle.hh>
 
 namespace Dumux {
 /*!
@@ -479,7 +479,7 @@ void FVTransport2P2C<TypeTag>::update(const Scalar t, Scalar& dt,
     // communicate updated values
     using SolutionTypes = GetProp<TypeTag, Properties::SolutionTypes>;
     using ElementMapper = typename SolutionTypes::ElementMapper;
-    using DataHandle = VectorExchange<ElementMapper, Dune::BlockVector<Dune::FieldVector<Scalar, 1> > >;
+    using DataHandle = VectorCommDataHandleEqual<ElementMapper, Dune::BlockVector<Dune::FieldVector<Scalar, 1> >, 0/*elementCodim*/>;
     for (int i = 0; i < updateVec.size(); i++)
     {
         DataHandle dataHandle(problem_.variables().elementMapper(), updateVec[i]);
@@ -490,7 +490,7 @@ void FVTransport2P2C<TypeTag>::update(const Scalar t, Scalar& dt,
 
     if (localTimeStepping_)
     {
-        using TimeDataHandle = VectorExchange<ElementMapper, std::vector<LocalTimesteppingData> >;
+        using TimeDataHandle = VectorCommDataHandleEqual<ElementMapper, std::vector<LocalTimesteppingData>, 0/*elementCodim*/>;
 
         TimeDataHandle timeDataHandle(problem_.elementMapper(), timeStepData_);
         problem_.gridView().template communicate<TimeDataHandle>(timeDataHandle,
@@ -1318,7 +1318,7 @@ void FVTransport2P2C<TypeTag>::updatedTargetDt_(Scalar &dt)
     // communicate updated values
     using SolutionTypes = GetProp<TypeTag, Properties::SolutionTypes>;
     using ElementMapper = typename SolutionTypes::ElementMapper;
-    using TimeDataHandle = VectorExchange<ElementMapper, std::vector<LocalTimesteppingData> >;
+    using TimeDataHandle = VectorCommDataHandleEqual<ElementMapper, std::vector<LocalTimesteppingData>, 0/*elementCodim*/>;
 
     TimeDataHandle timeDataHandle(problem_.elementMapper(), timeStepData_);
     problem_.gridView().template communicate<TimeDataHandle>(timeDataHandle,
diff --git a/dumux/porousmediumflow/2p2c/sequential/fvtransportmultiphysics.hh b/dumux/porousmediumflow/2p2c/sequential/fvtransportmultiphysics.hh
index fb93249722..4b962db590 100644
--- a/dumux/porousmediumflow/2p2c/sequential/fvtransportmultiphysics.hh
+++ b/dumux/porousmediumflow/2p2c/sequential/fvtransportmultiphysics.hh
@@ -25,7 +25,7 @@
 #define DUMUX_FVTRANSPORT2P2C_MULTIPHYSICS_HH
 
 #include <dumux/porousmediumflow/2p2c/sequential/fvtransport.hh>
-#include <dumux/linear/vectorexchange.hh>
+#include <dumux/parallel/vectorcommdatahandle.hh>
 
 namespace Dumux {
 /*!
@@ -236,7 +236,7 @@ void FVTransport2P2CMultiPhysics<TypeTag>::update(const Scalar t, Scalar& dt, Tr
     // communicate updated values
     using SolutionTypes = GetProp<TypeTag, Properties::SolutionTypes>;
     using ElementMapper = typename SolutionTypes::ElementMapper;
-    using DataHandle = VectorExchange<ElementMapper, Dune::BlockVector<Dune::FieldVector<Scalar, 1> > >;
+    using DataHandle = VectorCommDataHandleEqual<ElementMapper, Dune::BlockVector<Dune::FieldVector<Scalar, 1> >, 0/*elementCodim*/>;
     for (int i = 0; i < updateVec.size(); i++)
     {
         DataHandle dataHandle(problem().variables().elementMapper(), updateVec[i]);
diff --git a/dumux/porousmediumflow/sequential/cellcentered/transport.hh b/dumux/porousmediumflow/sequential/cellcentered/transport.hh
index ec0e35e757..fe564c699c 100644
--- a/dumux/porousmediumflow/sequential/cellcentered/transport.hh
+++ b/dumux/porousmediumflow/sequential/cellcentered/transport.hh
@@ -22,7 +22,7 @@
 #include <dune/grid/common/gridenums.hh>
 #include <dumux/porousmediumflow/sequential/transportproperties.hh>
 #include <dumux/porousmediumflow/sequential/properties.hh>
-#include <dumux/linear/vectorexchange.hh>
+#include <dumux/parallel/vectorcommdatahandle.hh>
 #include <unordered_map>
 
 /**
@@ -416,7 +416,7 @@ void FVTransport<TypeTag>::update(const Scalar t, Scalar& dt, TransportSolutionT
     // communicate updated values
     using SolutionTypes = GetProp<TypeTag, Properties::SolutionTypes>;
     using ElementMapper = typename SolutionTypes::ElementMapper;
-    using DataHandle = VectorExchange<ElementMapper, Dune::BlockVector<Dune::FieldVector<Scalar, 1> > >;
+    using DataHandle = VectorCommDataHandleEqual<ElementMapper, Dune::BlockVector<Dune::FieldVector<Scalar, 1> >, 0/*elementCodim*/>;
     DataHandle dataHandle(problem_.elementMapper(), updateVec);
     problem_.gridView().template communicate<DataHandle>(dataHandle,
                                                          Dune::InteriorBorder_All_Interface,
@@ -424,7 +424,7 @@ void FVTransport<TypeTag>::update(const Scalar t, Scalar& dt, TransportSolutionT
 
     if (localTimeStepping_)
     {
-    using TimeDataHandle = VectorExchange<ElementMapper, std::vector<LocalTimesteppingData> >;
+    using TimeDataHandle = VectorCommDataHandleEqual<ElementMapper, std::vector<LocalTimesteppingData>, 0/*elementCodim*/>;
 
     TimeDataHandle timeDataHandle(problem_.elementMapper(), timeStepData_);
     problem_.gridView().template communicate<TimeDataHandle>(timeDataHandle,
@@ -549,7 +549,7 @@ void FVTransport<TypeTag>::updatedTargetDt_(Scalar &dt)
     // communicate updated values
     using SolutionTypes = GetProp<TypeTag, Properties::SolutionTypes>;
     using ElementMapper = typename SolutionTypes::ElementMapper;
-    using TimeDataHandle = VectorExchange<ElementMapper, std::vector<LocalTimesteppingData> >;
+    using TimeDataHandle = VectorCommDataHandleEqual<ElementMapper, std::vector<LocalTimesteppingData>, 0/*elementCodim*/>;
 
     TimeDataHandle timeDataHandle(problem_.elementMapper(), timeStepData_);
     problem_.gridView().template communicate<TimeDataHandle>(timeDataHandle,
diff --git a/dumux/porousmediumflow/sequential/variableclassadaptive.hh b/dumux/porousmediumflow/sequential/variableclassadaptive.hh
index 43bf606420..cc0f65aa9a 100644
--- a/dumux/porousmediumflow/sequential/variableclassadaptive.hh
+++ b/dumux/porousmediumflow/sequential/variableclassadaptive.hh
@@ -21,7 +21,7 @@
 
 #include <dune/grid/common/partitionset.hh>
 #include <dune/grid/utility/persistentcontainer.hh>
-#include <dumux/linear/vectorexchange.hh>
+#include <dumux/parallel/vectorcommdatahandle.hh>
 #include "variableclass.hh"
 
 /**
@@ -200,7 +200,7 @@ public:
         // communicate ghost data
         using SolutionTypes = GetProp<TypeTag, Properties::SolutionTypes>;
         using ElementMapper = typename SolutionTypes::ElementMapper;
-        using DataHandle = VectorExchange<ElementMapper, std::vector<CellData> >;
+        using DataHandle = VectorCommDataHandleEqual<ElementMapper, std::vector<CellData>, 0/*elementCodim*/>;
         DataHandle dataHandle(problem.elementMapper(), this->cellDataGlobal());
         problem.gridView().template communicate<DataHandle>(dataHandle,
                                                             Dune::InteriorBorder_All_Interface,
-- 
GitLab