Commit 8728f7f9 authored by Bernd Flemisch's avatar Bernd Flemisch
Browse files

[io][gmsh][uggrid] fix reading a Gmsh file into a parallel UGGrid

Depending on the Dune version, the boundary markers are present on
all processes (<= 2.6) or on the root process only (>= 2.7). Try to
handle this in a flexible way in the corresponding data handle:
Determine if the minimum size over all processes of the boundary
markers vector is zero. If yes, assume that the root process contains
all markers and broadcast them.

Currently, it isn't possible to refine a parallel UGGrid that has
been read by a GmshReader, see dune-grid issue #83. Therefore, don't
refine in the corresponding test.
parent 64ee82bc
......@@ -138,13 +138,29 @@ struct GmshGridDataHandle<Dune::UGGrid<dimgrid>, GridFactory, Data>
using Grid = Dune::UGGrid<dimgrid>;
using GridView = typename Grid::LevelGridView;
GmshGridDataHandle(const Grid& grid, const GridFactory& gridFactory, Data& elementMarkers)
GmshGridDataHandle(const Grid& grid, const GridFactory& gridFactory, Data& elementMarkers, Data& boundaryMarkers)
: gridView_(grid.levelGridView(0))
, idSet_(grid.localIdSet())
, elementMarkers_(elementMarkers)
, boundaryMarkers_(boundaryMarkers)
{
for (const auto& element : elements(gridView_, Dune::Partitions::interior))
std::swap(elementMarkers_[gridFactory.insertionIndex(element)], data_[idSet_.id(element)]);
// Depending on the Dune version, the boundary markers are present on
// all processes (<= 2.6) or on the root process only (>= 2.7). Try to
// handle this in a flexible way: determine if the minimum size over
// all processes of the boundary markers vector is zero. If yes, assume
// that the root process contains all markers and broadcast them.
auto bmSizeMin = boundaryMarkers_.size();
Dune::MPIHelper::getCollectiveCommunication().min(&bmSizeMin, 1);
if (bmSizeMin == 0)
{
auto bmSize = boundaryMarkers_.size();
Dune::MPIHelper::getCollectiveCommunication().broadcast(&bmSize, 1, 0);
boundaryMarkers_.resize(bmSize);
Dune::MPIHelper::getCollectiveCommunication().broadcast(&boundaryMarkers_.front(), bmSize, 0);
}
}
~GmshGridDataHandle()
......@@ -182,6 +198,7 @@ private:
const GridView gridView_;
const IdSet &idSet_;
Data& elementMarkers_;
Data& boundaryMarkers_;
mutable std::map< typename IdSet::IdType, typename Data::value_type> data_;
};
......
......@@ -202,7 +202,7 @@ public:
// Create the data handle that communicates the Gmsh element and boundary
// markers when the grid is load-balanced.
// Enabled only for Dune::UGGrid via SFINAE on Detail::isUG<Grid>.
// \return a DataHandle bound to the grid, factory, and both marker vectors
template<bool ug = Detail::isUG<Grid>::value, typename std::enable_if_t<ug, int> = 0>
DataHandle createGmshDataHandle()
{
    // Pass the boundary markers alongside the element markers so the handle
    // can make them available on every process in a parallel run.
    return DataHandle(*gmshGrid_, *gridFactory_, elementMarkers_, boundaryMarkers_);
}
private:
......
......@@ -85,11 +85,9 @@ dune_add_test(NAME test_gridmanager_gmsh_3d_ug_parallel
CMAKE_GUARD dune-uggrid_FOUND
COMMAND ${CMAKE_SOURCE_DIR}/bin/testing/runtest.py
CMD_ARGS --script fuzzy --zeroThreshold {"rank":100}
--command "${MPIEXEC} -np 2 ${CMAKE_CURRENT_BINARY_DIR}/test_gridmanager_gmsh_3d_ug -Problem.Name bifurcation_ug_parallel"
--command "${MPIEXEC} -np 2 ${CMAKE_CURRENT_BINARY_DIR}/test_gridmanager_gmsh_3d_ug -Problem.Name bifurcation_ug_parallel -Grid.Refine false"
--files ${CMAKE_SOURCE_DIR}/test/references/gridmanager-bifurcation-3d-reference.vtu
${CMAKE_CURRENT_BINARY_DIR}/s0002-bifurcation_ug_parallel-00000.pvtu
${CMAKE_SOURCE_DIR}/test/references/gridmanager-bifurcation-3d-reference-refined.vtu
${CMAKE_CURRENT_BINARY_DIR}/s0002-bifurcation_ug_parallel-00001.pvtu)
${CMAKE_CURRENT_BINARY_DIR}/s0002-bifurcation_ug_parallel-00000.pvtu)
# Build the Gmsh element-marker grid-manager test for ALUGrid (excluded from the default build).
add_executable(test_gridmanager_gmsh_e_markers_alu EXCLUDE_FROM_ALL test_gridmanager_gmsh_e_markers.cc)
# Select the grid implementation at compile time: 2d simplex, nonconforming ALUGrid.
target_compile_definitions(test_gridmanager_gmsh_e_markers_alu PUBLIC GRIDTYPE=Dune::ALUGrid<2,2,Dune::simplex,Dune::nonconforming>)
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.