// -*- mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
// vi: set et ts=4 sw=4 sts=4:
/*****************************************************************************
 *   See the file COPYING for full copying permissions.                     *
 *                                                                           *
 *   This program is free software: you can redistribute it and/or modify   *
 *   it under the terms of the GNU General Public License as published by   *
 *   the Free Software Foundation, either version 2 of the License, or      *
 *   (at your option) any later version.                                    *
 *                                                                           *
 *   This program is distributed in the hope that it will be useful,        *
 *   but WITHOUT ANY WARRANTY; without even the implied warranty of         *
 *   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the           *
 *   GNU General Public License for more details.                           *
 *                                                                           *
 *   You should have received a copy of the GNU General Public License      *
 *   along with this program. If not, see <http://www.gnu.org/licenses/>.   *
 *****************************************************************************/
/*!
 * \file
 *
 * \brief test for the one-phase CC model
 */

#include <config.h>

#include "1pproblem.hh"

#include <ctime>
#include <iostream>

#include <dune/common/parallel/mpihelper.hh>
#include <dune/common/timer.hh>
#include <dune/grid/io/file/dgfparser/dgfexception.hh>
#include <dune/grid/io/file/vtk.hh>
#include <dune/istl/io.hh>

#include <dumux/common/properties.hh>
#include <dumux/common/parameters.hh>
#include <dumux/common/dumuxmessage.hh>
#include <dumux/common/defaultusagemessage.hh>

#include <dumux/linear/seqsolverbackend.hh>
#include <dumux/assembly/fvassembler.hh>

#include <dumux/io/vtkoutputmodule.hh>
#include <dumux/io/grid/gridmanager.hh>

#include "properties.hh"

int main(int argc, char** argv)
{
    using namespace Dumux;

    // define the type tag for this problem
    using TypeTag = Properties::TTag::OnePIncompressible;

    ////////////////////////////////////////////////////////////
    ////////////////////////////////////////////////////////////

    // initialize MPI, finalize is done automatically on exit
    const auto& mpiHelper = Dune::MPIHelper::instance(argc, argv);

    // print dumux start message
    if (mpiHelper.rank() == 0)
        DumuxMessage::print(/*firstCall=*/true);

    // initialize parameter tree
    Parameters::init(argc, argv);
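
    // the run-time parameters are read from the command line and from the
    // given parameter input file into a parameter tree; they can later be
    // queried anywhere in the code via getParam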

    //////////////////////////////////////////////////////////////////////
    // try to create a grid (from the given grid file or the input file)
    //////////////////////////////////////////////////////////////////////
    GridManager<GetPropType<TypeTag, Properties::Grid>> gridManager;
    gridManager.init();

    ////////////////////////////////////////////////////////////
    // run stationary linear problem on this grid
    ////////////////////////////////////////////////////////////

    // we compute on the leaf grid view
    const auto& leafGridView = gridManager.grid().leafGridView();

    // create the finite volume grid geometry
    using GridGeometry = GetPropType<TypeTag, Properties::GridGeometry>;
    auto gridGeometry = std::make_shared<GridGeometry>(leafGridView);

    // the problem (initial and boundary conditions)
    using Problem = GetPropType<TypeTag, Properties::Problem>;
    auto problem = std::make_shared<Problem>(gridGeometry);

    // the solution vector
    using SolutionVector = GetPropType<TypeTag, Properties::SolutionVector>;
    SolutionVector x(gridGeometry->numDofs());

    // the grid variables
    using GridVariables = GetPropType<TypeTag, Properties::GridVariables>;
    auto gridVariables = std::make_shared<GridVariables>(problem, gridGeometry);
    gridVariables->init(x);

    // initialize the vtk output module
    VtkOutputModule<GridVariables, SolutionVector> vtkWriter(*gridVariables, x, problem->name());
    using VelocityOutput = GetPropType<TypeTag, Properties::VelocityOutput>;
    vtkWriter.addVelocityOutput(std::make_shared<VelocityOutput>(*gridVariables));
    using IOFields = GetPropType<TypeTag, Properties::IOFields>;
    IOFields::initOutputModule(vtkWriter); //!< Add model specific output fields
    vtkWriter.write(0.0);

    Dune::Timer timer;

    // dumux-course-task:
    // the differentiation method has been changed from DiffMethod::numeric to DiffMethod::analytic

    // the assembler for stationary problems
    using Assembler = FVAssembler<TypeTag, DiffMethod::analytic>;
    auto assembler = std::make_shared<Assembler>(problem, gridGeometry, gridVariables);
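
    // note: with DiffMethod::analytic the Jacobian entries are taken from the
    // analytic partial derivatives implemented in the model's local residual,
    // whereas DiffMethod::numeric approximates them by finite differences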

    // the linear solver
    using LinearSolver = ILU0BiCGSTABBackend;
    auto linearSolver = std::make_shared<LinearSolver>();
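
    // ILU0BiCGSTABBackend is a sequential BiCGSTAB solver from dune-istl,
    // preconditioned with an incomplete LU factorization (ILU(0))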

    // the discretization matrices for stationary linear problems
    using JacobianMatrix = GetPropType<TypeTag, Properties::JacobianMatrix>;
    auto A = std::make_shared<JacobianMatrix>();
    auto r = std::make_shared<SolutionVector>();
    assembler->setLinearSystem(A, r);
    assembler->assembleJacobianAndResidual(x);

    // we solve Ax = -r to save update and copy
    (*r) *= -1.0;
    linearSolver->solve(*A, x, *r);

    // the grid variables need to be up to date for subsequent output
    gridVariables->update(x);

    // write vtk output
    vtkWriter.write(1.0);

    timer.stop();

    const auto& comm = leafGridView.comm();
    std::cout << "Simulation took " << timer.elapsed() << " seconds on "
              << comm.size() << " processes.\n"
              << "The cumulative CPU time was " << timer.elapsed()*comm.size() << " seconds.\n";

    ////////////////////////////////////////////////////////////
    // print dumux message to say goodbye
    ////////////////////////////////////////////////////////////

    // print dumux end message
    if (mpiHelper.rank() == 0)
        DumuxMessage::print(/*firstCall=*/false);

    return 0;
} // end main