diff --git a/.gitlab-ci/default.yml b/.gitlab-ci/default.yml
index ff98f7be11cd350cf96ae0db217c7a358eea5c5b..9ea9baf253909ebc80ce86139b4a679e6c785352 100644
--- a/.gitlab-ci/default.yml
+++ b/.gitlab-ci/default.yml
@@ -50,6 +50,7 @@ pylint-flake8 (python):
     - |
       if [ -d build-cmake/python/dumux ] ; then
         pylint --rcfile=.pylintrc build-cmake/python/dumux
+        pylint --rcfile=.pylintrc bin
         flake8 build-cmake/python/dumux
         flake8 bin
       fi
diff --git a/.pylintrc b/.pylintrc
index 0706a7b78be43b211cca8ba591f8b6604b88fd3f..25fed66b8195f91b08a61e13289bef8137d34dfc 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -3,7 +3,7 @@
 # A comma-separated list of package or module names from where C extensions may
 # be loaded. Extensions are loading into the active Python interpreter and may
 # run arbitrary code.
-extension-pkg-allow-list=
+extension-pkg-allow-list=numpy,math
 
 # A comma-separated list of package or module names from where C extensions may
 # be loaded. Extensions are loading into the active Python interpreter and may
@@ -32,7 +32,7 @@ ignore-patterns=
 
 # Python code to execute, usually for sys.path manipulation such as
 # pygtk.require().
-#init-hook=
+init-hook='import sys; sys.path.append("bin/testing")'
 
 # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
 # number of processors available to use.
@@ -153,7 +153,8 @@ disable=print-statement,
         deprecated-sys-function,
         exception-escape,
         comprehension-escape,
-        too-few-public-methods
+        too-few-public-methods,
+        unspecified-encoding
 
 # Enable the message, report, category or checker with the given id(s). You can
 # either give multiple identifier separated by comma (,) or put this option
diff --git a/bin/create_cmakelists.py b/bin/create_cmakelists.py
index e0454908322329174398f3dfbd15430abbf0e7c0..f08cfe2f44d47d8377ef6faf2967fcb608166088 100755
--- a/bin/create_cmakelists.py
+++ b/bin/create_cmakelists.py
@@ -10,7 +10,9 @@ if no folder was specified.
 import os
 import argparse
 
-if __name__ == "__main__":
+
+def createCMakeLists():
+    """Create the CMakeLists.txt files"""
 
     parser = argparse.ArgumentParser()
     parser.add_argument(
@@ -28,7 +30,7 @@ if __name__ == "__main__":
     else:
         rootDir = args["folder"]
 
-    ignore_folders = ["", "io/format/fmt", "io/xml"]
+    ignoreFolders = ["", "io/format/fmt", "io/xml"]
     extensions = [".hh", ".inc"]
     for fullFolderName, subFolders, files in os.walk(rootDir):
         # alphabetically sort
@@ -36,11 +38,11 @@ if __name__ == "__main__":
         files = sorted(files)
         # get folder name relative to dumux
         folderName = fullFolderName.replace(rootDir + "/", "").replace(rootDir, "")
-        if folderName not in ignore_folders:
-            with open(fullFolderName + "/CMakeLists.txt", "w") as cmakelists:
+        if folderName not in ignoreFolders:
+            with open(fullFolderName + "/CMakeLists.txt", "w") as cmakeLists:
                 # add subfolders
                 for subFolder in subFolders:
-                    cmakelists.write("add_subdirectory({})\n".format(subFolder))
+                    cmakeLists.write("add_subdirectory({})\n".format(subFolder))
 
                 headersExist = False
                 for fileName in files:
@@ -51,15 +53,19 @@ if __name__ == "__main__":
 
                 if headersExist:
                     if subFolders:
-                        cmakelists.write("\n")
+                        cmakeLists.write("\n")
                     # collect all files to be installed in a CMake variable
-                    headers_variable = "DUMUX_" + folderName.upper().replace("/", "_") + "_HEADERS"
-                    cmakelists.write(
-                        "file(GLOB {}{})\n".format(headers_variable, " *".join([""] + extensions))
+                    headerGuard = "DUMUX_" + folderName.upper().replace("/", "_") + "_HEADERS"
+                    cmakeLists.write(
+                        "file(GLOB {}{})\n".format(headerGuard, " *".join([""] + extensions))
                     )
-                    cmakelists.write("install(FILES ${{{}}}\n".format(headers_variable))
-                    cmakelists.write(
+                    cmakeLists.write("install(FILES ${{{}}}\n".format(headerGuard))
+                    cmakeLists.write(
                         "        DESTINATION ${{CMAKE_INSTALL_INCLUDEDIR}}/dumux/{})\n".format(
                             folderName
                         )
                     )
+
+
+if __name__ == "__main__":
+    createCMakeLists()
diff --git a/bin/create_dockerimage.py b/bin/create_dockerimage.py
index 8658f48152245d88743c2f1bd1e5dd7466111870..6ca73c89328dcee2a30d9dfc948c1b9164c4693f 100644
--- a/bin/create_dockerimage.py
+++ b/bin/create_dockerimage.py
@@ -1,4 +1,9 @@
 #!/usr/bin/env python3
+
+"""
+Script to create a Docker image from a Dune module
+"""
+
 import os
 import sys
 import string
@@ -12,6 +17,17 @@ from util.moduleinfo import extractModuleInfos
 if sys.version_info[0] < 3:
     sys.exit("\nERROR: Python3 required")
 
+
+def substituteAndWrite(template, target, mapping):
+    """substitute content from template and write to target"""
+    if not os.path.exists(template):
+        sys.exit("Template file '" + template + "' could not be found")
+    with open(target, "w") as targetFile:
+        with open(template) as tmp:
+            raw = string.Template(tmp.read())
+            targetFile.write(raw.substitute(**mapping))
+
+
 if __name__ == "__main__":
 
     # input argument parser
@@ -52,7 +68,7 @@ if __name__ == "__main__":
             "\nA docker folder already exists. " "Continue anyway? - will be overwritten - [y/N]\n"
         )
         delete = input()
-        if delete == "y" or delete == "Y":
+        if delete in ("y", "Y"):
             shutil.rmtree("docker")
             print("--> Deleted old docker folder.")
         else:
@@ -73,46 +89,45 @@ if __name__ == "__main__":
         )
     )
 
-    # substitute content from template and write to target
-    def substituteAndWrite(template, target, mapping):
-        if not os.path.exists(template):
-            sys.exit("Template file '" + template + "' could not be found")
-        with open(target, "w") as targetFile:
-            raw = string.Template(open(template).read())
-            targetFile.write(raw.substitute(**mapping))
-
     # write setpermissions helper script
-    template = os.path.join(templateFolder, "setpermissions.sh.template")
-    target = os.path.join(os.getcwd(), "docker/setpermissions.sh")
-    substituteAndWrite(template, target, {})
+    substituteAndWrite(
+        template=os.path.join(templateFolder, "setpermissions.sh.template"),
+        target=os.path.join(os.getcwd(), "docker/setpermissions.sh"),
+        mapping={},
+    )
     print("--> Created permission helper script for easier container setup.")
 
     # write welcome message file
-    template = os.path.join(templateFolder, "WELCOME.template")
-    target = os.path.join(os.getcwd(), "docker/WELCOME")
-    substituteAndWrite(template, target, {"modName": moduleName, "modFolder": moduleName})
+    substituteAndWrite(
+        template=os.path.join(templateFolder, "WELCOME.template"),
+        target=os.path.join(os.getcwd(), "docker/WELCOME"),
+        mapping={"modName": moduleName, "modFolder": moduleName},
+    )
     print("--> Created welcome message displayed on Docker container startup.")
 
     # write readme file
-    template = os.path.join(templateFolder, "README.md.template")
-    target = os.path.join(os.getcwd(), "docker/README.md")
-    substituteAndWrite(template, target, {"modName": moduleName, "dockerTag": dockerTag})
+    substituteAndWrite(
+        template=os.path.join(templateFolder, "README.md.template"),
+        target=os.path.join(os.getcwd(), "docker/README.md"),
+        mapping={"modName": moduleName, "dockerTag": dockerTag},
+    )
     print("--> Created README.md on how to use the docker image.")
 
     # write helper file for container spin-up (make it executable after creation)
-    template = os.path.join(templateFolder, "docker.sh.template")
-    target = os.path.join(os.getcwd(), "docker/docker_{}.sh".format(dockerTag))
-    substituteAndWrite(template, target, {"dockerTag": dockerTag})
-    os.system("chmod +x " + target)
+    dockerScript = os.path.join(os.getcwd(), "docker/docker_{}.sh".format(dockerTag))
+    substituteAndWrite(
+        template=os.path.join(templateFolder, "docker.sh.template"),
+        target=dockerScript,
+        mapping={"dockerTag": dockerTag},
+    )
+    os.system("chmod +x " + dockerScript)
     print("--> Created helper script to spin up the docker container.")
 
     # write the docker file
-    template = os.path.join(templateFolder, "Dockerfile.template")
-    target = os.path.join(os.getcwd(), "docker/Dockerfile")
     substituteAndWrite(
-        template,
-        target,
-        {
+        template=os.path.join(templateFolder, "Dockerfile.template"),
+        target=os.path.join(os.getcwd(), "docker/Dockerfile"),
+        mapping={
             "modName": moduleName,
             "modMaintainer": moduleMaintainer,
             "dockerTag": dockerTag,
@@ -124,7 +139,7 @@ if __name__ == "__main__":
     print("Do you want to directly build the Docker image? [y/N]")
 
     build = input()
-    if build == "y" or build == "Y":
+    if build in ("y", "Y"):
         print("Building Docker image... this may take several minutes.")
         try:
             os.chdir("docker")
@@ -132,27 +147,24 @@ if __name__ == "__main__":
                 ["docker", "build", "-f", "Dockerfile", "-t", dockerTag, "."], check=True
             )
             os.chdir("../")
-        except Exception:
+        except subprocess.CalledProcessError:
             os.chdir("../")
             sys.exit("ERROR: docker image build failed")
 
-        print()
-        print("Successfully built image: {}. " "Have a look at docker/README.md.".format(dockerTag))
         print(
+            "",
+            f"Successfully built image: {dockerTag}.",
+            "Have a look at docker/README.md.",
             "Check the container by running "
-            "'docker run -it {} /bin/bash' in the same".format(dockerTag)
-        )
-        print(
+            f"'docker run -it {dockerTag} /bin/bash' in the same "
             "directory as the Dockerfile, and try using the convenience script "
-            "docker_{}.sh".format(dockerTag)
+            f"docker_{dockerTag}.sh",
+            "See docker/README.md for more information.",
         )
-        print("See docker/README.md for more information.")
     else:
         print(
             "You can build your Docker image later by running "
-            "'docker build -f Dockerfile -t {}'".format(dockerTag)
-        )
-        print(
+            f"'docker build -f Dockerfile -t {dockerTag}' "
             "from within the folder 'docker' that was created by this script, "
             "and in which you should find the 'Dockerfile'."
         )
diff --git a/bin/doc/getparameterlist.py b/bin/doc/getparameterlist.py
index 7a9afefe512806fb0a2959462439abcf46dc45ba..d291aaa27ecedbc56e1cd676f52e36e1c9c42552 100644
--- a/bin/doc/getparameterlist.py
+++ b/bin/doc/getparameterlist.py
@@ -5,6 +5,9 @@ Automatically updates parameterlist.txt by searching all *.hh files
 for usage of getParam or getParamFromGroup.
 """
 
+# pylint: skip-file
+# Remove this after rewrite!
+
 import os
 
 
@@ -158,7 +161,7 @@ for key in parameterDict:
     if hasMultiplePT or hasMultipleDV:
         print(
             f"\nFound multiple occurrences of parameter {paramName}",
-            " with differing specifications: "
+            " with differing specifications: ",
         )
     if hasMultiplePT:
         print(" -> Specified type names:")
diff --git a/bin/extract_as_new_module.py b/bin/extract_as_new_module.py
index 414a143258ac5ad02c589fafcac4086318f1e5e0..02e13ce45268d7e36bf6996fc5d5861f12341dab 100755
--- a/bin/extract_as_new_module.py
+++ b/bin/extract_as_new_module.py
@@ -1,4 +1,6 @@
 #!/usr/bin/env python3
+# pylint: disable=redefined-outer-name
+
 """
 This script extracts some specified applications into a separate Dune module.
 For example make a dumux-pub repository accompanying a scientific paper.
@@ -33,7 +35,6 @@ from util.installscript import (
     filterDependencies,
     addDependencyVersions,
     addDependencyPatches,
-    makeScriptWriter,
 )
 
 
@@ -141,8 +142,8 @@ def detectNewModule():
 
 def copySubFolders(subFolder, oldPath, newPath):
     """Copy folders from old path to new path"""
-    for b in subFolder:
-        copy_tree(os.path.join(oldPath, b), os.path.join(newPath, b))
+    for sub in subFolder:
+        copy_tree(os.path.join(oldPath, sub), os.path.join(newPath, sub))
 
 
 def addFoldersToCMakeLists(modulePath, subFolder):
@@ -179,9 +180,9 @@ def addFoldersToCMakeLists(modulePath, subFolder):
 
 def findHeaders(modulePath, sourceFiles):
     """Find header included (recursively) in the given source files"""
-    with mp.Pool() as p:
+    with mp.Pool() as pool:
         headers = itertools.chain.from_iterable(
-            p.map(partial(includedCppProjectHeaders, projectBase=modulePath), sourceFiles)
+            pool.map(partial(includedCppProjectHeaders, projectBase=modulePath), sourceFiles)
         )
     return list(set(headers))
 
@@ -213,8 +214,8 @@ def foldersWithoutSourceFiles(modulePath, checkSubFolder, sources):
         return directory not in sourceDirectories and not hasChildSourceDirectory(directory)
 
     noSourceDirectories = []
-    for sf in checkSubFolder:
-        for root, dirs, _ in os.walk(os.path.join(modulePath, sf)):
+    for sub in checkSubFolder:
+        for root, dirs, _ in os.walk(os.path.join(modulePath, sub)):
             for directory in dirs:
                 directory = os.path.join(root, directory)
                 if isNotASourceDirectory(directory):
@@ -223,11 +224,11 @@ def foldersWithoutSourceFiles(modulePath, checkSubFolder, sources):
 
     def removeEmptyParents():
         folderMap = {}
-        for f in noSourceDirectories:
-            parent = os.path.dirname(f)
+        for folder in noSourceDirectories:
+            parent = os.path.dirname(folder)
             if parent not in folderMap:
                 folderMap[parent] = []
-            folderMap[parent].append(f)
+            folderMap[parent].append(folder)
 
         for parent, folders in folderMap.items():
             found = set(folders)
@@ -355,7 +356,7 @@ def guideRepositoryInitialization(modulePath):
     return remoteURL
 
 
-def dependenciesAndPatches(modulePath, skip=[]):
+def dependenciesAndPatches(modulePath, skip=None):
     """Determine the module's dependencies"""
     try:
         print(
@@ -363,7 +364,7 @@ def dependenciesAndPatches(modulePath, skip=[]):
             " this may take several minutes"
         )
         deps = getDependencies(modulePath)
-        deps = filterDependencies(deps, skip)
+        deps = filterDependencies(deps, skip or [])
         deps = addDependencyVersions(deps, ignoreUntracked=True)
         deps = addDependencyPatches(deps)
     except Exception as exc:
@@ -404,11 +405,10 @@ def guideInstallScriptGeneration(modulePath, dependencies, scriptNameBody):
             modPath=modulePath,
             dependencies=dependencies,
             scriptName=installScriptName,
-            writer=makeScriptWriter(language),
             topFolderName="",
         )
-    except Exception as e:
-        print(f"Error during install script generation: {e}")
+    except Exception as exc:  # pylint: disable=broad-except
+        print(f"Error during install script generation: {exc}")
 
     return installScriptName
 
@@ -480,8 +480,8 @@ def infoInitial(moduleDirectory, subFolder, sourceFiles):
 def infoReadmeMain(moduleDirectory, subFolder, sourceFiles):
     """Main part of the README.md document"""
 
-    def relativePath(p):
-        return os.path.relpath(p, moduleDirectory)
+    def relativePath(path):
+        return os.path.relpath(path, moduleDirectory)
 
     subFolderString = "".join([f"*   `{d}`\n" for d in subFolder])
     sourceString = "".join([f"*   `{relativePath(s)}`\n" for s in sourceFiles])
diff --git a/bin/installdumux.py b/bin/installdumux.py
index 4ccafe1bc12489db0fd876247a6aa965310a1936..bae17a5f0a0ba994d246fe1c73e8efd9646bd037 100755
--- a/bin/installdumux.py
+++ b/bin/installdumux.py
@@ -21,23 +21,25 @@ parser.add_argument("--dune-version", default="2.7", help="Dune version to be ch
 parser.add_argument("--dumux-version", default="3.4", help="Dumux version to be checked out.")
 args = vars(parser.parse_args())
 
-dune_branch = (
+duneBranch = (
     args["dune_version"] if args["dune_version"] == "master" else "releases/" + args["dune_version"]
 )
-dumux_branch = (
+dumuxBranch = (
     args["dumux_version"]
     if args["dumux_version"] == "master"
     else "releases/" + args["dumux_version"]
 )
 
 
-def show_message(message):
+def showMessage(message):
+    """Pretty print message"""
     print("*" * 120)
     print(message)
     print("*" * 120)
 
 
-def check_cpp_version():
+def checkCppVersion():
+    """Check compiler version"""
     requiredversion = "7"
     result = subprocess.check_output(["g++", "-dumpversion"]).decode().strip()
     if LooseVersion(result) < LooseVersion(requiredversion):
@@ -49,52 +51,48 @@ def check_cpp_version():
         )
 
 
-def run_command(command, workdir="."):
+def runCommand(command, workdir="."):
+    """Run command with error checking"""
     with open("../installdumux.log", "a") as log:
-        popen = subprocess.Popen(
+        with subprocess.Popen(
             command,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
+            stdout=log,
+            stderr=log,
             universal_newlines=True,
             cwd=workdir,
-        )
-        for line in popen.stdout:
-            log.write(line)
-            print(line, end="")
-        for line in popen.stderr:
-            log.write(line)
-            print(line, end="")
-        popen.stdout.close()
-        popen.stderr.close()
-        returnCode = popen.wait()
-        if returnCode:
-            message = textwrap.dedent(
-                f"""\
-
-                (Error) The command {command} returned with non-zero exit code
-                  If you can't fix the problem yourself consider reporting your issue
-                  on the mailing list (dumux@listserv.uni-stuttgart.de) and
-                  attach the file 'installdumux.log'
-            """
-            )
-            show_message(message)
-            sys.exit(1)
-
-
-def git_clone(url, branch=None):
+        ) as popen:
+            returnCode = popen.wait()
+            if returnCode:
+                message = textwrap.dedent(
+                    f"""\
+
+                    (Error) The command {command} returned with non-zero exit code
+                    If you can't fix the problem yourself consider reporting your issue
+                    on the mailing list (dumux@listserv.uni-stuttgart.de) and
+                    attach the file 'installdumux.log'
+                    """
+                )
+                showMessage(message)
+                sys.exit(1)
+
+
+def gitClone(url, branch=None):
+    """Clone git repo"""
     clone = ["git", "clone"]
     if branch:
         clone += ["-b", branch]
-    run_command(command=[*clone, url])
+    runCommand(command=[*clone, url])
 
 
-def git_setbranch(folder, branch):
+def gitSetBranch(folder, branch):
+    """Checkout specific git branch"""
     checkout = ["git", "checkout", branch]
-    run_command(command=checkout, workdir=folder)
+    runCommand(command=checkout, workdir=folder)
 
 
 # clear the log file
-open("installdumux.log", "w").close()
+with open("installdumux.log", "w") as _:
+    pass
 
 #################################################################
 #################################################################
@@ -102,7 +100,7 @@ open("installdumux.log", "w").close()
 #################################################################
 #################################################################
 programs = ["git", "gcc", "g++", "cmake", "pkg-config"]
-show_message("(1/3) Checking all prerequistes: " + " ".join(programs) + "...")
+showMessage("(1/3) Checking all prerequisites: " + " ".join(programs) + "...")
 
 # check some prerequistes
 for program in programs:
@@ -115,9 +113,9 @@ if find_executable("paraview") is None:
         "-- Warning: paraview seems to be missing. You may not be able to view simulation results!"
     )
 
-check_cpp_version()
+checkCppVersion()
 
-show_message("(1/3) Step completed. All prerequistes found.")
+showMessage("(1/3) Step completed. All prerequisites found.")
 
 #################################################################
 #################################################################
@@ -128,7 +126,7 @@ show_message("(1/3) Step completed. All prerequistes found.")
 os.makedirs("./dumux", exist_ok=True)
 os.chdir("dumux")
 
-show_message(
+showMessage(
     "(2/3) Cloning repositories. This may take a while. "
     "Make sure to be connected to the internet..."
 )
@@ -136,52 +134,52 @@ show_message(
 # the core modules
 for module in ["common", "geometry", "grid", "localfunctions", "istl"]:
     if not os.path.exists("dune-{}".format(module)):
-        git_clone("https://gitlab.dune-project.org/core/dune-{}.git".format(module), dune_branch)
+        gitClone("https://gitlab.dune-project.org/core/dune-{}.git".format(module), duneBranch)
     else:
         print("-- Skip cloning dune-{} because the folder already exists.".format(module))
-        git_setbranch("dune-{}".format(module), dune_branch)
+        gitSetBranch("dune-{}".format(module), duneBranch)
 
 # dumux
 if not os.path.exists("dumux"):
-    git_clone("https://git.iws.uni-stuttgart.de/dumux-repositories/dumux.git", dumux_branch)
+    gitClone("https://git.iws.uni-stuttgart.de/dumux-repositories/dumux.git", dumuxBranch)
 else:
     print("-- Skip cloning dumux because the folder already exists.")
-    git_setbranch("dumux", dumux_branch)
+    gitSetBranch("dumux", dumuxBranch)
 
 
-show_message("(2/3) Step completed. All repositories have been cloned into a containing folder.")
+showMessage("(2/3) Step completed. All repositories have been cloned into a containing folder.")
 
 #################################################################
 #################################################################
 # (3/3) Configure and build
 #################################################################
 #################################################################
-show_message(
+showMessage(
     "(3/3) Configure and build dune modules and dumux using dunecontrol. "
     "This may take several minutes..."
 )
 
 # run dunecontrol
-run_command(command=["./dune-common/bin/dunecontrol", "--opts=dumux/cmake.opts", "all"])
+runCommand(command=["./dune-common/bin/dunecontrol", "--opts=dumux/cmake.opts", "all"])
 
-show_message("(3/3) Step completed. Succesfully configured and built dune and dumux.")
+showMessage("(3/3) Step completed. Successfully configured and built dune and dumux.")
 
 #################################################################
 #################################################################
 # Show message how to check that everything works
 #################################################################
 #################################################################
-test_path = "dumux/dumux/build-cmake/test/porousmediumflow/1p"
-if dumux_branch == "master" or LooseVersion(args["dumux_version"]) > LooseVersion("3.3"):
-    test_path += "/isothermal"
+TEST_PATH = "dumux/dumux/build-cmake/test/porousmediumflow/1p"
+if dumuxBranch == "master" or LooseVersion(args["dumux_version"]) > LooseVersion("3.3"):
+    TEST_PATH += "/isothermal"
 else:
-    test_path += "/implicit/isothermal"
+    TEST_PATH += "/implicit/isothermal"
 
-show_message(
+showMessage(
     "(Installation complete) To test if everything works, "
     "please run the following commands (can be copied to command line):\n\n"
-    "  cd {}\n"
+    f"  cd {TEST_PATH}\n"
     "  make test_1p_tpfa\n"
     "  ./test_1p_tpfa\n"
-    "  paraview *pvd\n".format(test_path)
+    "  paraview *pvd\n"
 )
diff --git a/bin/installexternal.py b/bin/installexternal.py
index 416fcfacd971b41554cca07e0a220617979dc49a..decb1be119bcce0c69689f9edbe37110bba40ad0 100755
--- a/bin/installexternal.py
+++ b/bin/installexternal.py
@@ -14,9 +14,12 @@ import argparse
 import textwrap
 
 
+# pylint: disable=C0103,W0212,W0622,C0116
 class ChoicesAction(argparse._StoreAction):
+    """Action to show choices in argparse"""
+
     def __init__(self, **kwargs):
-        super(ChoicesAction, self).__init__(**kwargs)
+        super().__init__(**kwargs)
         if self.choices is None:
             self.choices = []
         self._choices_actions = []
@@ -30,7 +33,11 @@ class ChoicesAction(argparse._StoreAction):
         return self._choices_actions
 
 
-def show_message(message):
+# pylint: enable=C0103,W0212,W0622,C0116
+
+
+def showMessage(message):
+    """Pretty print for info messages"""
     print("*" * 120)
     print(message)
     print("")
@@ -38,7 +45,7 @@ def show_message(message):
 
 
 if len(sys.argv) == 1:
-    show_message(
+    showMessage(
         "No options given. For more information "
         "run the following command: \n ./installexternal.py --help"
     )
@@ -87,237 +94,237 @@ options.add_argument(
     "--download", action="store_true", default=False, help="Only download the packages."
 )
 
-parser.add_argument("--dune_branch", default="releases/2.7", help="Dune branch to be checked out.")
-parser.add_argument(
-    "--dumux_branch", default="releases/3.4", help="Dumux branch to be checked out."
-)
-parser.add_argument("--opm_branch", default="release/2020.10", help="Opm branch to be checked out.")
-parser.add_argument("--mmesh_branch", default="release/1.2", help="Mmesh branch to be checked out.")
+parser.add_argument("--duneBranch", default="releases/2.7", help="Dune branch to be checked out.")
+parser.add_argument("--dumuxBranch", default="releases/3.4", help="Dumux branch to be checked out.")
+parser.add_argument("--opmBranch", default="release/2020.10", help="Opm branch to be checked out.")
+parser.add_argument("--mmeshBranch", default="release/1.2", help="Mmesh branch to be checked out.")
 
 args = vars(parser.parse_args())
 
 
-def run_command(command, currentdir="."):
-    with open(currentdir + "/installexternal.log", "a") as log:
-        popen = subprocess.Popen(
-            command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True
-        )
-        for line in popen.stdout:
-            log.write(line)
-            print(line, end="")
-        for line in popen.stderr:
-            log.write(line)
-            print(line, end="")
-        popen.stdout.close()
-        popen.stderr.close()
-        return_code = popen.wait()
-        if return_code:
-            print("\n")
-            message = textwrap.dedent(
-                f"""\
-                (Error) The command {command} returned with non-zero exit code
-                  If you can't fix the problem yourself consider reporting your issue
-                  on the mailing list (dumux@listserv.uni-stuttgart.de) and
-                  attach the file 'installexternal.log'
-            """
-            )
-            show_message(message)
-            sys.exit(1)
-
-
-def git_clone(url, branch=None):
+def runCommand(command, currentDir="."):
+    """Helper function to run commands with error checking and reporting"""
+    with open(currentDir + "/installexternal.log", "a") as log:
+        with subprocess.Popen(command, stdout=log, stderr=log, universal_newlines=True) as popen:
+            returnCode = popen.wait()
+            if returnCode:
+                message = textwrap.dedent(
+                    f"""
+                    (Error) The command {command} returned with non-zero exit code
+                    If you can't fix the problem yourself consider reporting your issue
+                    on the mailing list (dumux@listserv.uni-stuttgart.de) and
+                    attach the file 'installexternal.log'
+                """
+                )
+                showMessage(message)
+                sys.exit(1)
+
+
+def gitClone(url, branch=None):
+    """Clone a repository from a given URL"""
     clone = ["git", "clone"]
     if branch:
         clone += ["-b", branch]
-    run_command(command=[*clone, url])
-
-
-def install_external(args):
-    dune_branch = args["dune_branch"]
-    dumux_branch = args["dumux_branch"]
-    opm_branch = args["opm_branch"]
-    mmesh_branch = args["mmesh_branch"]
-    packages = args["packages"]
-    cleanup = args["clean"]
-    download = args["download"]
-
-    final_message = []
-    top_dir = os.getcwd()
-    ext_dir = top_dir + "/external"
-
-    # Prepare a list of packages
-    packages = []
-    for pkg in args["packages"]:
-        if pkg in packagenames:
-            packages.extend(packagenames[pkg])
+    runCommand(command=[*clone, url])
+
+
+def branchName(package, parameters):
+    """Get the correct branch name"""
+    # Set the branch
+    if "dumux" in package:
+        return parameters["dumux_branch"]
+    if "mmesh" in package:
+        return parameters["mmesh_branch"]
+    if "dune" in package:
+        return parameters["dune_branch"]
+    if "opm" in package:
+        return parameters["opm_branch"]
+    return ""
+
+
+def cleanPackage(package, finalMessage):
+    """Clean up after a package"""
+    if os.path.isfile(package + ".tar.gz"):
+        os.remove(package + ".tar.gz")
+    if os.path.exists(package):
+        shutil.rmtree(package)
+        finalMessage.append("{} has been removed.".format(package))
+    else:
+        # Save message to be shown at the end
+        finalMessage.append("The folder {} does not exist.".format(package))
+
+
+def filterPackageList(packageListOld):
+    """Filter the package list and add possible dependencies"""
+    packageList = []
+    for pkg in packageListOld:
+        if pkg in PACKAGE_NAMES:
+            packageList.extend(PACKAGE_NAMES[pkg])
         else:
-            packages.extend([key for key in external_urls.keys() if pkg in key])
-    args["packages"] = packages
+            packageList.extend([key for key in EXTERNAL_URLS if pkg in key])
+    return packageList
 
-    # print the list of packages to be downloaded/installed/removed
-    print(
-        "The following package(s) will be {0}:\n".format("removed" if cleanup else "downloaded"),
-        ", ".join(args["packages"]),
-        "\n",
-    )
 
-    # check Location For DuneModules
+def checkLocation():
+    """check call location of this script"""
     if not os.path.isdir("dune-common"):
-        show_message(
+        showMessage(
             "You have to call " + sys.argv[0] + " for " + sys.argv[1] + " from\n"
             "the same directory in which dune-common is located.\n"
             "You cannot install it in this folder."
         )
-        return
+        raise Exception("Script called in wrong location. Aborting.")
+
+
+def installFromTarball(package, parameters, externalDir, finalMessage):
+    """Install a package that uses a tarball as source code archive"""
+    # Download the tarfile
+    with urllib.request.urlopen(EXTERNAL_URLS[package]) as fileData:
+        dataToWrite = fileData.read()
+        with open(externalDir + "/" + package + ".tar.gz", "wb") as file:
+            file.write(dataToWrite)
+
+    # Save message to be shown at the end
+    finalMessage.append("{} has been successfully downloaded.".format(package))
+
+    # Start Installation if the flag download is set to false.
+    if not parameters["download"]:
+        # Extract
+        with tarfile.open(package + ".tar.gz") as tarArchive:
+            tarArchive.extractall()
+            shutil.move(os.path.commonprefix(tarArchive.getnames()), package)  # rename
+
+        # Start the configuration
+        os.chdir(externalDir + "/" + package)
+        if package == "gstat":
+            with open("configure", "r+") as file:
+                content = file.read()
+                file.seek(0)
+                file.truncate()
+                file.write(content.replace("doc/tex/makefile", ""))
+
+        # Run Configuration command
+        configCmd = "./configure" if package != "metis" else ["make", "config"]
+        runCommand(configCmd, currentDir=externalDir)
+        try:
+            runCommand("make", currentDir=externalDir)
+        except subprocess.CalledProcessError as exc:
+            raise Exception("{} installation has failed.".format(package)) from exc
+        # Save message to be shown at the end
+        if os.path.exists(externalDir + "/" + package):
+            finalMessage.append("{} has been successfully installed.".format(package))
+
+
+def installExternal(parameters):
+    """Main driver: install external packages"""
+
+    topDir = os.getcwd()
+    externalDir = topDir + "/external"
+    parameters["packages"] = filterPackageList(parameters["packages"])
+
+    # print the list of packages to be downloaded/installed/removed
+    print(
+        "The following package(s) will be {0}:\n".format(
+            "removed" if parameters["clean"] else "downloaded"
+        ),
+        ", ".join(parameters["packages"]),
+        "\n",
+    )
+
+    checkLocation()
 
     # clear the log file
-    logdir = ext_dir if os.path.exists(ext_dir) else top_dir
-    open(logdir + "/installexternal.log", "w").close()
+    logDir = externalDir if os.path.exists(externalDir) else topDir
+    with open(logDir + "/installexternal.log", "w") as _:
+        pass
 
-    for package in packages:
-        os.chdir(top_dir)
+    finalMessage = []
+    for package in parameters["packages"]:
+        os.chdir(topDir)
         # Package name for final message
-        final_message.append("[---" + package + "---]")
-
-        # Set the directory: create ext_dir for external packages
-        if not any([re.compile(p).match(package) for p in ["dumux", "dune", "opm"]]):
-            os.makedirs(ext_dir, exist_ok=True)
-            os.chdir(ext_dir)
-
-        # Set the branch
-        if "dumux" in package:
-            branch = dumux_branch
-        elif "mmesh" in package:
-            branch = mmesh_branch
-        elif "dune" in package:
-            branch = dune_branch
-        elif "opm" in package:
-            branch = opm_branch
+        finalMessage.append("[---" + package + "---]")
+
+        # Set the directory: create externalDir for external packages
+        if not any(re.compile(p).match(package) for p in ["dumux", "dune", "opm"]):
+            os.makedirs(externalDir, exist_ok=True)
+            os.chdir(externalDir)
+
+        branch = branchName(package, parameters)
 
         # Run the requested command
-        if cleanup:
-            if os.path.isfile(package + ".tar.gz"):
-                os.remove(package + ".tar.gz")
-            if os.path.exists(package):
-                # Remove
-                shutil.rmtree(package)
+        if parameters["clean"]:
+            cleanPackage(package, finalMessage)
+            continue
 
-                # Save message to be shown at the end
-                final_message.append("{} has been removed.".format(package))
+        # Check if tarball
+        tarball = EXTERNAL_URLS[package].endswith("tar.gz")
+
+        if not os.path.exists(package):
+            if tarball:
+                installFromTarball(package, parameters, externalDir, finalMessage)
             else:
+                # Clone from repo
+                gitClone(EXTERNAL_URLS[package], branch)
                 # Save message to be shown at the end
-                final_message.append("The folder {} does not exist.".format(package))
-            continue
-
+                finalMessage.append("{} has been successfully cloned.".format(package))
         else:
-            # Check if tarball
-            tarball = external_urls[package].endswith("tar.gz")
-
-            if not os.path.exists(package):
-
-                if tarball:
-
-                    # Download the tarfile
-                    filedata = urllib.request.urlopen(external_urls[package])
-                    datatowrite = filedata.read()
-
-                    with open(ext_dir + "/" + package + ".tar.gz", "wb") as f:
-                        f.write(datatowrite)
-                    # Save message to be shown at the end
-                    final_message.append("{} has been sucessfully downloaded.".format(package))
-
-                    # Start Installation if the flag download is set to false.
-                    if not download:
-                        # Extract
-                        tf = tarfile.open(package + ".tar.gz")
-                        tf.extractall()
-
-                        # Rename
-                        shutil.move(os.path.commonprefix(tf.getnames()), package)
-
-                        # Start the configuration
-                        os.chdir(ext_dir + "/" + package)
-                        if package == "gstat":
-                            with open("configure", "r+") as f:
-                                content = f.read()
-                                f.seek(0)
-                                f.truncate()
-                                f.write(content.replace("doc/tex/makefile", ""))
-
-                        # Run Configuration command
-                        configcmd = "./configure" if package != "metis" else ["make", "config"]
-                        run_command(configcmd, currentdir=ext_dir)
-                        try:
-                            run_command("make", currentdir=ext_dir)
-                        except subprocess.CalledProcessError:
-                            raise Exception("{} installation has failed.".format(package))
-                        # Save message to be shown at the end
-                        if os.path.exists(ext_dir + "/" + package):
-                            final_message.append(
-                                "{} has been successfully installed.".format(package)
-                            )
-
-                else:
-                    # Clone from repo
-                    git_clone(external_urls[package], branch)
-                    # Save message to be shown at the end
-                    final_message.append("{} has been sucessfully cloned.".format(package))
+            if tarball:
+                finalMessage.append("{} has been already installed.".format(package))
             else:
-                if tarball:
-                    final_message.append("{} has been already installed.".format(package))
-                else:
-                    # Checkout to the requested branch
-                    os.chdir(top_dir + "/" + package)
-                    subprocess.Popen(["git", "checkout", branch])
+                # Checkout to the requested branch
+                os.chdir(topDir + "/" + package)
+                with subprocess.Popen(["git", "checkout", branch]) as _:
                     # Save message to be shown at the end
-                    final_message.append(
+                    finalMessage.append(
                         "-- Skip cloning {}, because the folder already exists.".format(package)
                     )
-                    final_message.append("-- Checking out {} ".format(package) + branch)
+                    finalMessage.append("-- Checking out {} ".format(package) + branch)
                     continue
 
         # Save post installation message if there is any.
-        if package in messages.keys():
-            final_message.extend(messages[package])
+        if package in MESSAGES.keys():
+            finalMessage.extend(MESSAGES[package])
 
-        # Change to top_dir
-        os.chdir(top_dir)
+        # Change to topDir
+        os.chdir(topDir)
 
     # Save post installation message about dunecontrol if need be.
-    if not cleanup and any(x in pkg for pkg in packages for x in ["dumux", "dune", "opm"]):
-        final_message.append(
+    if not parameters["clean"] and any(
+        x in pkg for pkg in parameters["packages"] for x in ["dumux", "dune", "opm"]
+    ):
+        finalMessage.append(
             "\n\nPlease run the following command "
             "(can be copied to command line):\n\n  "
             "./dune-common/bin/dunecontrol --opts=./dumux/cmake.opts all"
         )
 
     # If cleanup and only logfile in the external directory, remove the directory
-    if os.path.isdir(ext_dir):
-        _, _, files = next(os.walk(ext_dir))
-        if cleanup and len(files) == 1 and "installexternal.log" in files:
-            shutil.rmtree(ext_dir)
+    if os.path.isdir(externalDir):
+        _, _, files = next(os.walk(externalDir))
+        if parameters["clean"] and len(files) == 1 and "installexternal.log" in files:
+            shutil.rmtree(externalDir)
 
-    return "\n".join(final_message)
+    return "\n".join(finalMessage)
 
 
 #################################################################
 #################################################################
-# (1/3) Define th necessary packages and their urls
+# (1/2) Define the necessary packages and their URLs
 #################################################################
 #################################################################
-dune_git_baseurl = "https://gitlab.dune-project.org/"
-dumux_git_baseurl = "https://git.iws.uni-stuttgart.de/dumux-repositories/"
-external_urls = {
-    "dumux-lecture": dumux_git_baseurl + "dumux-lecture.git",
-    "dumux-course": dumux_git_baseurl + "dumux-course.git",
-    "dune-uggrid": dune_git_baseurl + "/staging/dune-uggrid.git",
-    "dune-alugrid": dune_git_baseurl + "extensions/dune-alugrid.git",
-    "dune-foamgrid": dune_git_baseurl + "extensions/dune-foamgrid.git",
+DUNE_GIT_BASEURL = "https://gitlab.dune-project.org/"
+DUMUX_GIT_BASEURL = "https://git.iws.uni-stuttgart.de/dumux-repositories/"
+EXTERNAL_URLS = {
+    "dumux-lecture": DUMUX_GIT_BASEURL + "dumux-lecture.git",
+    "dumux-course": DUMUX_GIT_BASEURL + "dumux-course.git",
+    "dune-uggrid": DUNE_GIT_BASEURL + "/staging/dune-uggrid.git",
+    "dune-alugrid": DUNE_GIT_BASEURL + "extensions/dune-alugrid.git",
+    "dune-foamgrid": DUNE_GIT_BASEURL + "extensions/dune-foamgrid.git",
     "dune-subgrid": "https://git.imp.fu-berlin.de/agnumpde/dune-subgrid.git",
-    "dune-spgrid": dune_git_baseurl + "extensions/dune-spgrid.git",
-    "dune-mmesh": dune_git_baseurl + "samuel.burbulla/dune-mmesh.git",
-    "dune-functions": dune_git_baseurl + "staging/dune-functions.git",
-    "dune-typetree": dune_git_baseurl + "staging/dune-typetree.git",
+    "dune-spgrid": DUNE_GIT_BASEURL + "extensions/dune-spgrid.git",
+    "dune-mmesh": DUNE_GIT_BASEURL + "samuel.burbulla/dune-mmesh.git",
+    "dune-functions": DUNE_GIT_BASEURL + "staging/dune-functions.git",
+    "dune-typetree": DUNE_GIT_BASEURL + "staging/dune-typetree.git",
     "glpk": "http://ftp.gnu.org/gnu/glpk/glpk-4.60.tar.gz",
     "nlopt": "http://ab-initio.mit.edu/nlopt/nlopt-2.4.2.tar.gz",
     "opm-common": "https://github.com/OPM/opm-common",
@@ -326,7 +333,7 @@ external_urls = {
     "gstat": "http://gstat.org/gstat.tar.gz",
 }
 
-packagenames = {
+PACKAGE_NAMES = {
     "dumux-extensions": ["dumux-lecture", "dumux-course"],
     "dune-extensions": [
         "dune-uggrid",
@@ -343,7 +350,7 @@ packagenames = {
     "others": ["opm-common", "opm-grid", "metis", "gstat"],
 }
 
-messages = {
+MESSAGES = {
     "glpk": [
         "In addition, it might be necessary to set manually",
         "the glpk path in the CMAKE_FLAGS section of the .opts-file:",
@@ -369,16 +376,8 @@ messages = {
 
 #################################################################
 #################################################################
-# (2/3) Download/Config/Clean the requested packages
+# (2/2) Download/config/clean the requested packages
 #################################################################
 #################################################################
 # Start download/configuration/cleaning tasks
-final_message = install_external(args)
-
-#################################################################
-#################################################################
-# (3/3) Show the final message
-#################################################################
-#################################################################
-# Show final message
-show_message(final_message)
+showMessage(installExternal(args))
diff --git a/bin/make_installscript.py b/bin/make_installscript.py
index bba4014331ef5d962d8f3f70d348ec018e34c2cf..a49024e14881fc81ebb90ce7700abe62c6884654 100755
--- a/bin/make_installscript.py
+++ b/bin/make_installscript.py
@@ -17,7 +17,6 @@ from util.installscript import (
     getDefaultScriptName,
     filterDependencies,
     makeInstallScript,
-    makeScriptWriter,
     printProgressInfo,
     printFoundDependencies,
     printFoundVersionInfo,
@@ -26,10 +25,9 @@ from util.installscript import (
 )
 
 
-if __name__ == "__main__":
+def runMakeInstallScript():
+    """ "Generate an install script for a dune-module"""
 
-    ###################
-    # parse arguments
     parser = argparse.ArgumentParser(
         description="This script generates an install script for your module, "
         "taking into account non-published commits & changes.\n"
@@ -113,20 +111,20 @@ if __name__ == "__main__":
         ["Creating install script for module '{}' in folder '{}'".format(modName, modPath)]
     )
 
-    language = cmdArgs["language"]
-    scriptName = cmdArgs.get("filename", None)
-    if not scriptName:
-        scriptName = getDefaultScriptName(modName, language)
+    scriptName = cmdArgs.get("filename", getDefaultScriptName(modName, cmdArgs["language"]))
 
     makeInstallScript(
         modPath=modPath,
         dependencies=deps,
         scriptName=scriptName,
-        writer=makeScriptWriter(language),
         topFolderName=cmdArgs.get("topfoldername", None),
         optsFile=cmdArgs.get("optsFile", None),
     )
 
     subprocess.call(["chmod", "u+x", scriptName])
     printProgressInfo([f"Successfully created install script '{scriptName}'"])
-    printFinalMessage(scriptName, cmdArgs.get("topfoldername", None))
+    printFinalMessage(cmdArgs.get("topfoldername", None))
+
+
+if __name__ == "__main__":
+    runMakeInstallScript()
diff --git a/bin/postprocessing/exportscreenshot2d.py b/bin/postprocessing/exportscreenshot2d.py
index 8ff909a34270b04ce86e4db47d0edeffa4f42cfb..7172c2744f497f7cf8504476929e3c30faf65f1d 100644
--- a/bin/postprocessing/exportscreenshot2d.py
+++ b/bin/postprocessing/exportscreenshot2d.py
@@ -1,14 +1,14 @@
+# pylint: skip-file
 """
 Script for exporting 2d screenshots from ParaView
 
-TODO:
+Maybe add the following:
 - different colors for legend
 - read-in pvds with time outputs
 - read-in multiple vtus, e.g. for multidomain
 - rendering method 2d and 3d
 """
 
-# parse arguments
 import argparse
 import os
 import sys
@@ -28,7 +28,6 @@ try:
 except ImportError:
     print("`paraview.simple` not found. Make sure using pvbatch.")
 
-bool = ["True", "False"]
 parameterType = ["CELLS", "POINTS"]
 legendOrientation = ["Horizontal", "Vertical"]
 parser = argparse.ArgumentParser(
@@ -213,7 +212,7 @@ for curFile in args["files"]:
             print(vtuFile.CellArrayStatus)
         else:
             print(vtuFile.PointArrayStatus)
-        exit(1)
+        sys.exit(1)
 
     # get active view
     renderView1 = GetActiveView()
diff --git a/bin/postprocessing/extractlinedata.py b/bin/postprocessing/extractlinedata.py
index 041fb6e1c78a761b6a5ee8da6b3736d7ffe6abf2..f96ec365c05983c05450a8ef71bb2ea8af4a27ef 100644
--- a/bin/postprocessing/extractlinedata.py
+++ b/bin/postprocessing/extractlinedata.py
@@ -1,3 +1,7 @@
+"""
+Use paraview to extract data along a line
+"""
+
 import argparse
 import csv
 import sys
@@ -11,8 +15,11 @@ try:
         PlotOverLine,
         CreateWriter,
     )
-except ImportError:
-    raise ImportError("`paraview.simple` not found. Make sure using pvpython instead of python.")
+except ImportError as exc:
+    raise ImportError(
+        "`paraview.simple` not found. Make sure using pvpython instead of python."
+    ) from exc
+
 
 # parse arguments
 parser = argparse.ArgumentParser(
@@ -65,7 +72,7 @@ if outDirectory.strip():
     os.makedirs(outDirectory, exist_ok=True)
 
 # loop over all vtk files
-counter = 0
+COUNTER = 0
 for curFile in args["files"]:
 
     # if no output directory was specified, use the directory of the given file
@@ -79,16 +86,16 @@ for curFile in args["files"]:
             curOutDirectory, os.path.splitext(os.path.basename(curFile))[0] + ".csv"
         )
     elif len(args["files"]) > 1:
-        csvFileName = os.path.join(curOutDirectory, args["outFile"] + "_" + str(counter) + ".csv")
+        csvFileName = os.path.join(curOutDirectory, args["outFile"] + "_" + str(COUNTER) + ".csv")
     else:
         csvFileName = os.path.join(curOutDirectory, args["outFile"] + ".csv")
-    counter += 1
+    COUNTER += 1
 
     # print progress to command line
     if args["verbosity"] == 1:
         print(
             "Processing file ({}/{}): {}".format(
-                counter, len(args["files"]), os.path.basename(curFile)
+                COUNTER, len(args["files"]), os.path.basename(curFile)
             )
         )
 
diff --git a/bin/postprocessing/extractpointdataovertime.py b/bin/postprocessing/extractpointdataovertime.py
index 1063c8646325f631bda5c6ef3e5159ec11ed5901..41ea6d68d6a8012792320304604c775053adecbf 100644
--- a/bin/postprocessing/extractpointdataovertime.py
+++ b/bin/postprocessing/extractpointdataovertime.py
@@ -1,3 +1,9 @@
+# pylint: skip-file
+
+"""
+Extract data at probe location over time interval using Paraview
+"""
+
 import argparse
 import csv
 import os
diff --git a/bin/postprocessing/l2error.py b/bin/postprocessing/l2error.py
index bd1c1a0675c87ba01cc11a8fed71c55365b28659..0d3e8505994f1cd32d0bcb8dbf8fda972549f9db 100644
--- a/bin/postprocessing/l2error.py
+++ b/bin/postprocessing/l2error.py
@@ -1,164 +1,205 @@
+"""
+Helper script to compute discrete l2 error from output files
+Note: better do this in the code for increased precision
+"""
+
 import argparse
 import csv
 import sys
 
-# Auxiliary function that provides a handy parser
-parser = argparse.ArgumentParser(
-    prog="python " + sys.argv[0],
-    description="Calculate the l2 error of csv data files.",
-)
-parser.add_argument("-f1", "--reference", type=str, required=True, help="Reference csv-file")
-parser.add_argument("-f2", "--newSolution", type=str, required=True, help="NewSolution csv-file")
-parser.add_argument(
-    "-xMin",
-    "--xMin",
-    type=float,
-    required=False,
-    default=-1e99,
-    help="Restrict data to x>xMin",
-)
-parser.add_argument(
-    "-xMax",
-    "--xMax",
-    type=float,
-    required=False,
-    default=1e99,
-    help="Restrict data to x>xMax",
-)
-group1 = parser.add_mutually_exclusive_group(required=True)
-group1.add_argument("-x1", "--xData1", type=int, help="Column index of x data in reference")
-group1.add_argument("-x1Name", "--xDataName1", type=str, help="Name x data in reference")
-group2 = parser.add_mutually_exclusive_group(required=True)
-group2.add_argument("-x2", "--xData2", type=int, help="Column index of x data in newSolution")
-group2.add_argument("-x2Name", "--xDataName2", type=str, help="Name x data in newSolution")
-group3 = parser.add_mutually_exclusive_group(required=True)
-group3.add_argument("-y1", "--yData1", type=int, help="Column index of y data in reference")
-group3.add_argument("-y1Name", "--yDataName1", type=str, help="Name y data in reference")
-group4 = parser.add_mutually_exclusive_group(required=True)
-group4.add_argument("-y2", "--yData2", type=int, help="Column index of y data in newSolution")
-group4.add_argument("-y2Name", "--yDataName2", type=str, help="Name y data in newSolution")
-parser.add_argument("-p", "--percent", action="store_true", help="Print errors in percent")
-parser.add_argument("-f", "--force", action="store_true", help="Ignore 'not-matching' errors")
-parser.add_argument("-v", "--verbose", action="store_true", help="Verbosity of the script")
-args = vars(parser.parse_args())
-
-with open(args["reference"], "rb") as referenceFile:
-    reader = csv.reader(referenceFile)
-    reference = list(reader)
+
+def parseCommandLine():
+    """Auxiliary function that parses command line arguments"""
+
+    parser = argparse.ArgumentParser(
+        prog="python " + sys.argv[0],
+        description="Calculate the l2 error of csv data files.",
+    )
+    parser.add_argument("-f1", "--reference", type=str, required=True, help="Reference csv-file")
+    parser.add_argument(
+        "-f2", "--newSolution", type=str, required=True, help="NewSolution csv-file"
+    )
+    parser.add_argument(
+        "-xMin",
+        "--xMin",
+        type=float,
+        required=False,
+        default=-1e99,
+        help="Restrict data to x>xMin",
+    )
+    parser.add_argument(
+        "-xMax",
+        "--xMax",
+        type=float,
+        required=False,
+        default=1e99,
+        help="Restrict data to x>xMax",
+    )
+    group1 = parser.add_mutually_exclusive_group(required=True)
+    group1.add_argument("-x1", "--xData1", type=int, help="Column index of x data in reference")
+    group1.add_argument("-x1Name", "--xDataName1", type=str, help="Name x data in reference")
+    group2 = parser.add_mutually_exclusive_group(required=True)
+    group2.add_argument("-x2", "--xData2", type=int, help="Column index of x data in newSolution")
+    group2.add_argument("-x2Name", "--xDataName2", type=str, help="Name x data in newSolution")
+    group3 = parser.add_mutually_exclusive_group(required=True)
+    group3.add_argument("-y1", "--yData1", type=int, help="Column index of y data in reference")
+    group3.add_argument("-y1Name", "--yDataName1", type=str, help="Name y data in reference")
+    group4 = parser.add_mutually_exclusive_group(required=True)
+    group4.add_argument("-y2", "--yData2", type=int, help="Column index of y data in newSolution")
+    group4.add_argument("-y2Name", "--yDataName2", type=str, help="Name y data in newSolution")
+    parser.add_argument("-p", "--percent", action="store_true", help="Print errors in percent")
+    parser.add_argument("-f", "--force", action="store_true", help="Ignore 'not-matching' errors")
+    parser.add_argument("-v", "--verbose", action="store_true", help="Verbosity of the script")
+    return vars(parser.parse_args())
+
+
+def computeIndices(reference, args):
+    """Compute the indices of the reference data"""
     if args["xDataName1"] is not None:
-        indexReferenceX = reference[0].index(args["xDataName1"])
+        iRefX = reference[0].index(args["xDataName1"])
     else:
-        indexReferenceX = args["xData1"]
+        iRefX = args["xData1"]
     if args["yDataName1"] is not None:
-        indexReferenceY = reference[0].index(args["yDataName1"])
+        iRefY = reference[0].index(args["yDataName1"])
     else:
-        indexReferenceY = args["yData1"]
+        iRefY = args["yData1"]
 
-with open(args["newSolution"], "rb") as newSolutionFile:
-    reader = csv.reader(newSolutionFile)
-    newSolution = list(reader)
     if args["xDataName2"] is not None:
-        indexNewSolutionX = reference[0].index(args["xDataName2"])
+        iSolX = reference[0].index(args["xDataName2"])
     else:
-        indexNewSolutionX = args["xData2"]
+        iSolX = args["xData2"]
     if args["yDataName2"] is not None:
-        indexNewSolutionY = reference[0].index(args["yDataName2"])
+        iSolY = reference[0].index(args["yDataName2"])
     else:
-        indexNewSolutionY = args["yData2"]
-
-if reference[0][indexReferenceX] != reference[0][indexNewSolutionX] and not args["force"]:
-    print(
-        "X-Identifier not equal: ref=",
-        reference[0][indexReferenceX],
-        ",new=",
-        reference[0][indexNewSolutionX],
-        ". Aborting! (Use -f to continue anyway)",
-    )
-    exit(1)
-
-if reference[0][indexReferenceY] != newSolution[0][indexNewSolutionY] and not args["force"]:
-    print(
-        "Y-Identifier not equal. ref=",
-        reference[0][indexReferenceY],
-        ",new=",
-        newSolution[0][indexNewSolutionY],
-        ". Aborting! (Use -f to continue anyway)",
-    )
-    exit(2)
-
-if len(reference) != len(newSolution):
-    print(
-        "Length of reference and newSolution not equal: ref=",
-        len(reference),
-        ",new=",
-        len(newSolution),
-        ". Aborting!",
+        iSolY = args["yData2"]
+
+    return (
+        (iRefX, iRefY),
+        (iSolX, iSolY),
     )
-    exit(3)
-
-distanceOld = 0.0
-sumError = 0.0
-sumReference = 0.0
-sumDistance = 0.0
-numPoints = 0
-
-for i in range(1, len(reference)):
-    coord_ref = float(reference[i][indexReferenceX])
-    coord_newSolution = float(newSolution[i][indexNewSolutionX])
-    if coord_ref != coord_newSolution:
+
+
+def readL2ErrorData(args):
+    """Compute L2 error: main driver"""
+
+    with open(args["reference"], "r", newline="") as referenceFile:
+        reference = list(csv.reader(referenceFile))
+    with open(args["newSolution"], "r", newline="") as newSolutionFile:
+        newSolution = list(csv.reader(newSolutionFile))
+
+    iRef, iSol = computeIndices(reference, args)
+
+    if reference[0][iRef[0]] != newSolution[0][iSol[0]] and not args["force"]:
         print(
-            "Coordinates not equal: ref=",
-            coord_ref,
+            "X-Identifier not equal: ref=",
+            reference[0][iRef[0]],
             ",new=",
-            coord_newSolution,
+            newSolution[0][iSol[0]],
+            ". Aborting! (Use -f to continue anyway)",
+        )
+        sys.exit(1)
+
+    if reference[0][iRef[1]] != newSolution[0][iSol[1]] and not args["force"]:
+        print(
+            "Y-Identifier not equal. ref=",
+            reference[0][iRef[1]],
+            ",new=",
+            newSolution[0][iSol[1]],
+            ". Aborting! (Use -f to continue anyway)",
+        )
+        sys.exit(2)
+
+    if len(reference) != len(newSolution):
+        print(
+            "Length of reference and newSolution not equal: ref=",
+            len(reference),
+            ",new=",
+            len(newSolution),
             ". Aborting!",
         )
-        exit(4)
-    if coord_ref < float(args["xMin"]) or coord_ref > float(args["xMax"]):
-        continue
+        sys.exit(3)
+
+    return {
+        "reference": reference,
+        "iRef": iRef,
+        "newSolution": newSolution,
+        "iSol": iSol,
+    }
+
+
+def computeL2ErrorSquared(args, reference, iRef, newSolution, iSol):
+    """Compute L2 error"""
+    sumError = 0.0
+    sumReference = 0.0
+    sumDistance = 0.0
+    numPoints = 0
 
-    if i == 1:
-        distance = 0.5 * (
-            float(reference[2][indexReferenceX]) - float(reference[1][indexReferenceX])
+    for i in range(1, len(reference)):
+        coordReference = float(reference[i][iRef[0]])
+        coordNewSolution = float(newSolution[i][iSol[0]])
+        if coordReference != coordNewSolution:
+            print(
+                "Coordinates not equal: ref=",
+                coordReference,
+                ",new=",
+                coordNewSolution,
+                ". Aborting!",
+            )
+            sys.exit(4)
+        if coordReference < float(args["xMin"]) or coordReference > float(args["xMax"]):
+            continue
+
+        if i == 1:
+            distance = 0.5 * (float(reference[2][iRef[0]]) - float(reference[1][iRef[0]]))
+        elif i == len(reference) - 1:
+            distA = float(reference[len(reference) - 1][iRef[0]])
+            distB = float(reference[len(reference) - 2][iRef[0]])
+            distance = 0.5 * (distA - distB)
+        else:
+            distance = 0.5 * (float(reference[i + 1][iRef[0]]) - float(reference[i - 1][iRef[0]]))
+        sumError += (
+            (float(reference[i][iRef[1]]) - float(newSolution[i][iSol[1]])) ** 2
+        ) * distance
+        sumReference += ((float(reference[i][iRef[1]])) ** 2) * distance
+        sumDistance += distance
+        numPoints += 1
+
+    if numPoints < 999 and not args["force"]:
+        print(
+            "Warning: numPoints=",
+            numPoints,
+            " is low, could result in a bad error approximation."
+            " (Use -f to suppress this warning)",
+        )
+
+    return {"absolute": sumError / sumDistance, "relative": sumError / sumReference}
+
+
+def printL2Error(args, absolute, relative):
+    """Print L2 error"""
+
+    # numPoints is needed, resulting from the equidistant integration
+    l2normAbs = (absolute) ** 0.5
+    # numPoints cancels out for equidistant integration
+    l2normRel = (relative) ** 0.5
+
+    if args["percent"]:
+        print(
+            "L2_Error_in_%: ",
+            "{0:.5f}%".format(l2normAbs * 100),
+            "Rel_L2_Error_in_%: ",
+            "{0:.5f}%".format(l2normRel * 100),
         )
-    elif i == len(reference) - 1:
-        distA = float(reference[len(reference) - 1][indexReferenceX])
-        distB = float(reference[len(reference) - 2][indexReferenceX])
-        distance = 0.5 * (distA - distB)
     else:
-        distance = 0.5 * (
-            float(reference[i + 1][indexReferenceX]) - float(reference[i - 1][indexReferenceX])
+        print(
+            "L2_Error: ",
+            "{0:.5e}".format(l2normAbs),
+            " Rel_L2_Error: ",
+            "{0:.5e}".format(l2normRel),
         )
-    sumError += (
-        (float(reference[i][indexReferenceY]) - float(newSolution[i][indexNewSolutionY])) ** 2
-    ) * distance
-    sumReference += ((float(reference[i][indexReferenceY])) ** 2) * distance
-    sumDistance += distance
-    numPoints += 1
-
-if numPoints < 999 and not args["force"]:
-    print(
-        "Warning: numPoints=",
-        numPoints,
-        " is low, could result in bad the error approximation. (Use -f to suppress this warning)",
-    )
 
-l2normAbs = (
-    sumError / sumDistance
-) ** 0.5  # numPoints is needed, resulting from the equidistant integration
-l2normRel = (sumError / sumReference) ** 0.5  # numPoints cancels out for equidistant integration
-
-if args["percent"]:
-    print(
-        "L2_Error_in_%: ",
-        "{0:.5f}%".format(l2normAbs * 100),
-        "Rel_L2_Error_in_%: ",
-        "{0:.5f}%".format(l2normRel * 100),
-    )
-else:
-    print(
-        "L2_Error: ",
-        "{0:.5e}".format(l2normAbs),
-        " Rel_L2_Error: ",
-        "{0:.5e}".format(l2normRel),
-    )
+
+if __name__ == "__main__":
+    cmdArgs = parseCommandLine()
+    error = computeL2ErrorSquared(cmdArgs, **readL2ErrorData(cmdArgs))
+    printL2Error(cmdArgs, **error)
diff --git a/bin/remove_clutter_after_last_endif.py b/bin/remove_clutter_after_last_endif.py
index c21bbff427aa3a5a3f4175594edffa482d1a871f..15fb3c31abe0b846e418adb9122e87773e77ee4c 100644
--- a/bin/remove_clutter_after_last_endif.py
+++ b/bin/remove_clutter_after_last_endif.py
@@ -1,17 +1,29 @@
 #!/usr/bin/env python3
+
+"""
+Remove clutter after the last #endif (header guard)
+in C++ header files
+"""
+
 import os
 
 
-# replace everything after last #endif with new line
-def clearAfterLastEndIf(filename):
-    with open(filename, "r") as header:
+def clearAfterLastEndIf(fileName):
+    """Clear a single headerfile with name fileName"""
+    with open(fileName, "r") as header:
         split = header.read().split("#endif")
         split[-1] = "\n"
-    with open(filename, "w") as header:
+    with open(fileName, "w") as header:
         header.write("#endif".join(split))
 
 
-for root, _, files in os.walk(os.getcwd()):
-    for file in files:
-        if file.endswith(".hh"):
-            clearAfterLastEndIf(os.path.join(root, file))
+def run():
+    """Main driver: go through all header in directory recursively"""
+    for root, _, files in os.walk(os.getcwd()):
+        for file in files:
+            if file.endswith(".hh"):
+                clearAfterLastEndIf(os.path.join(root, file))
+
+
+if __name__ == "__main__":
+    run()
diff --git a/bin/testing/findtests.py b/bin/testing/findtests.py
index b01946db7d0a154e996654fd1974543dcac5b87d..91126234dd47522cc6b8172bd8f8ab1b9e3c8913 100755
--- a/bin/testing/findtests.py
+++ b/bin/testing/findtests.py
@@ -19,13 +19,13 @@ from multiprocessing import Pool
 from functools import partial
 
 
-# Check if the set a contains a member of list b
 def hasCommonMember(myset, mylist):
+    """Check if the set a contains a member of list b"""
     return not myset.isdisjoint(mylist)
 
 
-# make dry run and return the compilation command
 def getCompileCommand(testConfig, buildTreeRoot="."):
+    """make dry run and return the compilation command"""
     target = testConfig["target"]
     lines = subprocess.check_output(
         ["make", "-B", "--dry-run", target], encoding="ascii", cwd=buildTreeRoot
@@ -35,26 +35,26 @@ def getCompileCommand(testConfig, buildTreeRoot="."):
         return any(cpp in line for cpp in ["g++", "clang++"])
 
     # there may be library build commands first, last one is the actual target
-    commands = list(filter(lambda line: hasCppCommand(line), lines))
+    commands = list(filter(hasCppCommand, lines))
     return commands[-1] if commands else None
 
 
-# get the command and folder to compile the given test
 def buildCommandAndDir(testConfig, buildTreeRoot="."):
+    """get the command and folder to compile the given test"""
     compCommand = getCompileCommand(testConfig, buildTreeRoot)
     if compCommand is None:
         raise Exception("Could not determine compile command for {}".format(testConfig))
-    else:
-        (_, dir), command = [comm.split() for comm in compCommand.split("&&")]
-        return command, dir
+
+    (_, directory), command = [comm.split() for comm in compCommand.split("&&")]
+    return command, directory
 
 
-# check if a test is affected by changes in the given files
 def isAffectedTest(testConfigFile, changedFiles, buildTreeRoot="."):
+    """check if a test is affected by changes in the given files"""
     with open(testConfigFile) as configFile:
         testConfig = json.load(configFile)
 
-    command, dir = buildCommandAndDir(testConfig, buildTreeRoot)
+    command, directory = buildCommandAndDir(testConfig, buildTreeRoot)
     mainFile = command[-1]
 
     # detect headers included in this test
@@ -62,7 +62,12 @@ def isAffectedTest(testConfigFile, changedFiles, buildTreeRoot="."):
     # -H  prints the name(+path) of each used header
     # for some reason g++ writes to stderr
     headers = subprocess.run(
-        command + ["-MM", "-H"], stderr=PIPE, stdout=PIPE, cwd=dir, encoding="ascii"
+        command + ["-MM", "-H"],
+        stderr=PIPE,
+        stdout=PIPE,
+        cwd=directory,
+        encoding="ascii",
+        check=False,
     ).stderr.splitlines()
     headers = [h.lstrip(". ") for h in headers]
     headers.append(mainFile)
@@ -107,11 +112,11 @@ if __name__ == "__main__":
     buildDir = os.path.abspath(args["build_dir"])
     targetFile = os.path.abspath(args["outfile"])
     with open(args["file_list"]) as files:
-        changedFiles = set([line.strip("\n") for line in files.readlines()])
+        changedFileList = set(line.strip("\n") for line in files.readlines())
 
     # clean build directory
-    subprocess.run(["make", "clean"], cwd=buildDir)
-    subprocess.run(["make", "all"], cwd=buildDir)
+    subprocess.run(["make", "clean"], cwd=buildDir, check=False)
+    subprocess.run(["make", "all"], cwd=buildDir, check=False)
 
     # detect affected tests
     print("Detecting affected tests:")
@@ -119,12 +124,14 @@ if __name__ == "__main__":
     tests = glob(os.path.join(buildDir, "TestMetaData") + "/*json")
 
     numProcesses = max(1, args["num_processes"])
-    findAffectedTest = partial(isAffectedTest, changedFiles=changedFiles, buildTreeRoot=buildDir)
-    with Pool(processes=numProcesses) as p:
-        for affected, name, target in p.imap_unordered(findAffectedTest, tests, chunksize=4):
+    findAffectedTest = partial(isAffectedTest, changedFiles=changedFileList, buildTreeRoot=buildDir)
+    with Pool(processes=numProcesses) as pool:
+        for affected, name, cmakeTarget in pool.imap_unordered(
+            findAffectedTest, tests, chunksize=4
+        ):
             if affected:
-                affectedTests[name] = {"target": target}
-                print("\t- {} (target: {})".format(name, target))
+                affectedTests[name] = {"target": cmakeTarget}
+                print("\t- {} (target: {})".format(name, cmakeTarget))
 
     print("Detected {} affected tests".format(len(affectedTests)))
 
diff --git a/bin/testing/fuzzycomparedata.py b/bin/testing/fuzzycomparedata.py
index 5d2207eb664e17c54b0f860e46f5a79b26b334d6..36e25aa12e797a110df7f31ae5306426e7654950 100644
--- a/bin/testing/fuzzycomparedata.py
+++ b/bin/testing/fuzzycomparedata.py
@@ -9,18 +9,18 @@ import argparse
 import csv
 import json
 import sys
-from fuzzycomparevtu import is_fuzzy_equal_text
+from fuzzycomparevtu import isFuzzyEqualText
 
 
-def compare_data(
+def compareData(
     dataFile1,
     dataFile2,
     delimiter,
     absolute=1.5e-7,
     relative=1e-2,
-    zeroValueThreshold={},
+    zeroValueThreshold=None,
     verbose=True,
-):
+):  # pylint: disable=too-many-arguments
     """take two data files and compare them. Returns an exit key as returnvalue.
 
     Arguments:
@@ -52,9 +52,13 @@ def compare_data(
             f"a maximum absolute error of {absolute}*max_abs_parameter_value."
         )
 
+    zeroValueThreshold = zeroValueThreshold or {}
+
     # construct element tree from data files
-    data1 = list(csv.reader(open(dataFile1, "r"), delimiter=delimiter))
-    data2 = list(csv.reader(open(dataFile2, "r"), delimiter=delimiter))
+    with open(dataFile1, "r") as data1:
+        data1 = list(csv.reader(data1, delimiter=delimiter))
+    with open(dataFile2, "r") as data2:
+        data2 = list(csv.reader(data2, delimiter=delimiter))
 
     if len(data1) != len(data2):
         print(
@@ -64,28 +68,32 @@ def compare_data(
             len(data2),
             ". Aborting!",
         )
-        exit(3)
+        sys.exit(3)
 
-    is_equal = True
+    isEqual = True
     for i in range(0, len(data1[0])):
-        a = data1[0][i]
-        b = data2[0][i]
+        valueA = data1[0][i]
+        valueB = data2[0][i]
         for j in range(1, len(data1)):
-            a += " {0}".format(data1[j][i])
-            b += " {0}".format(data2[j][i])
+            valueA += " {0}".format(data1[j][i])
+            valueB += " {0}".format(data2[j][i])
 
-        if not is_fuzzy_equal_text(
-            a, b, "row {0}".format(i), len(data1), absolute, relative, zeroValueThreshold, verbose
+        if not isFuzzyEqualText(
+            valueA,
+            valueB,
+            "row {0}".format(i),
+            len(data1),
+            absolute,
+            relative,
+            zeroValueThreshold,
+            verbose,
         ):
             if verbose:
-                is_equal = False
+                isEqual = False
             else:
                 return False
 
-    if is_equal:
-        return 0
-    else:
-        return 1
+    return 0 if isEqual else 1
 
 
 # main program if called as script return appropriate error codes
@@ -123,7 +131,7 @@ if __name__ == "__main__":
     args = vars(parser.parse_args())
 
     sys.exit(
-        compare_data(
+        compareData(
             args["data_file_1"],
             args["data_file_2"],
             args["delimiter"],
diff --git a/bin/testing/fuzzycomparevtu.py b/bin/testing/fuzzycomparevtu.py
index f8c04e311ecf07e1cba6c39733ebafc21bb9c634..d7bef7db6cac317b47fda3ef6aef05b0df149f9d 100644
--- a/bin/testing/fuzzycomparevtu.py
+++ b/bin/testing/fuzzycomparevtu.py
@@ -14,8 +14,12 @@ import math
 import os
 import functools
 
+# Note: these issues can be improved on by factoring out functions
+# but we ignore it for now ("legacy code")
+# pylint: disable=too-many-arguments,too-many-locals,too-many-branches,too-many-statements
 
-def compare_vtk(vtk1, vtk2, absolute=1.5e-7, relative=1e-2, zeroValueThreshold={}, verbose=True):
+
+def compareVTK(vtk1, vtk2, absolute=1.5e-7, relative=1e-2, zeroValueThreshold=None, verbose=True):
     """take two vtk files and compare them. Returns an exit key as returnvalue.
 
     Arguments:
@@ -43,22 +47,26 @@ def compare_vtk(vtk1, vtk2, absolute=1.5e-7, relative=1e-2, zeroValueThreshold={
     """
 
     # construct element tree from vtk file
-    root1 = ET.fromstring(open(vtk1).read())
-    root2 = ET.fromstring(open(vtk2).read())
+    with open(vtk1) as vtk1File:
+        root1 = ET.fromstring(vtk1File.read())
+    with open(vtk2) as vtk2File:
+        root2 = ET.fromstring(vtk2File.read())
+
+    zeroValueThreshold = zeroValueThreshold or {}
 
     # convert parallel vtu to sequential vtu if necessary
     convertedFromParallelVtu = False
     if vtk1.endswith(".pvtu"):
-        root1 = convert_pvtu_to_vtu(root1, vtk1)
+        root1 = convertPVTUToVTU(root1, vtk1)
         convertedFromParallelVtu = True
     if vtk2.endswith(".pvtu"):
-        root2 = convert_pvtu_to_vtu(root2, vtk2)
+        root2 = convertPVTUToVTU(root2, vtk2)
         convertedFromParallelVtu = True
 
     # sort the vtk file in case nodes appear in different positions
     # e.g. because of minor changes in the output code
-    sortedroot1 = sort_vtk(root1)
-    sortedroot2 = sort_vtk(root2)
+    sortedroot1 = sortVTK(root1)
+    sortedroot2 = sortVTK(root2)
 
     if verbose:
         print(
@@ -69,12 +77,12 @@ def compare_vtk(vtk1, vtk2, absolute=1.5e-7, relative=1e-2, zeroValueThreshold={
 
     # sort the vtk file so that the comparison is independent of the
     # index numbering (coming e.g. from different grid managers)
-    sortedroot1, sortedroot2 = sort_vtk_by_coordinates(
+    sortedroot1, sortedroot2 = sortVTKByCoordinates(
         sortedroot1, sortedroot2, verbose, convertedFromParallelVtu
     )
 
     # do the fuzzy compare
-    if is_fuzzy_equal_node(
+    if isFuzzyEqualNode(
         sortedroot1,
         sortedroot2,
         absolute,
@@ -85,13 +93,16 @@ def compare_vtk(vtk1, vtk2, absolute=1.5e-7, relative=1e-2, zeroValueThreshold={
     ):
         print("Fuzzy comparison done (equal)")
         return 0
-    else:
-        print("Fuzzy comparison done (not equal)")
-        return 1
 
+    print("Fuzzy comparison done (not equal)")
+    return 1
 
-# convert a parallel vtu file into sequential one by glueing the pieces together
-def convert_pvtu_to_vtu(pvturoot, filename):
+
+def convertPVTUToVTU(pvturoot, filename):
+    """
+    Convert parallel piece *.pvtu file to sequential *.vtu file
+    by glueing the pieces together removing duplicate vertices
+    """
 
     # get the directory of the vtu file in case the piece paths are relative
     dirname = os.path.dirname(os.path.abspath(filename))
@@ -99,7 +110,8 @@ def convert_pvtu_to_vtu(pvturoot, filename):
     pieces = []
     for piece in pvturoot.findall(".//Piece"):
         piecename = os.path.join(dirname, os.path.basename(piece.attrib["Source"]))
-        pieces.append(ET.fromstring(open(piecename).read()))
+        with open(piecename) as pieceFile:
+            pieces.append(ET.fromstring(pieceFile.read()))
 
     root = pieces[0]
     rootCellDataArrays = []
@@ -140,11 +152,11 @@ def convert_pvtu_to_vtu(pvturoot, filename):
         # compute offset for the offsets vector (it's the last entry of the current root piece)
         for dataArray in root.findall(".//Cells/DataArray"):
             if dataArray.attrib["Name"] == "offsets":
-                offsets_offset = int(dataArray.text.strip().rsplit(" ", 1)[1])
+                offsetsOffset = int(dataArray.text.strip().rsplit(" ", 1)[1])
 
         # add the offsets to the root piece
         for value in offsets.text.strip().split():
-            newvalue = " " + str(int(value) + offsets_offset) + " "
+            newvalue = " " + str(int(value) + offsetsOffset) + " "
             rootOffsets.text += newvalue
 
         # compute offset for the connectivity vector
@@ -186,16 +198,16 @@ def convert_pvtu_to_vtu(pvturoot, filename):
 
 
 # fuzzy compare of VTK nodes
-def is_fuzzy_equal_node(
+def isFuzzyEqualNode(
     node1, node2, absolute, relative, zeroValueThreshold, verbose, convertedFromParallelVtu=False
 ):
-
-    is_equal = True
+    """Check if two XML nodes are equal (node contains attributes and text)"""
+    isEqual = True
     for node1child, node2child in zip(node1.iter(), node2.iter()):
         if node1.tag != node2.tag:
             if verbose:
                 print("The name of the node differs in: {} and {}".format(node1.tag, node2.tag))
-                is_equal = False
+                isEqual = False
             else:
                 return False
         if not convertedFromParallelVtu and list(node1.attrib.items()) != list(
@@ -205,13 +217,13 @@ def is_fuzzy_equal_node(
                 print("Attributes differ in node: {}".format(node1.tag))
                 print("Attributes1: ", list(node1.attrib.items()))
                 print("Attributes2: ", list(node2.attrib.items()))
-                is_equal = False
+                isEqual = False
             else:
                 return False
         if len(list(node1.iter())) != len(list(node2.iter())):
             if verbose:
                 print("Number of children differs in node: {}".format(node1.tag))
-                is_equal = False
+                isEqual = False
             else:
                 return False
         if node1child.text or node2child.text:
@@ -219,7 +231,7 @@ def is_fuzzy_equal_node(
                 numberOfComponents = 1
             else:
                 numberOfComponents = int(node1child.attrib["NumberOfComponents"])
-            if not is_fuzzy_equal_text(
+            if not isFuzzyEqualText(
                 node1child.text,
                 node2child.text,
                 node1child.attrib["Name"],
@@ -231,7 +243,7 @@ def is_fuzzy_equal_node(
             ):
                 if node1child.attrib["Name"] == node2child.attrib["Name"]:
                     if verbose:
-                        is_equal = False
+                        isEqual = False
                     else:
                         return False
                 else:
@@ -241,23 +253,24 @@ def is_fuzzy_equal_node(
                                 node1child.attrib["Name"], node2child.attrib["Name"]
                             )
                         )
-                        is_equal = False
+                        isEqual = False
                     else:
                         return False
-    return is_equal
+    return isEqual
 
 
 # fuzzy compare of text (in the xml sense) consisting of whitespace separated numbers
-def is_fuzzy_equal_text(
+def isFuzzyEqualText(
     text1, text2, parameter, numComp, absolute, relative, zeroValueThreshold, verbose
 ):
+    """Check if the text of two XML nodes is fuzzy-equal"""
     list1 = text1.split()
     list2 = text2.split()
     # difference only in whitespace?
     if list1 == list2:
         return True
     # compare number by number
-    is_equal = True
+    isEqual = True
 
     # first split the list into compononents
     lists1 = []
@@ -275,9 +288,9 @@ def is_fuzzy_equal_text(
         else:
             parameters.append(parameter)
 
-    for list1, list2, parameter in zip(lists1, lists2, parameters):
+    for list1, list2, param in zip(lists1, lists2, parameters):
         # for verbose output
-        max_relative_difference = 0.0
+        maxRelativeDifference = 0.0
         message = ""
 
         # see inspiration, explanations in
@@ -289,10 +302,10 @@ def is_fuzzy_equal_text(
         # check for nan and inf
         for number1, number2 in zip(floatList1, floatList2):
             if math.isnan(number1) or math.isnan(number2):
-                print("Parameter {} contains NaN!".format(parameter))
+                print("Parameter {} contains NaN!".format(param))
                 return False
             if math.isinf(number1) or math.isinf(number2):
-                print("Parameter {} contains inf!".format(parameter))
+                print("Parameter {} contains inf!".format(param))
                 return False
 
         # Manipulate the data set for the sake of sensible comparison.
@@ -300,8 +313,8 @@ def is_fuzzy_equal_text(
         # replace all float under threshold with zero.
         # Only replace them with zero if the parameters in both lists are under the threshold.
         # Otherwise we compare a non-zero value with 0 later.
-        if parameter in zeroValueThreshold:
-            zeroThr = float(zeroValueThreshold[parameter])
+        if param in zeroValueThreshold:
+            zeroThr = float(zeroValueThreshold[param])
             floatList1 = [
                 0.0 if (abs(i) < zeroThr) and (abs(j) < zeroThr) else i
                 for i, j in zip(floatList1, floatList2)
@@ -329,38 +342,37 @@ def is_fuzzy_equal_text(
             # ...if not check the relative criterion
             if diff <= largernumber * relative:
                 continue
+
+            # the numbers are not equal
+            if verbose:
+                isEqual = False
+                if largernumber != 0.0:
+                    if diff / largernumber > maxRelativeDifference:
+                        maxRelativeDifference = diff / largernumber
+                        message = "Difference is too large: {:.2%} -> between: {} and {}".format(
+                            maxRelativeDifference, number1, number2
+                        )
             else:
-                # the numbers are not equal
-                if verbose:
-                    is_equal = False
-                    if largernumber != 0.0:
-                        if diff / largernumber > max_relative_difference:
-                            max_relative_difference = diff / largernumber
-                            message = (
-                                "Difference is too large: {:.2%} -> between: {} and {}".format(
-                                    max_relative_difference, number1, number2
-                                )
-                            )
-                else:
-                    return False
+                return False
 
-        if verbose and max_relative_difference != 0.0:
+        if verbose and maxRelativeDifference != 0.0:
             print(
-                f"\nData differs in parameter: {parameter}\n",
+                f"\nData differs in parameter: {param}\n",
                 message,
-                f"Info for {parameter}: "
+                f"Info for {param}: "
                 f"max_abs_parameter_value={magnitude} and min_abs_parameter_value={minimal}.",
             )
-            if parameter in zeroValueThreshold:
+            if param in zeroValueThreshold:
                 print(
-                    f"For parameter {parameter} a zero value threshold"
-                    f" of {zeroValueThreshold[parameter]} was given."
+                    f"For parameter {param} a zero value threshold"
+                    f" of {zeroValueThreshold[param]} was given."
                 )
 
-    return is_equal
+    return isEqual
 
 
-def sort_by_name(elem):
+def getNameOfNode(elem):
+    """Function helper to return the name of an attribute used as sort key"""
     name = elem.get("Name")
     if name:
         try:
@@ -371,14 +383,16 @@ def sort_by_name(elem):
 
 
 # sorts attributes of an item and returns a sorted item
-def sort_attributes(item, sorteditem):
+def sortAttributes(item, sorteditem):
+    """Sort the attributes of a node"""
     attrkeys = sorted(item.keys())
     for key in attrkeys:
         sorteditem.set(key, item.get(key))
 
 
-def sort_elements(items, newroot):
-    items = sorted(items, key=sort_by_name)
+def sortElements(items, newroot):
+    """Sort all elements of a node"""
+    items = sorted(items, key=getNameOfNode)
     items = sorted(items, key=attrgetter("tag"))
 
     # Once sorted, we sort each of the items
@@ -390,37 +404,59 @@ def sort_elements(items, newroot):
             newitem.text = item.text
 
         # Copy the attributes (sorted by key) to the new item
-        sort_attributes(item, newitem)
+        sortAttributes(item, newitem)
 
         # Copy the children of item (sorted) to the new item
-        sort_elements(list(item), newitem)
+        sortElements(list(item), newitem)
 
         # Append this sorted item to the sorted root
         newroot.append(newitem)
 
 
 # has to sort all Cell and Point Data after the attribute "Name"!
-def sort_vtk(root):
+def sortVTK(root):
+    """Sort VTK file to be comparable (the order of nodes shouldn't matter)"""
     if root.tag != "VTKFile":
         print("Format is not a VTKFile. Sorting will most likely fail!")
     # create a new root for the sorted tree
     newroot = ET.Element(root.tag)
     # create the sorted copy
     # (after the idea of Dale Lane's xmldiff.py)
-    sort_attributes(root, newroot)
-    sort_elements(list(root), newroot)
+    sortAttributes(root, newroot)
+    sortElements(list(root), newroot)
     # return the sorted element tree
     return newroot
 
 
-# sorts the data by point coordinates so that it is independent of index numbering
-def sort_vtk_by_coordinates(root1, root2, verbose, convertedFromParallelVtu=False):
-    if not is_fuzzy_equal_node(
+def floatCompare(valueA, valueB, eps):
+    """floating point comparison operator for scalars"""
+    if math.fabs(valueA - valueB) < eps:
+        return 0
+    if valueA > valueB:
+        return 1
+    return -1
+
+
+def floatVecCompare(valueA, valueB, eps):
+    """floating point comparison operator for vectors"""
+    for i, j in zip(valueA, valueB):
+        res = floatCompare(i, j, eps)
+        if res != 0:
+            return res
+    return 0
+
+
+def sortVTKByCoordinates(root1, root2, verbose, convertedFromParallelVtu=False):
+    """
+    Sort VTK coordinates (the order of coordinates shouldn't matter)
+    This usually occurs if different indexing is used (e.g. different grid managers)
+    """
+    if not isFuzzyEqualNode(
         root1.find(".//Points/DataArray"),
         root2.find(".//Points/DataArray"),
         absolute=1e-2,
         relative=1.5e-7,
-        zeroValueThreshold=dict(),
+        zeroValueThreshold={},
         verbose=False,
         convertedFromParallelVtu=False,
     ):
@@ -473,11 +509,11 @@ def sort_vtk_by_coordinates(root1, root2, verbose, convertedFromParallelVtu=Fals
             vertex = 0
             for cellIdx, offset in enumerate(offsets):
                 cellArray.append([])
-                for v in range(vertex, int(offset)):
+                for vIdx in range(vertex, int(offset)):
                     if convertedFromParallelVtu:
-                        cellArray[cellIdx].append(uniqueIdx[int(connectivity[v])])
+                        cellArray[cellIdx].append(uniqueIdx[int(connectivity[vIdx])])
                     else:
-                        cellArray[cellIdx].append(int(connectivity[v]))
+                        cellArray[cellIdx].append(int(connectivity[vIdx]))
                     vertex += 1
 
             # for non-conforming output vertices can have the same coordinates and also
@@ -493,37 +529,17 @@ def sort_vtk_by_coordinates(root1, root2, verbose, convertedFromParallelVtu=Fals
                         largestCellMidPointForVertex[vertexIndex], midpoint
                     )
 
-            # floating point comparison operator for scalars
-            def float_cmp(a, b, eps):
-                if math.fabs(a - b) < eps:
-                    return 0
-                elif a > b:
-                    return 1
-                else:
-                    return -1
-
-            # floating point comparison operator for vectors
-            def floatvec_cmp(a, b, eps):
-                for i, j in zip(a, b):
-                    res = float_cmp(i, j, eps)
-                    if res != 0:
-                        return res
-                return 0
-
-            # compute an epsilon and a comparison operator for floating point comparisons
-            bBoxMax = max(vertexArray)
-            bBoxMin = min(vertexArray)
-            epsilon = math.sqrt(sum([(a - b) ** 2 for a, b in zip(bBoxMax, bBoxMin)])) * 1e-7
-
             # first compare by coordinates, if the same compare largestCellMidPointForVertex
-            # TODO: is there a more pythonic way?
-            def vertex_cmp(a, b):
-                res = floatvec_cmp(a[1], b[1], epsilon)
+            def vertexCompare(valueA, valueB, largestCellMidPoint, bBoxMax, bBoxMin):
+                epsilon = math.sqrt(sum([(a - b) ** 2 for a, b in zip(bBoxMax, bBoxMin)])) * 1e-7
+                res = floatVecCompare(valueA[1], valueB[1], epsilon)
                 if res != 0:
                     return res
 
-                res2 = floatvec_cmp(
-                    largestCellMidPointForVertex[a[0]], largestCellMidPointForVertex[b[0]], epsilon
+                res2 = floatVecCompare(
+                    largestCellMidPoint[valueA[0]],
+                    largestCellMidPoint[valueB[0]],
+                    epsilon,
                 )
                 if res2 != 0:
                     return res2
@@ -535,10 +551,26 @@ def sort_vtk_by_coordinates(root1, root2, verbose, convertedFromParallelVtu=Fals
             for idx, coords in enumerate(vertexArray):
                 vMap.append((idx, coords))
 
+            # compute an epsilon and a comparison operator for floating point comparisons
+            bBoxMax = max(vertexArray)
+            bBoxMin = min(vertexArray)
+
             vertexIndexMap = [0] * len(vMap)
             vertexIndexMapInverse = [0] * len(vMap)
             # first sort by coordinates, if the same by largestCellMidPointForVertex
-            for idxNew, idxOld in enumerate(sorted(vMap, key=functools.cmp_to_key(vertex_cmp))):
+            for idxNew, idxOld in enumerate(
+                sorted(
+                    vMap,
+                    key=functools.cmp_to_key(
+                        functools.partial(
+                            vertexCompare,
+                            largestCellMidPoint=largestCellMidPointForVertex,
+                            bBoxMax=bBoxMax,
+                            bBoxMin=bBoxMin,
+                        )
+                    ),
+                )
+            ):
                 vertexIndexMap[idxOld[0]] = idxNew
                 vertexIndexMapInverse[idxNew] = idxOld[0]
 
@@ -562,7 +594,7 @@ def sort_vtk_by_coordinates(root1, root2, verbose, convertedFromParallelVtu=Fals
                 num = int(numberOfComponents[name])
                 newitems = []
                 for i in range(len(items) // num):
-                    newitems.append([i for i in items[i * num : i * num + num]])
+                    newitems.append(list(items[i * num : i * num + num]))
                 items = newitems
                 # sort the items: we have either vertex or cell data
                 if name in pointDataArrays:
@@ -570,8 +602,8 @@ def sort_vtk_by_coordinates(root1, root2, verbose, convertedFromParallelVtu=Fals
                     # from pvd
                     if convertedFromParallelVtu:
                         uniqueItems = [None] * len(vertexArray)
-                        for i in range(len(items)):
-                            uniqueItems[uniqueIdx[i]] = items[i]
+                        for i, item in enumerate(items):
+                            uniqueItems[uniqueIdx[i]] = item
                         sortedItems = [uniqueItems[i] for i in vertexIndexMapInverse]
                     else:
                         sortedItems = [items[i] for i in vertexIndexMapInverse]
@@ -642,7 +674,7 @@ if __name__ == "__main__":
     args = vars(parser.parse_args())
 
     sys.exit(
-        compare_vtk(
+        compareVTK(
             args["vtk_file_1"],
             args["vtk_file_2"],
             args["absolute"],
diff --git a/bin/testing/getchangedfiles.py b/bin/testing/getchangedfiles.py
index 4539a932a00c76fa067f86cc10d6ceffc0398694..e4a3531fa21bf7a2f311cd0318efca8538cea06e 100644
--- a/bin/testing/getchangedfiles.py
+++ b/bin/testing/getchangedfiles.py
@@ -10,12 +10,13 @@ from argparse import ArgumentParser
 
 
 def getCommandOutput(command, cwd=None):
+    """wrapper around subprocess check_output"""
     return subprocess.check_output(command, encoding="ascii", cwd=cwd)
 
 
 # get the files that differ between two trees in a git repo
 def getChangedFiles(gitFolder, sourceTree, targetTree):
-
+    """Find the files that changes between two git trees"""
     gitFolder = os.path.abspath(gitFolder)
     root = getCommandOutput(command=["git", "rev-parse", "--show-toplevel"], cwd=gitFolder).strip(
         "\n"
@@ -61,8 +62,8 @@ if __name__ == "__main__":
     )
     args = vars(parser.parse_args())
 
-    changedFiles = getChangedFiles(args["folder"], args["source_tree"], args["target_tree"])
+    changedFileList = getChangedFiles(args["folder"], args["source_tree"], args["target_tree"])
 
     with open(args["outfile"], "w") as outFile:
-        for file in changedFiles:
+        for file in changedFileList:
             outFile.write(f"{os.path.abspath(file)}\n")
diff --git a/bin/testing/runselectedtests.py b/bin/testing/runselectedtests.py
index 1f8db7ef9ed895b11cccfa057a7db9822550dff6..0e30f53f1e8c90f5e2850edbba0672a9ec19d67b 100755
--- a/bin/testing/runselectedtests.py
+++ b/bin/testing/runselectedtests.py
@@ -16,7 +16,10 @@ if sys.version_info.major < 3:
     sys.exit("Python 3 required")
 
 
-def buildTests(config, flags=["-j8", "--keep-going"]):
+def buildTests(config, flags=None):
+    """Compile the test suite"""
+
+    flags = flags or ["-j8", "--keep-going"]
 
     if not config:
         print("No tests to be built")
@@ -38,7 +41,10 @@ def buildTests(config, flags=["-j8", "--keep-going"]):
     subprocess.run(["make", "-f", "TestMakeFile"] + flags + ["testselection"], check=True)
 
 
-def runTests(config, script="", flags=["-j8", "--output-on-failure"]):
+def runTests(config, script="", flags=None):
+    """Run the tests in the test suite"""
+
+    flags = flags or ["-j8", "--output-on-failure"]
 
     tests = list(config.keys())
     if not tests:
@@ -124,11 +130,11 @@ if __name__ == "__main__":
     # use target selection
     else:
         with open(args["config"]) as configFile:
-            config = json.load(configFile)
-            numTests = len(config)
+            configDict = json.load(configFile)
+            numTests = len(configDict)
             print("{} tests found in the configuration file".format(numTests))
 
             if args["build"]:
-                buildTests(config, buildFlags)
+                buildTests(configDict, buildFlags)
             if args["test"]:
-                runTests(config, dunectest, testFlags)
+                runTests(configDict, dunectest, testFlags)
diff --git a/bin/testing/runtest.py b/bin/testing/runtest.py
index 1b9c7de159c68827d19d4685f313f6fffed4e438..305e74e6aa9abcf8e5c4dcf14abba0c3b82e6b57 100755
--- a/bin/testing/runtest.py
+++ b/bin/testing/runtest.py
@@ -1,121 +1,124 @@
 #!/usr/bin/env python3
+
+"""
+Helper script to run tests in DuMux and enable regression tests
+by data and vtu comparisons
+"""
+
 import argparse
 import shlex
 import os
 import sys
 import subprocess
 import json
-from fuzzycomparevtu import compare_vtk
-from fuzzycomparedata import compare_data
-
-# parse arguments
-parser = argparse.ArgumentParser()
-parser.add_argument(
-    "-c",
-    "--command",
-    nargs=1,
-    help="The executable and optional arguments as a single string",
-    required=True,
-)
-parser.add_argument(
-    "-s",
-    "--script",
-    nargs=1,
-    help=(
-        "The comparison script. [fuzzy, fuzzyData, exact, <path_to_script>]"
-        " where the script takes two files as arguments."
-    ),
-)
-parser.add_argument(
-    "-f",
-    "--files",
-    nargs="+",
-    help=(
-        "Pairs of file names (first reference, then current). "
-        "Usage: '[-f ref1 cur1 [[ref2] [cur2] ...]]'"
-    ),
-)
-parser.add_argument(
-    "-d", "--delimiter", type=str, default=",", help="Column delimiter for data files"
-)
-parser.add_argument(
-    "-r",
-    "--relative",
-    type=float,
-    default=1e-2,
-    help="maximum relative error (default=1e-2) when using fuzzy comparison",
-)
-parser.add_argument(
-    "-a",
-    "--absolute",
-    type=float,
-    default=1.5e-7,
-    help="maximum absolute error (default=1.5e-7) when using fuzzy comparison",
-)
-parser.add_argument(
-    "-z",
-    "--zeroThreshold",
-    type=json.loads,
-    default="{}",
-    help=(
-        "Thresholds for treating numbers as zero for"
-        ' a parameter as a python dict e.g. {"vel":1e-7,"delP":1.0}'
-    ),
-)
-args = vars(parser.parse_args())
-
-# check parameters
-if args["script"]:
-    if len(args["files"]) % 2 != 0 or not args["files"]:
-        sys.stderr.write(
-            "The files have to be pairs of reference and current solution files."
-            " Usage '-f [ref1] [cur1] [[ref2] [cur2] ...]'"
-        )
-        parser.print_help()
-        sys.exit(1)
-    for i in range(0, len(args["files"]) // 2):
-        # delete the vtu files to compare
-        ref_dir = os.path.dirname(os.path.abspath(__file__)).rstrip("bin") + "test/references"
-        if os.path.dirname(args["files"][(i * 2) + 1]) == ref_dir:
+from fuzzycomparevtu import compareVTK
+from fuzzycomparedata import compareData
+
+
+def readCmdParameters():
+    """Read the command line parameters"""
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "-c",
+        "--command",
+        nargs=1,
+        help="The executable and optional arguments as a single string",
+        required=True,
+    )
+    parser.add_argument(
+        "-s",
+        "--script",
+        nargs=1,
+        help=(
+            "The comparison script. [fuzzy, fuzzyData, exact, <path_to_script>]"
+            " where the script takes two files as arguments."
+        ),
+    )
+    parser.add_argument(
+        "-f",
+        "--files",
+        nargs="+",
+        help=(
+            "Pairs of file names (first reference, then current). "
+            "Usage: '[-f ref1 cur1 [[ref2] [cur2] ...]]'"
+        ),
+    )
+    parser.add_argument(
+        "-d", "--delimiter", type=str, default=",", help="Column delimiter for data files"
+    )
+    parser.add_argument(
+        "-r",
+        "--relative",
+        type=float,
+        default=1e-2,
+        help="maximum relative error (default=1e-2) when using fuzzy comparison",
+    )
+    parser.add_argument(
+        "-a",
+        "--absolute",
+        type=float,
+        default=1.5e-7,
+        help="maximum absolute error (default=1.5e-7) when using fuzzy comparison",
+    )
+    parser.add_argument(
+        "-z",
+        "--zeroThreshold",
+        type=json.loads,
+        default="{}",
+        help=(
+            "Thresholds for treating numbers as zero for"
+            ' a parameter as a python dict e.g. {"vel":1e-7,"delP":1.0}'
+        ),
+    )
+    args = vars(parser.parse_args())
+
+    # check parameters
+    if args["script"]:
+        if len(args["files"]) % 2 != 0 or not args["files"]:
             sys.stderr.write(
-                "Tried to delete a reference solution. "
-                "Specify reference file first, then the current solution. "
-                "Usage: '[-f ref1 cur1 [[ref2] [cur2] ...]]'"
+                "The files have to be pairs of reference and current solution files."
+                " Usage '-f [ref1] [cur1] [[ref2] [cur2] ...]'"
             )
+            parser.print_help()
             sys.exit(1)
-        subprocess.call(["rm", "-fv", args["files"][(i * 2) + 1]])
-
-# run the test
-res = 1
-try:
-    res = subprocess.call(shlex.split(args["command"][0]))
-except OSError:
-    print(args["command"][0].split())
-    print("OSError: Command not found. Most likely the executable specified doesn't exist.")
-    sys.exit(1)
-if res:
-    sys.exit(res)
-
-# run the comparison
-if args["script"]:
+        for i in range(0, len(args["files"]) // 2):
+            # delete the vtu files to compare
+            referenceDirectory = (
+                os.path.dirname(os.path.abspath(__file__)).rstrip("bin") + "test/references"
+            )
+            if os.path.dirname(args["files"][(i * 2) + 1]) == referenceDirectory:
+                sys.stderr.write(
+                    "Tried to delete a reference solution. "
+                    "Specify reference file first, then the current solution. "
+                    "Usage: '[-f ref1 cur1 [[ref2] [cur2] ...]]'"
+                )
+                sys.exit(1)
+            subprocess.call(["rm", "-fv", args["files"][(i * 2) + 1]])
+
+    return args
+
+
+def runRegressionTest(args):
+    """Run regression test scripts against reference data"""
+
     # exact comparison?
     if args["script"] == ["exact"]:
-        return_code = 0
+        returnCode = 0
         for i in range(0, len(args["files"]) // 2):
             print("\nExact comparison...")
             result = subprocess.call(["diff", args["files"][i * 2], args["files"][(i * 2) + 1]])
             if result:
-                return_code = 1
-        sys.exit(return_code)
+                returnCode = 1
+        sys.exit(returnCode)
 
     # fuzzy comparison?
     elif args["script"] == ["fuzzy"] or args["script"] == [
         os.path.dirname(os.path.abspath(__file__)) + "/fuzzycomparevtu.py"
     ]:
-        return_code = 0
+        returnCode = 0
         for i in range(0, len(args["files"]) // 2):
             print("\nFuzzy comparison...")
-            result = compare_vtk(
+            result = compareVTK(
                 args["files"][i * 2],
                 args["files"][(i * 2) + 1],
                 relative=args["relative"],
@@ -123,15 +126,15 @@ if args["script"]:
                 zeroValueThreshold=args["zeroThreshold"],
             )
             if result:
-                return_code = 1
-        sys.exit(return_code)
+                returnCode = 1
+        sys.exit(returnCode)
 
     # fuzzy comparison of data sets?
     elif args["script"] == ["fuzzyData"]:
-        return_code = 0
+        returnCode = 0
         for i in range(0, len(args["files"]) // 2):
             print("\nFuzzy data comparison...")
-            result = compare_data(
+            result = compareData(
                 args["files"][i * 2],
                 args["files"][(i * 2) + 1],
                 args["delimiter"],
@@ -140,20 +143,45 @@ if args["script"]:
                 zeroValueThreshold=args["zeroThreshold"],
             )
             if result:
-                return_code = 1
-        sys.exit(return_code)
+                returnCode = 1
+        sys.exit(returnCode)
 
     # other script?
     else:
-        return_code = 0
+        returnCode = 0
         for i in range(0, len(args["files"]) // 2):
             print("\n{} comparison...".format(args["script"]))
             result = subprocess.call(
                 args["script"], args["files"][i * 2], args["files"][(i * 2) + 1]
             )
             if result:
-                return_code = 1
-        sys.exit(return_code)
+                returnCode = 1
+        sys.exit(returnCode)
+
+
+def runTest():
+    """Run a DuMux test"""
+
+    args = readCmdParameters()
+
+    # run the test
+    res = 1
+    try:
+        res = subprocess.call(shlex.split(args["command"][0]))
+    except OSError:
+        print(args["command"][0].split())
+        print("OSError: Command not found. Most likely the executable specified doesn't exist.")
+        sys.exit(1)
+    if res:
+        sys.exit(res)
+
+    # run the comparison
+    if args["script"]:
+        runRegressionTest(args=args)
+
+    # everything is fine
+    sys.exit(0)
+
 
-# everything is fine
-sys.exit(0)
+if __name__ == "__main__":
+    runTest()
diff --git a/bin/util/common.py b/bin/util/common.py
index cf8027939190e7d104915263051f56e7cc1c0132..335a776d0508f2fcd3dae74f1c4bc61d5462a652 100644
--- a/bin/util/common.py
+++ b/bin/util/common.py
@@ -1,3 +1,7 @@
+"""
+Helper functions used in several utility scripts (e.g. extract_module.py, ...)
+"""
+
 import os
 import re
 import sys
@@ -16,38 +20,45 @@ TERM_FORMATTING = {
 }
 
 
-def styledBotPrint(s, style="none", **kwargs):
+def styledBotPrint(string, style="none", **kwargs):
+    """Styled print to terminal when asking user for input"""
     sys.stdout.write("\n🤖 ")
     sys.stdout.write(TERM_FORMATTING[style])
-    print(s, **kwargs)
+    print(string, **kwargs)
     sys.stdout.write(TERM_FORMATTING["reset"])
 
 
 def addPrefix(prefix, text, separator=" "):
+    """Add prefix to a string"""
     return prefix + separator + text
 
 
 def addPrefixToLines(prefix, text, separator=" "):
+    """Add prefix every line of a multiline string (separated by endline character)"""
     return "\n".join(addPrefix(prefix, line, separator) for line in text.split("\n"))
 
 
 def escapeCharacter(text, character, escCharacter="\\"):
+    """Escape a given character with backslashes"""
     return text.replace(character, f"{escCharacter}{character}")
 
 
 def escapeCharacters(text, characters, escCharacter="\\"):
+    """Escape given characters with backslashes"""
     for char in characters:
         text = escapeCharacter(text, char, escCharacter)
     return text
 
 
 def indent(text, indentation="  "):
+    """Add space indentation to text"""
     text = text.split("\n")
     text = [indentation + line for line in text]
     return "\n".join(text)
 
 
 def makeTable(dictList, config=None, padding=2):
+    """Format as a table"""
     if config is None:
         config = {key: key for d in dictList for key in d}
 
@@ -72,6 +83,7 @@ def makeTable(dictList, config=None, padding=2):
 
 
 def getCommandErrorHints(command):
+    """Maybe give a hint matching command"""
     if "git " in command:
         return (
             "It seems that a git command failed. Please check:\n"
@@ -88,7 +100,7 @@ def runCommand(command, check=True, suppressTraceBack=False, errorMessage=""):
         return subprocess.run(
             shlex.split(command), check=check, text=True, capture_output=True
         ).stdout
-    except Exception:
+    except subprocess.CalledProcessError:
         eType, eValue, eTraceback = sys.exc_info()
         if suppressTraceBack:
             traceback.print_exception(eType, eType(errorMessage), None)
@@ -102,23 +114,24 @@ def runCommand(command, check=True, suppressTraceBack=False, errorMessage=""):
             hints = getCommandErrorHints(command)
             if hints is not None:
                 print(hints)
+        return ""
 
 
 def callFromPath(path):
     """decorator to call function from within the given path"""
 
-    def decorator_callFromPath(callFunc):
+    def decorateCallFromPath(callFunc):
         @functools.wraps(callFunc)
-        def wrapper_callFromPath(*args, **kwargs):
+        def wrapCallFromPath(*args, **kwargs):
             curPath = os.getcwd()
             os.chdir(path)
             result = callFunc(*args, **kwargs)
             os.chdir(curPath)
             return result
 
-        return wrapper_callFromPath
+        return wrapCallFromPath
 
-    return decorator_callFromPath
+    return decorateCallFromPath
 
 
 def userQuery(query, choices=None):
@@ -172,29 +185,34 @@ def queryYesNo(question, default="yes"):
         choice = input().lower()
 
         if default is not None and choice == "":
-            return True if isAffirmative(default) else False
+            return isAffirmative(default)
 
         if not isValid(choice):
             styledBotPrint(
                 f"Invalid answer: '{choice}'. Choose from '{getChoices()}'", style="warning"
             )
         else:
-            return True if isAffirmative(choice) else False
+            return isAffirmative(choice)
 
 
 def cppHeaderFilter():
+    """
+    Filter out source files that are not headers
+    (sources are determined by looking for config.h)
+    """
     return lambda fileName: fileName == "config.h"
 
 
-def includedCppProjectHeaders(file, projectBase, headers=[], headerFilter=cppHeaderFilter()):
+def includedCppProjectHeaders(file, projectBase, headers=None, headerFilter=cppHeaderFilter()):
     """get all project headers included by a cpp file"""
+    headers = headers or []
 
     filePath = os.path.join(projectBase, file)
     if not os.path.exists(filePath):
         raise IOError(f"Cpp file {filePath} does not exist")
 
-    with open(filePath, "r") as f:
-        content = f.read()
+    with open(filePath, "r") as sourceFile:
+        content = sourceFile.read()
         headerInBracket = re.findall(r"#include\s+<(.+?)>", content)
         headerInQuotation = re.findall(r'#include\s+"(.+?)"', content)
 
@@ -219,7 +237,7 @@ def findMatchingFiles(path, pattern):
     """find all files below the given folder that match the given pattern"""
 
     result = []
-    for root, dirs, files in os.walk(path):
+    for root, _, files in os.walk(path):
         relativeRootPath = os.path.relpath(root, path)
         for file in files:
             if fnmatch.fnmatch(file, pattern):
@@ -228,30 +246,35 @@ def findMatchingFiles(path, pattern):
 
 
 def isGitRepository(pathToRepo="."):
+    """Check if git repository exists at given path"""
     try:
         run = callFromPath(pathToRepo)(runCommand)
         run("git status")
         return True
-    except Exception:
+    except subprocess.CalledProcessError:
         return False
 
 
 def getRemote(pathToRepo="."):
+    """Check if git remote exists at given path"""
     run = callFromPath(pathToRepo)(runCommand)
     return run("git ls-remote --get-url").strip("\n")
 
 
 def fetchRepo(remote, pathToRepo="."):
+    """Fetch repo"""
     run = callFromPath(pathToRepo)(runCommand)
     run("git fetch {}".format(remote))
 
 
 def hasUntrackedFiles(pathToRepo="."):
+    """Check for untracked (by git) file for given repo"""
     run = callFromPath(pathToRepo)(runCommand)
     return run("git ls-files --others --exclude-standard") != ""
 
 
 def isPersistentBranch(branchName):
+    """Check if a branch is what we consider persistent (default protected branches)"""
     if branchName == "origin/master":
         return True
     if branchName.startswith("origin/releases/"):
@@ -262,6 +285,7 @@ def isPersistentBranch(branchName):
 # get the most recent commit that also exists on remote master/release branch
 # may be used to find a commit we can use as basis for a pub module
 def mostRecentCommonCommitWithRemote(modFolderPath, branchFilter=isPersistentBranch):
+    """Find most recent commit shared with remote"""
     run = callFromPath(modFolderPath)(runCommand)
 
     def findBranches(sha):
@@ -282,7 +306,7 @@ def mostRecentCommonCommitWithRemote(modFolderPath, branchFilter=isPersistentBra
 
 # function to extract persistent, remotely available git versions for all
 def getPersistentVersions(modFolderPaths, ignoreUntracked=False):
-
+    """Get versions of last commit on a persistent branch"""
     result = {}
     for modFolderPath in modFolderPaths:
 
@@ -318,6 +342,7 @@ def getPersistentVersions(modFolderPaths, ignoreUntracked=False):
 
 
 def getPatches(persistentVersions):
+    """Generate patches"""
     result = {}
     for path, gitInfo in persistentVersions.items():
         run = callFromPath(path)(runCommand)
@@ -354,18 +379,25 @@ def getPatches(persistentVersions):
     return result
 
 
+DEFAULT_VERSION_TABLE_CONFIG = {
+    "name": "module name",
+    "branch": "branch name",
+    "revision": "commit sha",
+    "date": "commit date",
+}
+
+
 def versionTable(
     versions,
-    config={
-        "name": "module name",
-        "branch": "branch name",
-        "revision": "commit sha",
-        "date": "commit date",
-    },
+    config=None,
     padding=2,
 ):
-    return makeTable(versions, config)
+    """Make a table containing module versions"""
+    if config is None:
+        config = DEFAULT_VERSION_TABLE_CONFIG
+    return makeTable(versions, config, padding)
 
 
 def printVersionTable(versions):
+    """Print a table containing module versions"""
     print(versionTable(versions=versions))
diff --git a/bin/util/installscript.py b/bin/util/installscript.py
index 10636bcf375758d0ecfe7dd517375b99926e4e33..1a3f40b3648a3a3607f6039fee204a4fb4d08878 100644
--- a/bin/util/installscript.py
+++ b/bin/util/installscript.py
@@ -17,28 +17,39 @@ if sys.version_info[0] < 3:
 
 
 def supportedLanguages():
+    """Supported languages for the install script output"""
     return ["python", "bash"]
 
 
 def getScriptExtension(language):
+    """Default script extension for the given language"""
     assert language in supportedLanguages()
     ext = {"python": ".py", "bash": ".sh"}
     return ext[language]
 
 
+def getScriptLanguageFromExtension(ext):
+    """Default script language for the given extension"""
+    language = {".py": "python", ".sh": "bash"}
+    return language[ext]
+
+
 def makeScriptWriter(language):
+    """Create a new install script writer instance"""
     if language == "bash":
         return InstallScriptWriterBash()
-    elif language == "python":
+    if language == "python":
         return InstallScriptWriterPython()
     raise ValueError(f"Could not create writer for language {language}")
 
 
 def getDefaultScriptName(modName, language):
+    """The default script name"""
     return "install_{}{}".format(modName, getScriptExtension(language))
 
 
 def printProgressInfo(infoLines, indLevel=0):
+    """Inform user about progress"""
     firstPrefix = "\n" + "--" * (indLevel + 1)
     emptyPrefix = firstPrefix.replace("-", " ").strip("\n")
     print(f"{firstPrefix} {infoLines[0]}")
@@ -46,18 +57,20 @@ def printProgressInfo(infoLines, indLevel=0):
         print(f"{emptyPrefix} {line}")
 
 
-def filterDependencies(dependencies, skipFolders=[]):
-    if not skipFolders:
+def filterDependencies(dependencies, skipFolders=None):
+    """Filter dependencies to skip given folders"""
+    if skipFolders is None:
         return dependencies
-    else:
 
-        def skipFolder(folderName):
-            return any(folderName == os.path.basename(path) for path in skipFolders)
+    def skipFolder(folderName):
+        return any(folderName == os.path.basename(path) for path in skipFolders)
 
-        return [dep for dep in dependencies if not skipFolder(dep["folder"])]
+    return [dep for dep in dependencies if not skipFolder(dep["folder"])]
 
 
 def addDependencyVersions(dependencies, ignoreUntracked=False):
+    """Add version info to all dependencies"""
+
     def getKey(dependency):
         return dependency["path"]
 
@@ -73,6 +86,8 @@ def addDependencyVersions(dependencies, ignoreUntracked=False):
 
 
 def addDependencyPatches(dependenciesWithVersions):
+    """Add patch info to all dependencies"""
+
     def getKey(dependency):
         return dependency["path"]
 
@@ -85,10 +100,10 @@ def addDependencyPatches(dependenciesWithVersions):
     return mergedResult
 
 
-def makeInstallScript(
-    modPath, dependencies, scriptName, writer, topFolderName="DUMUX", optsFile=None
-):
-
+def makeInstallScript(modPath, dependencies, scriptName, topFolderName="DUMUX", optsFile=None):
+    """Main driver: create installation script for a dune module"""
+    _, extension = os.path.splitext(scriptName)
+    writer = makeScriptWriter(getScriptLanguageFromExtension(extension))
     modPath = os.path.abspath(modPath)
     modName = getModuleInfo(modPath, "Module")
 
@@ -134,22 +149,21 @@ def makeInstallScript(
             writer.writeMessageOutput("Installing {}".format(dep["name"]))
             writer.writeInstallation(dep)
 
-        for dep in dependencies:
-
-            def writePatch(patch, moduleName, description):
-                script.write("\n")
-                writer.writeMessageOutput(f"Applying patch for {description} in {moduleName}")
-                writer.writePatchApplication(dep["folder"], patch)
+        def writePatch(patch, description, moduleName, folder):
+            script.write("\n")
+            writer.writeMessageOutput(f"Applying patch for {description} in {moduleName}")
+            writer.writePatchApplication(folder, patch)
 
+        for dep in dependencies:
             if dep["untracked"] is not None:
                 description = "untracked files"
-                writePatch(dep["untracked"], description, dep["name"])
+                writePatch(dep["untracked"], description, dep["name"], dep["folder"])
             if dep["unpublished"] is not None:
                 description = "unpublished commits"
-                writePatch(dep["unpublished"], description, dep["name"])
+                writePatch(dep["unpublished"], description, dep["name"], dep["folder"])
             if dep["uncommitted"] is not None:
                 description = "uncommitted changes"
-                writePatch(dep["uncommitted"], description, dep["name"])
+                writePatch(dep["uncommitted"], description, dep["name"], dep["folder"])
 
         script.write("\n")
         writer.writeMessageOutput("Configuring project")
@@ -157,6 +171,7 @@ def makeInstallScript(
 
 
 def printFoundDependencies(deps):
+    """Output found dependencies"""
     if len(deps) > 0:
         infoText = ["Found the following dependencies"]
         infoText.extend(versionTable(deps, {"name": "module name", "path": "folder"}).split("\n"))
@@ -164,6 +179,7 @@ def printFoundDependencies(deps):
 
 
 def printFoundVersionInfo(dependenciesWithVersions):
+    """Output found versions"""
     table = versionTable(dependenciesWithVersions)
     printProgressInfo(
         [
@@ -174,8 +190,8 @@ def printFoundVersionInfo(dependenciesWithVersions):
     )
 
 
-def printFinalMessage(scriptName, topFolderName=None):
-
+def printFinalMessage(topFolderName=None):
+    """Final message after the install script has been created"""
     if topFolderName:
         description = textwrap.dedent(
             f"""\
diff --git a/bin/util/installscript_writer.py b/bin/util/installscript_writer.py
index 40e99c0776ed52def3f1f146f71b310ea1b9627a..57999ae32d9b308ec9f4bcb4d754db069d166ee4 100644
--- a/bin/util/installscript_writer.py
+++ b/bin/util/installscript_writer.py
@@ -6,6 +6,8 @@ from util.common import addPrefixToLines, escapeCharacters
 
 
 def getRawString(text):
+    """Get a raw string that can be written to file"""
+
     def makeRaw(text):
         return repr(text)
 
@@ -16,56 +18,67 @@ def getRawString(text):
 
 
 class InstallScriptWriterInterface(ABC):
+    """Abstract writer interface to be implemented for dune module install script writers"""
+
     def __init__(self):
+        """Initialize"""
+        super().__init__()
         self.ostream = None
 
     def setOutputStream(self, stream):
+        """Where do we write to?"""
         self.ostream = stream
 
     @abstractmethod
     def writeSheBang(self):
-        pass
+        """
+        Write the shebang (first line of the script that
+        specifies the program executing the script)
+        """
 
     @abstractmethod
     def writeComment(self, comment):
-        pass
+        """Write a code comment"""
 
     @abstractmethod
     def writeMessageOutput(self, message):
-        pass
+        """Write a message"""
 
     @abstractmethod
     def writePreamble(self, topFolderName=None):
-        pass
+        """Write the preamble of the script"""
 
     @abstractmethod
     def writeInstallation(self, dependency):
-        pass
+        """Write the installation process of module dependencies"""
 
     @abstractmethod
-    def writePatchApplication(self, folder, patchName):
-        pass
+    def writePatchApplication(self, folder, patchContent):
+        """Write the part that applies patches"""
 
     @abstractmethod
-    def writeConfiguration(self, optsFile):
-        pass
+    def writeConfiguration(self, opts):
+        """Write the configuration part"""
 
 
 class InstallScriptWriterBash(InstallScriptWriterInterface):
-    def __init__(self):
-        super().__init__()
+    """Write a bash install script"""
 
     def writeSheBang(self):
+        """Shebang for bash"""
         self.ostream.write("#!/bin/bash\n")
 
     def writeComment(self, comment):
+        """Write a code comment"""
         comment = addPrefixToLines("#", comment)
         self.ostream.write(comment)
 
     def writeMessageOutput(self, message):
+        """Write a message"""
         self.ostream.write(f'echo "{message}"\n')
 
     def writePreamble(self, topFolderName=None):
+        """Write preable of the script (utility functions)"""
         self.ostream.write(
             textwrap.dedent(
                 """\
@@ -116,6 +129,7 @@ class InstallScriptWriterBash(InstallScriptWriterInterface):
         self.ostream.write("cd $TOP\n")
 
     def writeInstallation(self, dependency):
+        """Write installation part of the script"""
         self.ostream.write(
             "installModule {} {} {} {}".format(
                 dependency["folder"],
@@ -126,6 +140,8 @@ class InstallScriptWriterBash(InstallScriptWriterInterface):
         )
 
     def writePatchApplication(self, folder, patchContent):
+        """Write patch application part of the script"""
+
         def removeEscapedSingleQuotes(line):
             return line.replace(r"\'", "'")
 
@@ -140,6 +156,7 @@ class InstallScriptWriterBash(InstallScriptWriterInterface):
         self.ostream.write(f'applyPatch {folder} "$PATCH"')
 
     def writeConfiguration(self, opts):
+        """Write configure part of the script"""
         self.ostream.write(
             f"if ! ./dune-common/bin/dunecontrol --opts={opts} all; then\n"
             '    echo "Configuration of the project failed"\n'
@@ -149,20 +166,23 @@ class InstallScriptWriterBash(InstallScriptWriterInterface):
 
 
 class InstallScriptWriterPython(InstallScriptWriterInterface):
-    def __init__(self):
-        super().__init__()
+    """Write a Python install script"""
 
     def writeSheBang(self):
+        """Shebang for python3"""
         self.ostream.write("#!/usr/bin/env python3\n")
 
     def writeComment(self, comment):
+        """Write a code comment"""
         comment = addPrefixToLines("#", comment)
         self.ostream.write(comment)
 
     def writeMessageOutput(self, message):
+        """Write a message"""
         self.ostream.write(f'print("{message}")\n')
 
     def writePreamble(self, topFolderName=None):
+        """Write the preamble of the script"""
         top = topFolderName if topFolderName else "."
         self.ostream.write(
             textwrap.dedent(
@@ -212,6 +232,7 @@ class InstallScriptWriterPython(InstallScriptWriterInterface):
         )
 
     def writeInstallation(self, dependency):
+        """Write installation part of the script"""
         self.ostream.write(
             'installModule("{}", "{}", "{}", "{}")\n'.format(
                 dependency["folder"],
@@ -222,6 +243,7 @@ class InstallScriptWriterPython(InstallScriptWriterInterface):
         )
 
     def writePatchApplication(self, folder, patchContent):
+        """Write patch application part of the script"""
         self.ostream.write('patch = """\n')
         for line in patchContent.rstrip("\n").split("\n"):
             line = getRawString(line)
@@ -232,6 +254,7 @@ class InstallScriptWriterPython(InstallScriptWriterInterface):
         self.ostream.write(f'applyPatch("{folder}", patch)\n')
 
     def writeConfiguration(self, opts):
+        """Write configure part of the script"""
         self.ostream.write(
             "runFromSubFolder(\n"
             f"    ['./dune-common/bin/dunecontrol', '--opts={opts}', 'all'],\n"
diff --git a/bin/util/moduleinfo.py b/bin/util/moduleinfo.py
index 643924c149a85c5893367a19d03d60535bc1e506..9466255e3d677276d5b2ce0e9383de7a705b86b5 100644
--- a/bin/util/moduleinfo.py
+++ b/bin/util/moduleinfo.py
@@ -1,9 +1,14 @@
+"""
+Read information from dune.module files
+"""
+
 import os
 from util.common import runCommand
 from util.common import callFromPath
 
 
 def extractModuleInfos(moduleFile, keys):
+    """Extract information about a Dune module from its dune.module file"""
     results = {}
     with open(moduleFile, "r") as modFile:
         for line in modFile.readlines():
@@ -16,13 +21,14 @@ def extractModuleInfos(moduleFile, keys):
     if len(results) != len(keys):
         errMsg = "Could not extract requested information for all keys.\n"
         errMsg += "Requested keys: " + ", ".join(keys) + "\n"
-        errMsg += "Processed keys: " + ", ".join([k for k in results])
+        errMsg += "Processed keys: " + ", ".join(list(results))
         raise RuntimeError(errMsg)
 
     return results
 
 
 def getModuleFile(modulePath):
+    """Read the dune.module file"""
     modFile = os.path.join(modulePath, "dune.module")
     if not os.path.exists(modFile):
         raise RuntimeError("Could not find module file")
@@ -30,10 +36,21 @@ def getModuleFile(modulePath):
 
 
 def getModuleInfo(modulePath, key):
+    """Read information about Dune module"""
     return extractModuleInfos(getModuleFile(modulePath), [key])[key]
 
 
+def parseModuleList(dunecontrolOutput):
+    """Determine the module dependencies from dunecontrol terminal output"""
+    for line in dunecontrolOutput.split("\n"):
+        if "going to build" in line:
+            line = line.replace("going to build", "").strip("-").strip("\n").strip().split(" ")
+            return line
+    return []
+
+
 def getDependencies(modulePath, verbose=False, includeSelf=False):
+    """Get the dependencies of a Dune module"""
     modName = getModuleInfo(modulePath, "Module")
     parentPath = os.path.join(modulePath, "../")
     duneControlPath = os.path.join(parentPath, "dune-common/bin/dunecontrol")
@@ -49,18 +66,13 @@ def getDependencies(modulePath, verbose=False, includeSelf=False):
     if not dcOutput:
         raise RuntimeError("Error: call to dunecontrol failed.")
 
-    for line in dcOutput.split("\n"):
-        if "going to build" in line:
-            line = line.replace("going to build", "").strip("-")
-            line = line.strip("\n").strip()
-            line = line.split(" ")
-            deps = line
+    dependencyList = parseModuleList(dcOutput)
 
     if not includeSelf:
-        deps.remove(modName)
+        dependencyList.remove(modName)
 
     if verbose:
-        print(" -- Determined the following dependencies: " + ", ".join(deps))
+        print(" -- Determined the following dependencies: " + ", ".join(dependencyList))
         print(" -- Searching the respective directories...")
 
     result = []
@@ -68,7 +80,7 @@ def getDependencies(modulePath, verbose=False, includeSelf=False):
     for path in filter(os.path.isdir, parentFiles):
         try:
             depModName = getModuleInfo(path, "Module")
-        except Exception:
+        except RuntimeError:
             if verbose:
                 print(
                     f" --- skipping folder '{path}' " "as it could not be identifed as dune module"
@@ -76,11 +88,11 @@ def getDependencies(modulePath, verbose=False, includeSelf=False):
         else:
             if verbose:
                 print(" --- visited module '{}'".format(depModName))
-            if depModName in deps:
+            if depModName in dependencyList:
                 result.append({"name": depModName, "folder": os.path.basename(path), "path": path})
 
-    if len(result) != len(deps):
+    if len(result) != len(dependencyList):
         raise RuntimeError("Could not find the folders of all dependencies")
-    elif verbose:
+    if verbose:
         print(" -- Found all module folders of the dependencies.")
     return result
diff --git a/python/dumux/common/properties.py b/python/dumux/common/properties.py
index 545d15a0a2d085baf1fd9efbbc0ce0fd65190ddc..67cbf15276e7df0d7d92231346398aba7a806c81 100644
--- a/python/dumux/common/properties.py
+++ b/python/dumux/common/properties.py
@@ -277,12 +277,10 @@ class TypeTag:
                 file += typePropertyToString(prop, self.name, self[prop]) + "\n\n"
 
         if self.gridGeometry is not None:
-            file += (
-                typePropertyToString(
-                    "Grid", self.name, Property.fromCppType("typename TypeTag::GridGeometry::Grid")
-                )
-                + "\n\n"
+            file += typePropertyToString(
+                "Grid", self.name, Property.fromCppType("typename TypeTag::GridGeometry::Grid")
             )
+            file += "\n\n"
 
         file += "} // end namespace Dumux::Properties \n\n"
         file += "#endif"
diff --git a/python/dumux/porousmediumflow/__init__.py b/python/dumux/porousmediumflow/__init__.py
index 15e8eed9ab3ee0f68422cc5024757f5e8be37bd1..3a4c9c6729a88020ab79122bb672b416a4b07c71 100644
--- a/python/dumux/porousmediumflow/__init__.py
+++ b/python/dumux/porousmediumflow/__init__.py
@@ -26,11 +26,11 @@ def _createPorousMediumFlowProblemDecorator(
         problemType = (
             "Dumux::Python::PorousMediumFlowProblem" f"<{ggType}, {priVars}, {spType}, {enableIDC}>"
         )
-        includes = (
-            gridGeometry._includes
-            + spatialParams._includes
-            + ["dumux/python/porousmediumflow/problem.hh"]
-        )
+        includes = [
+            *(gridGeometry._includes),
+            *(spatialParams._includes),
+            *["dumux/python/porousmediumflow/problem.hh"],
+        ]
         moduleName = "fvproblem_" + hashIt(problemType)
         generator = SimpleGenerator("PorousMediumFlowProblem", "Dumux::Python")
         module = generator.load(includes, problemType, moduleName, holder="std::shared_ptr")