Commit 2a45972f authored by Bernd Flemisch's avatar Bernd Flemisch
Browse files

Merge branch 'feature/ci-integration' into 'master'

WIP Feature/ci integration

See merge request !2408
parents 32c688ec 53bba82c
stages:
  - configure
  - trigger
  - downstream modules

variables:
  # registry holding the docker images used for the CI jobs
  IMAGE_REGISTRY_URL: $CI_REGISTRY/dumux-repositories/dumux-docker-ci

# Cases in which to create a pipeline. The `select-pipeline` job further
# specifies the situations in which they must be started manually. Currently,
# we only have automatic pipeline triggers for scheduled pipelines.
workflow:
  rules:
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
################################################################################
# Stage 1: configure the test pipeline.                                        #
# This creates the .yml to be used for the test pipeline trigger stage. Within #
# merge request, we create a .yml file that adds a test selection stage before #
# the build stage to identify the tests affected by changes introduced in the  #
# merge request. In all other cases, we use the default which runs all tests.  #
################################################################################
select-pipeline:
  image: $IMAGE_REGISTRY_URL/full:dune-2.7-gcc-ubuntu-20.04
  stage: configure
  script:
    # quote the variable and use POSIX `=` so the test also works in plain sh
    - |
      if [ "$CI_PIPELINE_SOURCE" = "merge_request_event" ]; then
        cp .gitlab-ci/affectedtestsonly.yml pipeline-config.yml
      else
        cp .gitlab-ci/default.yml pipeline-config.yml
      fi
  artifacts:
    paths:
      - pipeline-config.yml
    expire_in: 3 hours
  rules:
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      when: manual
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: manual
###################################################################################
# Stage 2: trigger the Dumux test pipelines                                       #
# In this stage, we trigger the test pipeline with different configurations, i.e. #
# different Dune versions, compilers, etc. Within merge requests, we create three #
# test pipelines including two different compilers and a full and minimal setup   #
# of dependencies. In all other situations, additional test jobs are created.     #
###################################################################################
# basic trigger job to start the test pipeline generated by `select-pipeline`
.base-trigger:
  stage: trigger
  needs:
    - select-pipeline
  trigger:
    include:
      - artifact: pipeline-config.yml
        job: select-pipeline
    strategy: depend

# trigger for jobs that should not be created in merge requests
.non-mr-trigger:
  extends: .base-trigger
  rules:
    - if: $CI_PIPELINE_SOURCE != "merge_request_event"
#############################################
# pipelines to be created in merge requests #
#############################################
full-dune-2.7-gcc:
  extends: .base-trigger
  variables:
    IMAGE: $IMAGE_REGISTRY_URL/full:dune-2.7-gcc-ubuntu-20.04

minimal-dune-2.7-gcc:
  extends: .base-trigger
  variables:
    IMAGE: $IMAGE_REGISTRY_URL/minimal:dune-2.7-gcc-ubuntu-20.04

full-dune-2.7-clang:
  extends: .base-trigger
  variables:
    IMAGE: $IMAGE_REGISTRY_URL/full:dune-2.7-clang-ubuntu-20.04

##################################
# additional scheduled pipelines #
##################################
full-dune-master-gcc:
  extends: .non-mr-trigger
  variables:
    IMAGE: $IMAGE_REGISTRY_URL/full:dune-master-gcc-ubuntu-20.04

full-dune-master-clang:
  extends: .non-mr-trigger
  variables:
    IMAGE: $IMAGE_REGISTRY_URL/full:dune-master-clang-ubuntu-20.04
#########################################################
# Stage 3: trigger test pipelines of downstream modules #
#########################################################
# trigger lecture test
trigger lecture:
  stage: downstream modules
  trigger:
    project: dumux-repositories/dumux-lecture
    # TODO: use master when lecture pipeline is set up
    branch: feature/test-dumux-trigger
    strategy: depend
  variables:
    DUMUX_MERGE_REQUEST_BRANCH: $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME
# child pipeline: build and run only the tests affected by the MR changes
default:
  image: $IMAGE

stages:
  - configure
  - build
  - test

workflow:
  rules:
    # note the spaces around `==`; GitLab rule expressions require them
    - if: $CI_PIPELINE_SOURCE == "parent_pipeline"

select tests:
  stage: configure
  script:
    - dunecontrol --opts=$DUNE_OPTS_FILE --current all
    - |
      pushd build-cmake
      python3 ../bin/testing/findtests.py -f affectedtests.json -t origin/master
      popd
  artifacts:
    paths:
      - build-cmake
    expire_in: 3 hours

build dumux:
  stage: build
  script:
    - |
      pushd build-cmake
      make clean
      python3 ../bin/testing/runselectedtests.py -c affectedtests.json -b
      popd
  artifacts:
    paths:
      - build-cmake
    expire_in: 3 hours
  needs:
    - job: select tests
      artifacts: true

test dumux:
  stage: test
  script:
    - |
      pushd build-cmake
      python3 ../bin/testing/runselectedtests.py -c affectedtests.json -t
      popd
  needs:
    - job: build dumux
      artifacts: true
  artifacts:
    reports:
      junit: junit/dumux-cmake.xml
# child pipeline: build and run the full test suite
default:
  image: $IMAGE

stages:
  - build
  - test

workflow:
  rules:
    # note the spaces around `==`; GitLab rule expressions require them
    - if: $CI_PIPELINE_SOURCE == "parent_pipeline"

build dumux:
  stage: build
  script:
    - dunecontrol --opts=$DUNE_OPTS_FILE --current all
    - dunecontrol --opts=$DUNE_OPTS_FILE --current bexec make -k -j4 build_tests
  artifacts:
    paths:
      - build-cmake
    expire_in: 3 hours

test dumux:
  stage: test
  script:
    - dunecontrol --opts=$DUNE_OPTS_FILE --current bexec dune-ctest -j4 --output-on-failure
  needs:
    - job: build dumux
      artifacts: true
  artifacts:
    reports:
      junit: junit/dumux-cmake.xml
#!/usr/bin/env python3
"""
Find those tests that are affected by changes
Run this in the build directory
Warning: This runs 'make clean' on the build directory
"""
import json
import subprocess
from argparse import ArgumentParser
from glob import glob
from subprocess import PIPE
import os
# Check if the set a contains a member of list b
def hasCommonMember(myset, mylist):
    """Return True when ``myset`` and ``mylist`` share at least one element."""
    return any(item in myset for item in mylist)
# make dry run and return the compilation command
def getCompileCommand(testConfig):
    """Return the compiler invocation line for the given test target, or None.

    Runs ``make --dry-run <target>`` in the current (build) directory and
    extracts the line that invokes the compiler. The original version only
    matched 'g++', which yields no match on the clang CI images; we therefore
    also accept 'clang++'.
    """
    lines = subprocess.check_output(["make", "--dry-run",
                                     testConfig["target"]],
                                    encoding='ascii').splitlines()

    def isCompilerInvocation(line):
        # CI runs both gcc and clang images
        return any(compiler in line for compiler in ("g++", "clang++"))

    commands = list(filter(isCompilerInvocation, lines))
    # a dry run is expected to contain at most one compile command per target
    assert len(commands) <= 1
    return commands[0] if commands else None
# get the command and folder to compile the given test
def buildCommandAndDir(testConfig, cache):
    """Return (compile command tokens, build folder) for the given test.

    When the dry run yields no compiler line (target already up to date),
    fall back to the entry stored in the json ``cache`` file; otherwise
    parse the "cd <dir> && <compile command>" pair and refresh the cache.
    """
    compileCommand = getCompileCommand(testConfig)
    if compileCommand is None:
        # nothing to parse: reuse the previously cached command and folder
        with open(cache) as cacheFile:
            cached = json.load(cacheFile)
        return cached["command"], cached["dir"]

    # expected shape: "cd <dir> && <compiler> <flags> <main source>"
    cdCall, command = (part.split() for part in compileCommand.split("&&"))
    _, buildDir = cdCall
    with open(cache, "w") as cacheFile:
        json.dump({"command": command, "dir": buildDir}, cacheFile)
    return command, buildDir
# check if a test is affected by changes in the given files
def isAffectedTest(testConfigFile, changed_files):
    """Determine whether a single test is affected by the changed files.

    Parameters:
        testConfigFile: path to the test's metadata json ({'name', 'target'}).
        changed_files: set of file paths (relative to the project root).

    Returns:
        (affected, test name, build target)
    """
    with open(testConfigFile) as configFile:
        testConfig = json.load(configFile)

    cacheFile = "TestTargets/" + testConfig["target"] + ".json"
    command, dir = buildCommandAndDir(testConfig, cacheFile)
    mainFile = command[-1]

    # detect headers included in this test
    # -MM skips headers from system directories
    # -H prints the name(+path) of each used header
    # for some reason g++ writes to stderr
    headers = subprocess.run(command + ["-MM", "-H"],
                             stderr=PIPE, stdout=PIPE, cwd=dir,
                             encoding='ascii').stderr.splitlines()

    # derive the project root from the build folder this script runs in.
    # Bug fix: rstrip("build-cmake") removes any trailing run of those
    # *characters* (it is a character-set strip, not a suffix strip) and can
    # mangle paths; strip the suffix explicitly instead.
    buildFolder = os.getcwd()
    if buildFolder.endswith("build-cmake"):
        buildFolder = buildFolder[:-len("build-cmake")]
    projectDir = os.path.abspath(buildFolder)

    def isProjectHeader(headerPath):
        return projectDir in headerPath

    # filter only headers from this project and turn them into relative paths
    test_files = [os.path.relpath(mainFile.lstrip(". "), projectDir)]
    test_files.extend([os.path.relpath(header.lstrip(". "), projectDir)
                       for header in filter(isProjectHeader, headers)])
    test_files = set(test_files)

    if hasCommonMember(changed_files, test_files):
        return True, testConfig["name"], testConfig["target"]
    return False, testConfig["name"], testConfig["target"]
if __name__ == '__main__':
    # parse input arguments
    parser = ArgumentParser(description='Find tests affected by changes')
    parser.add_argument('-s', '--source', required=False, default='HEAD',
                        help='The source tree (default: `HEAD`)')
    parser.add_argument('-t', '--target', required=False, default='master',
                        help='The tree to compare against (default: `master`)')
    parser.add_argument('-f', '--outfile', required=False,
                        default='affectedtests.json',
                        help='The file in which to write the affected tests')
    args = vars(parser.parse_args())

    # find the files changed between the two trees
    changed_files = subprocess.check_output(["git", "diff-tree",
                                             "-r", "--name-only",
                                             args['source'], args['target']],
                                            encoding='ascii').splitlines()
    changed_files = set(changed_files)

    # clean and rebuild so that `make --dry-run` later yields the compile
    # commands; fail loudly (check=True) rather than silently producing a
    # wrong test selection
    subprocess.run(["make", "clean"], check=True)
    subprocess.run(["make"], check=True)

    # create cache folder for the per-target compile commands
    os.makedirs("TestTargets", exist_ok=True)

    # detect affected tests
    print("Detecting affected tests:")
    count = 0
    affectedTests = {}
    for test in glob("TestMetaData/*json"):
        affected, name, target = isAffectedTest(test, changed_files)
        if affected:
            print("\t- {}".format(name))
            affectedTests[name] = {'target': target}
            count += 1

    print("Detected {} affected tests".format(count))
    with open(args['outfile'], 'w') as jsonFile:
        json.dump(affectedTests, jsonFile)
#!/usr/bin/env python3
"""
Build and/or run (using `dune-ctest`) a selection of tests.
Run this in the top level of the build tree.
"""
import sys
import json
import subprocess
from argparse import ArgumentParser
# abort early when run with a Python 2 interpreter
if sys.version_info[0] < 3:
    sys.exit('Python 3 required')
def buildTests(config, flags=None):
    """Build the test targets listed in ``config``.

    Parameters:
        config: dict mapping test names to {'target': <make target>};
                an empty config is a no-op.
        flags: extra arguments passed to make (default: parallel build).
               Bug fix: the previous default was ['j4'] (missing dash),
               which make would have interpreted as a target name.
    """
    if flags is None:
        flags = ['-j4']
    if not config:
        print('No tests to be built')
        return
    # The MakeFile generated by cmake contains the .NOTPARALLEL statement, as
    # it only allows one call to `CMakeFiles/Makefile2` at a time. Parallelism
    # is taken care of within that latter Makefile. Therefore, we create a
    # small custom Makefile here on top of `Makefile2`, where we define a new
    # target, composed of affected tests, that can be built in parallel
    with open('TestMakeFile', 'w') as makeFile:
        # include make file generated by cmake
        makeFile.write('include CMakeFiles/Makefile2\n')
        # define a new target composed of the test targets
        makeFile.write('testselection: ')
        makeFile.write(' '.join([tc['target'] for tc in config.values()]))
    subprocess.run(['make', '-f', 'TestMakeFile'] + flags + ['testselection'],
                   check=True)
def runTests(config, script='', flags=['-j4', '--output-on-failure']):
    """Run the tests named in ``config`` through dune-ctest.

    An empty selection still invokes dune-ctest on a dummy name so that an
    (empty) junit report is produced.
    """
    selected = list(config.keys())
    if not selected:
        print('No tests to be run. Letting dune-ctest produce empty report.')
        selected = ['NOOP']
    # use the provided script if any, otherwise a system-wide dune-ctest
    if script:
        invocation = ['./' + script.lstrip('./')]
    else:
        invocation = ['dune-ctest']
    invocation += flags
    invocation += ['-R'] + selected
    subprocess.run(invocation, check=True)
if __name__ == '__main__':
    parser = ArgumentParser(description='Build or run a selection of tests')
    parser.add_argument('-a', '--all',
                        required=False,
                        action='store_true',
                        help='use this flag to build/run all tests')
    parser.add_argument('-c', '--config',
                        required=False,
                        help='json file with configuration of tests to be run')
    parser.add_argument('-s', '--script',
                        required=False,
                        default='',
                        help='provide the path to the dune-ctest script')
    parser.add_argument('-b', '--build',
                        required=False,
                        action='store_true',
                        help='use this flag to build the tests')
    parser.add_argument('-t', '--test',
                        required=False,
                        action='store_true',
                        help='use this flag to run the tests')
    parser.add_argument('-bf', '--buildflags',
                        required=False,
                        default='-j4',
                        help='set the flags passed to make')
    parser.add_argument('-tf', '--testflags',
                        required=False,
                        default='-j4 --output-on-failure',
                        help='set the flags passed to ctest')
    args = vars(parser.parse_args())

    # at least one mode of operation (build/test) must be requested,
    # and `config` and `all` are mutually exclusive
    if not args['build'] and not args['test']:
        sys.exit('Neither `build` nor `test` flag was set. Exiting.')
    if args['config'] and args['all']:
        sys.exit('Error: both `config` and `all` specified. '
                 'Please set only one of these arguments.')

    # prepare build and test flags
    buildFlags = args['buildflags'].split(' ')
    testFlags = args['testflags'].split(' ')

    # use target `all`
    if args['all']:
        if args['build']:
            print('Building all tests')
            subprocess.run(['make'] + buildFlags + ['build_tests'], check=True)
        if args['test']:
            print('Running all tests')
            subprocess.run(['ctest'] + testFlags, check=True)

    # use target selection
    else:
        with open(args['config']) as configFile:
            config = json.load(configFile)
        numTests = len(config)
        print('{} tests found in the configuration file'.format(numTests))
        if args['build']:
            buildTests(config, buildFlags)
        if args['test']:
            runTests(config, args['script'], testFlags)
......@@ -204,6 +204,61 @@
# future Dune features with older Dune versions supported by Dumux
# dumux_add_test(...)
#
# Wrapper around dune_add_test that additionally dumps test metadata (test
# name and build target) as a json file into ${CMAKE_BINARY_DIR}/TestMetaData,
# which the CI scripts use for the affected-test selection. The argument
# re-parsing below mirrors what dune_add_test does internally so that we
# arrive at the same name/target deduction.
function(dumux_add_test)
  dune_add_test(${ARGV})

  include(CMakeParseArguments)
  set(OPTIONS EXPECT_COMPILE_FAIL EXPECT_FAIL SKIP_ON_77 COMPILE_ONLY)
  set(SINGLEARGS NAME TARGET TIMEOUT)
  set(MULTIARGS SOURCES COMPILE_DEFINITIONS COMPILE_FLAGS LINK_LIBRARIES CMD_ARGS MPI_RANKS COMMAND CMAKE_GUARD LABELS)
  cmake_parse_arguments(ADDTEST "${OPTIONS}" "${SINGLEARGS}" "${MULTIARGS}" ${ARGN})

  if(NOT ADDTEST_NAME)
    # try deducing the test name from the executable name
    if(ADDTEST_TARGET)
      set(ADDTEST_NAME ${ADDTEST_TARGET})
    endif()
    # try deducing the test name from the source name
    if(ADDTEST_SOURCES)
      # deducing a name is only possible with a single source argument
      list(LENGTH ADDTEST_SOURCES numSources)
      if(NOT numSources STREQUAL "1")
        message(FATAL_ERROR "Cannot deduce test name from multiple sources!")
      endif()
      # strip the file extension
      get_filename_component(ADDTEST_NAME ${ADDTEST_SOURCES} NAME_WE)
    endif()
  endif()
  if(NOT ADDTEST_COMMAND)
    set(ADDTEST_COMMAND ${ADDTEST_NAME})
  endif()

  # find out whether this test should be a dummy (i.e. a cmake guard failed)
  set(SHOULD_SKIP_TEST FALSE)
  set(FAILED_CONDITION_PRINTING "")
  foreach(guard ${ADDTEST_CMAKE_GUARD})
    separate_arguments(guard)
    if(NOT (${guard}))
      set(SHOULD_SKIP_TEST TRUE)
      set(FAILED_CONDITION_PRINTING "${FAILED_CONDITION_PRINTING}std::cout << \" ${guard}\" << std::endl;\n")
    endif()
  endforeach()

  # if we do nothing, switch the sources for a dummy source
  # (note: FAILED_CONDITION_PRINTING is presumably substituted inside
  # main77.cc.in by configure_file — keep its name unchanged)
  if(SHOULD_SKIP_TEST)
    dune_module_path(MODULE dune-common RESULT scriptdir SCRIPT_DIR)
    set(ADDTEST_TARGET)
    set(dummymain ${CMAKE_CURRENT_BINARY_DIR}/main77_${ADDTEST_NAME}.cc)
    configure_file(${scriptdir}/main77.cc.in ${dummymain})
    set(ADDTEST_SOURCES ${dummymain})
  endif()

  # when sources are given, the target name equals the test name
  if(ADDTEST_SOURCES)
    set(ADDTEST_TARGET ${ADDTEST_NAME})
  endif()

  # dump the metadata consumed by the CI test selection scripts
  file(MAKE_DIRECTORY "${CMAKE_BINARY_DIR}/TestMetaData")
  file(WRITE "${CMAKE_BINARY_DIR}/TestMetaData/${ADDTEST_NAME}.json"
       "{\n  \"name\": \"${ADDTEST_NAME}\",\n  \"target\": \"${ADDTEST_TARGET}\"\n}\n")
endfunction()
# Evaluate test guards like dune_add_test internally does
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment