/***********************************************************************************************************************
 * steps is used from buildTestDeploy
 **********************************************************************************************************************/

ArrayList<String> BUILD_PLAN = []
String BRANCH_UNDER_TEST = ""

/**********************************************************************************************************************/

// helper function, convert dependency name as listed in the .jenkinsfile into a Jenkins project name
def dependencyToJenkinsProject(String dependency, boolean forceBranches = false) {
  if(dependency.contains('@')) {
    // get rid of the build number if specified (it must not end up in the Jenkins project name)
    dependency = dependency.split('@')[0]
  }
  def dependencyProjectName = dependency
  def (dependencyFolder, dependencyProject) = dependencyProjectName.split('/',2)
  dependencyProject = dependencyProject.replace('/','%2F')
  def jobType = env.JOB_TYPE
  jobType = jobType?.minus("-testing")
  jobType = jobType?.minus("-analysis")
  def branch = "master"
  if(jobType == "branches") {
    println("Special branches check: ${dependency}")
    println(JOB_NAME)
    println(BRANCH_UNDER_TEST)
    println(BUILD_PLAN)
    println(BUILD_PLAN.flatten().size())
    println(BUILD_PLAN.flatten().indexOf(dependency))
    def dependencyUnderTest = jekinsProjectToDependency(BRANCH_UNDER_TEST)
    println("dependencyUnderTest = ${dependencyUnderTest}")
    if(!forceBranches && (BUILD_PLAN.flatten().indexOf(dependency) < 0 || BRANCH_UNDER_TEST == JOB_NAME) && dependencyUnderTest != dependency) {
      jobType = "fasttrack"
    }
    else {
      def (butFolder, butType, butProject, butBranch) = BRANCH_UNDER_TEST.split('/')
      if(butFolder == dependencyFolder && butType == jobType && butProject == dependencyProject) {
        branch = butBranch
        println("dependency matches branch under test.")
      }
    }
    println("jobType = ${jobType}")
    println("branch = ${branch}")
  }
  if(dependencyFolder != "DOOCS") {
    dependencyProjectName = "${dependencyFolder}/${jobType}/${dependencyProject}/${branch}"
  }
  else {
    dependencyProjectName = "${dependencyFolder}/${dependencyProject}"
  }
  return dependencyProjectName
}
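
// Usage sketch (hypothetical dependency names, assuming env.JOB_TYPE == "fasttrack"):
//   dependencyToJenkinsProject("ChimeraTK/DeviceAccess")  -> "ChimeraTK/fasttrack/DeviceAccess/master"
//   dependencyToJenkinsProject("DOOCS/serverlib")         -> "DOOCS/serverlib"   (DOOCS projects have no job type/branch component)
// On "branches" job types, the branch under test is substituted for "master" where the dependency matches.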

/**********************************************************************************************************************/

// helper function, convert Jenkins project name into dependency name (as listed e.g. in the .jenkinsfile)
def jekinsProjectToDependency(String jenkinsProject) {
  def projectSplit = jenkinsProject.split('/')
  if(projectSplit.size() != 4) {
    println(projectSplit.size())
    error("Jenkins project name '${jenkinsProject}' has the wrong format for jekinsProjectToDependency()")
  }
  def (folder, type, project, branch) = projectSplit
  return "${folder}/${project}"
}

/**********************************************************************************************************************/

// helper function, recursively gather a deep list of dependencies
def gatherDependenciesDeep(ArrayList<String> dependencyList) {
  script {
    println("gatherDependenciesDeep(${dependencyList})")
    // work on a copy, so the recursion does not modify the list we are currently iterating over
    def deepList = new ArrayList<String>(dependencyList)
    dependencyList.each { dependency ->
      if(dependency == "") return;
      def dependencyCleaned = dependency.replace('/','_')
      def myFile = readFile("/home/msk_jenkins/dependency-database/reverse/${dependencyCleaned}")
      deepList.addAll(gatherDependenciesDeep(new ArrayList<String>(Arrays.asList(myFile.split("\n")))))
    }
    return deepList.unique()
  }
}

/**********************************************************************************************************************/

// Helper function to get list of downstream projects
def findReverseDependencies(String project) {
  def projectCleaned = project.replace("/","_")
  sh """
    if [ -d /home/msk_jenkins/dependency-database/forward/${projectCleaned} ]; then
      cd /home/msk_jenkins/dependency-database/forward/${projectCleaned}
      cat *
      cat * > "${WORKSPACE}/dependees.txt"
    else
      # no downstream projects present: create empty list
      rm -f ${WORKSPACE}/dependees.txt
      touch ${WORKSPACE}/dependees.txt
    fi
  """
  def revDeps = readFile(env.WORKSPACE+"/dependees.txt").tokenize("\n")
  sh """
    rm -f ${WORKSPACE}/dependees.txt
  """
  return revDeps
}

/**********************************************************************************************************************/

def generateBuildPlan() {
  println("Generating build plan for ${JOB_NAME}...")
  def depName = jekinsProjectToDependency(JOB_NAME)
  sh """
    cd ${WORKSPACE}
    /home/msk_jenkins/generateBuildPlan ${depName}
  """
  def text = readFile(env.WORKSPACE+"/buildplan.txt")
  return new groovy.json.JsonSlurper().parseText(text.replace("'", '"'))
}
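
// The build plan is whatever /home/msk_jenkins/generateBuildPlan writes to buildplan.txt, parsed as JSON after
// converting single quotes to double quotes. Judging from how BUILD_PLAN is used above (flatten() + indexOf()),
// it is a nested list of dependency names, e.g. (hypothetical values):
//   [['ChimeraTK/DeviceAccess'], ['ChimeraTK/ApplicationCore', 'ChimeraTK/DeviceAccess-DoocsBackend']]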

/**********************************************************************************************************************/

def getArtefactName(boolean forReading, String basename, String label, String buildType, String dependency = jekinsProjectToDependency(JOB_NAME)) {
  // Compute name for artifact in local file storage on the build node. This saves time and is more flexible than using
  // Jenkins archiveArtifact/copyArtifact, but won't work when using multiple build nodes.
  //
  // "basename" is the filename with extension but without path. It should not contain any job/build specific parts, as
  // this will be taken care of in the path. Typical values are "build" and "install".
  //
  // This function also creates the directory if it does not yet exist.
  def dependencyNoBuildNo = dependency
  if(dependencyNoBuildNo.contains('@')) {
    // get rid of the build number if specified
    dependencyNoBuildNo = dependency.split('@')[0]
  }
  def jobName = dependencyToJenkinsProject(dependencyNoBuildNo)
  def JOBNAME_CLEANED = jobName.replace('/','_')
  println("getArtefactName(${forReading}, ${basename}, ${label}, ${buildType}, ${dependency})")
  println("jobName = ${jobName}")
  println("JOBNAME_CLEANED = ${JOBNAME_CLEANED}")
  def path = "/home/msk_jenkins/artifacts/${JOBNAME_CLEANED}/${label}/${buildType}"
  def buildNumber = null
  if(forReading) {
    def upstreamCause = currentBuild.rawBuild.getCause(Cause.UpstreamCause)
    println("upstreamCause = ${upstreamCause}")
    if(upstreamCause) {
      println("upstreamCause.getUpstreamProject() = ${upstreamCause.getUpstreamProject()}")
    }
    if(dependency.contains('@')) {
      buildNumber = dependency.split('@',2)[1]
      println("Build number from dependency name!")
    }
    else if(upstreamCause && upstreamCause.getUpstreamProject() == jobName) {
      // use the build number of the job which triggered us
      buildNumber = upstreamCause.getUpstreamBuild()
      println("Build number from upstream trigger!")
    }
    else {
      // determine latest available build
      println("path = ${path}")
      upstreamCause = null // the object may not be serializable, which causes an exception when executing sh
      buildNumber = sh ( script: "ls ${path} | sort -n | tail -n1", returnStdout: true ).trim()
      println("Build number from latest build!")
    }
  }
  else {
    buildNumber = BUILD_NUMBER
  }
  println("buildNumber = ${buildNumber}")
  path = path+"/"+buildNumber
  sh """
    mkdir -p ${path}
    chown msk_jenkins:msk_jenkins -R ${path}
  """
  println("path = ${path}")
  return "${path}/${basename}"
}

/**********************************************************************************************************************/

def getBuildNumberFromArtefactFileName(String fileName) {
  def components = fileName.split('/')
  return components[components.size()-2]
}
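
// Resulting layout on the build node (hypothetical job name and numbers, following the path scheme above):
//   /home/msk_jenkins/artifacts/ChimeraTK_fasttrack_DeviceAccess_master/ubuntu2004/Debug/42/install.tgz
// getBuildNumberFromArtefactFileName() extracts the "42" (second-to-last path component) from such a name.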

/**********************************************************************************************************************/

def doBuildAndDeploy(ArrayList<String> dependencyList, String label, String buildType, String gitUrl) {
  // prepare source directory and dependencies
  doPrepare(true, gitUrl)
  doDependencyArtefacts(dependencyList, label, buildType)

  // add inactivity timeout of 30 minutes (build will be interrupted if no log output has been produced for 30 minutes)
  timeout(activity: true, time: 30) {
    // perform build and generate build artefact
    doBuild(label, buildType)

    // deploy and generate deployment artefact
    doDeploy(label, buildType)
  }
}

/**********************************************************************************************************************/

def doTesting(String label, String buildType) {
  // prepare source directory and dependencies
  doPrepare(false)
  doBuilddirArtefact(label, buildType)

  // add inactivity timeout of 30 minutes (build will be interrupted if no log output has been produced for 30 minutes)
  timeout(activity: true, time: 30) {
    // run tests
    doRunTests(label, buildType)
  }
}

/**********************************************************************************************************************/

def doAnalysis(String label, String buildType) {
  if(buildType == "Debug") {
    doPrepare(false)
    doBuilddirArtefact(label, buildType)

    // add inactivity timeout of 60 minutes (build will be interrupted if no log output has been produced for 60 minutes)
    timeout(activity: true, time: 60) {
      // Coverage report only works well in Debug mode, since optimisation might lead to underestimated coverage
      doCoverage(label, buildType)

      // Run valgrind only in Debug mode, since Release mode often leads to no-longer-matching suppressions
      // -> disabled for now, doesn't work well and is probably replaced by asan
      //doValgrind(label, buildType)
    }
  }
  else if(buildType != "Release") {
    // asan and tsan modes
    doPrepare(false)
    doBuilddirArtefact(label, buildType)

    // add inactivity timeout of 60 minutes (build will be interrupted if no log output has been produced for 60 minutes)
    timeout(activity: true, time: 60) {
      // just run the tests
      doSanitizerAnalysis(label, buildType)
    }
  }
}

/**********************************************************************************************************************/

def doPrepare(boolean checkoutScm, String gitUrl='') {
  // Job name without slashes, to be used as filename/directory component
  env.JOBNAME_CLEANED = env.JOB_NAME.replace('/','_')

  // configure sudoers file so we can change the PATH variable
  sh '''
    mv /etc/sudoers /etc/sudoers-backup
    grep -v secure_path /etc/sudoers-backup > /etc/sudoers
  '''

  // Make sure /var/run/lock/mtcadummy is writable by msk_jenkins.
  // Create the scratch directory. Keep the absolute path fixed, so we can copy the build directory as an artefact for
  // the analysis job.
  sh '''
    chmod ugo+rwX /var/run/lock/mtcadummy
    mkdir /scratch
    chown msk_jenkins /scratch
  '''

  // Check out source code
  if(checkoutScm) {
    if(gitUrl != '') {
      if (env.BRANCH_NAME && env.BRANCH_NAME != '') {
        git branch: env.BRANCH_NAME, url: gitUrl
      }
      else {
        git gitUrl
      }
      sh 'sudo -H -E -u msk_jenkins git submodule update --init --recursive'
    }
    else {
      checkout scm
    }
    sh '''
      sudo -H -E -u msk_jenkins git clean -f -d -x
      sudo -H -E -u msk_jenkins mkdir /scratch/source
      sudo -H -E -u msk_jenkins cp -r * /scratch/source
    '''
  }
}
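
// Directory layout under /scratch used by the steps below (as created by doPrepare() and the build scripts):
//   /scratch/source                    - copy of the checked-out sources
//   /scratch/build-<JOBNAME_CLEANED>   - out-of-source build directory
//   /scratch/install                   - staging area for "ninja install" (via DESTDIR)
//   /scratch/artefact.list             - list of dependency artefacts used for this build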

/**********************************************************************************************************************/

def doDependencyArtefacts(ArrayList<String> dependencyList, String label, String buildType, ArrayList<String> obtainedArtefacts=[]) {
  // obtain artefacts of dependencies
  script {
    if(dependencyList.size() == 0) return;
    dependencyList.each {
      // skip empty strings, they always seem to come at the end of the list
      if(it == '') return;

      // provide a sensible error message if the .jenkinsfile has a wrong dependency format somewhere
      if(it.indexOf('/') == -1) {
        currentBuild.result = 'FAILURE'
        error("ERROR: Dependency has the wrong format: '${it}'")
      }

      // generate job name from dependency name
      def dependency = dependencyToJenkinsProject(it)

      // cleaned version of the job name without slashes, for use in filenames etc.
      def dependency_cleaned = dependency.replace('/','_')

      // skip if the artefact has already been downloaded
      if(obtainedArtefacts.contains(dependency)) return;
      obtainedArtefacts.add(dependency)

      // download the artefact
      //copyArtifacts filter: "install-${dependency_cleaned}-${label}-${buildType}.tgz", fingerprintArtifacts: true, projectName: "${dependency}", selector: lastSuccessful(), target: "artefacts"

      // unpack artefact
      def theFile = getArtefactName(true, "install.tgz", label, buildType, it)
      println("theFile = ${theFile}")
      sh """
        #tar xf \"artefacts/install-${dependency_cleaned}-${label}-${buildType}.tgz\" -C / --keep-directory-symlink --use-compress-program="pigz -9 -p32"
        tar xf \"${theFile}\" -C / --keep-directory-symlink --use-compress-program="pigz -9 -p32"
      """

      // keep track of dependencies to download - used when dependees need to resolve our dependencies
      def depBuildNo = getBuildNumberFromArtefactFileName(theFile)
      println("it = ${it}")
      println("depBuildNo = ${depBuildNo}")
      sh """
        touch /scratch/artefact.list
        if [[ "${it}" == *"@"* ]]; then
          echo "${it}" >> /scratch/artefact.list
        else
          echo "${it}@${depBuildNo}" >> /scratch/artefact.list
        fi
      """

      // process dependencies of the dependency we just downloaded
      sh """
        touch /scratch/dependencies.${dependency_cleaned}.list
        cp /scratch/dependencies.${dependency_cleaned}.list ${WORKSPACE}/artefact.list
      """
      def myFile = readFile(env.WORKSPACE+"/artefact.list")
      doDependencyArtefacts(new ArrayList<String>(Arrays.asList(myFile.split("\n"))), label, buildType, obtainedArtefacts)
    }

    // fix ownership
    sh """
      chown -R msk_jenkins /scratch
    """
  }
}
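
// Format of /scratch/artefact.list (one entry per line, "<folder>/<project>@<buildNumber>"), e.g. (hypothetical):
//   ChimeraTK/DeviceAccess@42
//   ChimeraTK/ApplicationCore@17
// Entries already pinned to a build number in the .jenkinsfile are written through unchanged.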

/**********************************************************************************************************************/

def doBuilddirArtefact(String label, String buildType) {
  // obtain artefacts of dependencies
  script {
    sh """
      rm -rf ${WORKSPACE}/artefacts
    """

    def buildJob = env.BUILD_JOB
    def buildJob_cleaned = buildJob.replace('/','_')

    //copyArtifacts filter: "build-${buildJob_cleaned}-${label}-${buildType}.tgz", fingerprintArtifacts: true, projectName: "${buildJob}", selector: lastSuccessful(), target: "artefacts"
    def theFile = getArtefactName(true, "build.tgz", label, buildType)

    // Unpack artefact into the Docker system root (should only write files to /scratch, which is writable by msk_jenkins).
    // Then obtain artefacts of dependencies (from /scratch/artefact.list)
    sh """
      #for a in artefacts/build-*-${label}-${buildType}.tgz ; do
      #  sudo -H -E -u msk_jenkins tar xf \"\${a}\" -C / --use-compress-program="pigz -9 -p32"
      #done
      sudo -H -E -u msk_jenkins tar xf \"${theFile}\" -C / --use-compress-program="pigz -9 -p32"

      touch /scratch/artefact.list
      cp /scratch/artefact.list ${WORKSPACE}/artefact.list
    """
    def myFile = readFile(env.WORKSPACE+"/artefact.list")
    myFile.split("\n").each {
      if( it != "" ) {
        def dependency = dependencyToJenkinsProject(it)
        def dependency_cleaned = dependency.replace('/','_')
        //copyArtifacts filter: "install-${dependency_cleaned}-${label}-${buildType}.tgz", fingerprintArtifacts: true, projectName: "${dependency}", selector: lastSuccessful(), target: "artefacts"
        theFile = getArtefactName(true, "install.tgz", label, buildType, it)
        sh """
          tar xf \"${theFile}\" -C / --use-compress-program="pigz -9 -p32"
        """
      }
    }
  }

  // unpack artefacts of dependencies into the Docker system root
  sh """
    #if ls artefacts/install-*-${label}-${buildType}.tgz 1>/dev/null 2>&1; then
    #  for a in artefacts/install-*-${label}-${buildType}.tgz ; do
    #    tar xf \"\${a}\" -C / --use-compress-program="pigz -9 -p32"
    #  done
    #fi
  """

  // fix ownership
  sh """
    chown -R msk_jenkins /scratch
  """
}
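
// Note: doBuilddirArtefact() expects env.BUILD_JOB to hold the full Jenkins project path of the corresponding build
// job (the job whose build.tgz artefact is reused), e.g. "ChimeraTK/fasttrack/DeviceAccess/master" (hypothetical
// value); it is presumably set by buildTestDeploy before the testing/analysis steps run.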

/**********************************************************************************************************************/

def doBuild(String label, String buildType) {
  catchError {
    // start the build
    sh """
      chown -R msk_jenkins /scratch
      cat > /scratch/script <<EOF
#!/bin/bash
mkdir -p /scratch/build-${JOBNAME_CLEANED}
mkdir -p /scratch/install
cd /scratch/build-${JOBNAME_CLEANED}

# Required to find DOOCS
export PKG_CONFIG_PATH=/export/doocs/lib/pkgconfig

# We might run only part of the project from a sub-directory. If RUN_FROM_SUBDIR is empty, the trailing / does not confuse cmake
for VAR in \${JOB_VARIABLES}; do
  export \\`eval echo \\\${VAR}\\`
done
if [ "${buildType}" == "tsan" ]; then
  export CC="clang-10"
  export CXX="clang++-10"
elif [ "${buildType}" == "asan" ]; then
  export CC="clang-10"
  export CXX="clang++-10"
  export LSAN_OPTIONS=verbosity=1:log_threads=1
fi
if [ "${DISABLE_TEST}" == "true" ]; then
  BUILD_TESTS_OPT="-DBUILD_TESTS=OFF"
  echo \\\${BUILD_TESTS_OPT}
fi
echo \\\${BUILD_TESTS_OPT}
cmake /scratch/source/\${RUN_FROM_SUBDIR} -GNinja -DCMAKE_INSTALL_PREFIX=/usr -DCMAKE_BUILD_TYPE=${buildType} -DSUPPRESS_AUTO_DOC_BUILD=true \${CMAKE_EXTRA_ARGS} \\\${BUILD_TESTS_OPT}
ninja -v ${MAKEOPTS}
EOF
      cat /scratch/script
      chmod +x /scratch/script
      sudo -H -E -u msk_jenkins /scratch/script
    """
  }
  script {
    // generate and archive artefact from the build directory (used e.g. for the analysis job)
    def theFile = getArtefactName(false, "build.tgz", label, buildType)
    sh """
      sudo -H -E -u msk_jenkins tar cf \"${theFile}\" /scratch --use-compress-program="pigz -9 -p32"
    """
    //archiveArtifacts artifacts: "build-${JOBNAME_CLEANED}-${label}-${buildType}.tgz", onlyIfSuccessful: false
  }
}

/**********************************************************************************************************************/

def doRunTests(String label, String buildType) {
  if (env.SKIP_TESTS) {
    currentBuild.result = 'UNSTABLE'
    return
  }

  def buildJob = env.BUILD_JOB
  def buildJob_cleaned = buildJob.replace('/','_')

  // Run the tests via ctest.
  // Prefix the test names with label and buildType, so we can distinguish them later.
  // Copy the test result files to the workspace, otherwise they are not available to the xunit plugin.
  sh """
    cat > /scratch/script <<EOF
#!/bin/bash
cd /scratch/build-${buildJob_cleaned}
cmake . -DBUILD_TESTS=ON
ninja \${MAKEOPTS}
if [ -z "\${CTESTOPTS}" ]; then
  CTESTOPTS="\${MAKEOPTS}"
fi
for VAR in \${JOB_VARIABLES} \${TEST_VARIABLES}; do
  export \\`eval echo \\\${VAR}\\`
done
ctest --no-compress-output \${CTESTOPTS} -T Test -V || true
echo sed -i Testing/*/Test.xml -e 's|\\(^[[:space:]]*<Name>\\)\\(.*\\)\\(</Name>\\)\$|\\1${label}.${buildType}.\\2\\3|'
sed -i Testing/*/Test.xml -e 's|\\(^[[:space:]]*<Name>\\)\\(.*\\)\\(</Name>\\)\$|\\1${label}.${buildType}.\\2\\3|'
rm -rf "${WORKSPACE}/Testing"
cp -r /scratch/build-${buildJob_cleaned}/Testing "${WORKSPACE}"
EOF
    cat /scratch/script
    chmod +x /scratch/script
    sudo -H -E -u msk_jenkins /scratch/script
  """

  // Publish test result directly (works properly even with multiple publications from parallel branches)
  xunit (thresholds: [ skipped(failureThreshold: '0'), failed(failureThreshold: '0') ], tools: [ CTest(pattern: "Testing/*/*.xml") ])
}
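
// The sed call above rewrites the CTest result XML so every test name is prefixed with "<label>.<buildType>.",
// e.g. (hypothetical test name) "<Name>testDeviceAccess</Name>" becomes "<Name>ubuntu2004.Debug.testDeviceAccess</Name>".
// This lets the xunit plugin distinguish results coming from the parallel label/buildType combinations.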

/**********************************************************************************************************************/

def doSanitizerAnalysis(String label, String buildType) {
  def parentJob = env.JOBNAME_CLEANED[0..-10] // remove "-analysis" from the job name, which is 9 chars long

  // Run the tests via ctest, with the sanitizer suppression files in place
  sh """
    cat > /scratch/script <<EOF
#!/bin/bash
cd /scratch/build-${parentJob}
if [ -z "\${CTESTOPTS}" ]; then
  CTESTOPTS="\${MAKEOPTS}"
fi
for VAR in \${JOB_VARIABLES} \${TEST_VARIABLES}; do
  export \\`eval echo \\\${VAR}\\`
done
export LSAN_OPTIONS="suppressions=/home/msk_jenkins/JenkinsConfiguration/sanitizer.suppressions/lsan.supp,\${LSAN_OPTIONS}"
export UBSAN_OPTIONS="suppressions=/home/msk_jenkins/JenkinsConfiguration/sanitizer.suppressions/ubsan.supp,\${UBSAN_OPTIONS}"
export TSAN_OPTIONS="second_deadlock_stack=1,suppressions=/home/msk_jenkins/JenkinsConfiguration/sanitizer.suppressions/tsan.supp,\${TSAN_OPTIONS}"
ctest --no-compress-output \${CTESTOPTS} -T Test -V
EOF
    cat /scratch/script
    chmod +x /scratch/script
    sudo -H -E -u msk_jenkins /scratch/script
  """
}

/**********************************************************************************************************************/

def doCoverage(String label, String buildType) {
  def parentJob = env.JOBNAME_CLEANED[0..-10] // remove "-analysis" from the job name, which is 9 chars long

  // Generate coverage report as HTML and also convert it into a cobertura XML file
  sh """
    chown msk_jenkins -R /scratch
    cat > /scratch/script <<EOF
#!/bin/bash
cd /scratch/build-${parentJob}
for VAR in \${JOB_VARIABLES} \${TEST_VARIABLES}; do
  export \\`eval echo \\\${VAR}\\`
done
ninja coverage || true
python3 /common/lcov_cobertura-1.6/lcov_cobertura/lcov_cobertura.py coverage.info || true
cp -r coverage_html ${WORKSPACE} || true
cp -r coverage.xml ${WORKSPACE} || true
EOF
    cat /scratch/script
    chmod +x /scratch/script
    sudo -H -E -u msk_jenkins /scratch/script
  """

  // stash cobertura coverage report result for later publication
  stash allowEmpty: true, includes: "coverage.xml", name: "cobertura-${label}-${buildType}"

  // publish HTML coverage report now, since it already allows publication of multiple distinguished reports
  publishHTML (target: [
      allowMissing: true,
      alwaysLinkToLastBuild: false,
      keepAll: false,
      reportDir: "coverage_html",
      reportFiles: 'index.html',
      reportName: "LCOV coverage report for ${label} ${buildType}"
  ])
}

/**********************************************************************************************************************/

def doDeploy(String label, String buildType) {
  // Install, but redirect files into the install directory (instead of installing into the system)
  // Generate tar ball of the install directory - this will be the artefact used by our dependents
  def theFile = getArtefactName(false, "install.tgz", label, buildType)
  sh """
    cd /scratch/build-${JOBNAME_CLEANED}
    sudo -H -E -u msk_jenkins bash -c 'DESTDIR=../install ninja install'

    cd /scratch/install
    mkdir -p scratch
    if [ -e /scratch/artefact.list ]; then
      cp /scratch/artefact.list scratch/dependencies.${JOBNAME_CLEANED}.list
    fi
    #sudo -H -E -u msk_jenkins tar cf ${WORKSPACE}/install-${JOBNAME_CLEANED}-${label}-${buildType}.tgz . --use-compress-program="pigz -9 -p32"
    sudo -H -E -u msk_jenkins tar cf ${theFile} . --use-compress-program="pigz -9 -p32"
  """

  // Downstream projects are triggered in buildAndDeploy of the first project only.
  if(env.JOB_TYPE == "branches") return

  // Get list of downstream projects
  def dependees = findReverseDependencies(jekinsProjectToDependency(JOB_NAME))
  println(dependees)

  // For each downstream project:
  dependees.each { dependee ->
    if(dependee == '') return;
    println(dependee)

    // check whether it has a dependency (direct or indirect) which is currently building (excluding ourselves)
    def myDeps = gatherDependenciesDeep([dependee])
    def triggerDependee = true
    myDeps.each { myDep ->
      if(myDep == "" || !triggerDependee) return;
      def myDepJob = dependencyToJenkinsProject(myDep)
      if(myDepJob == JOB_NAME) return;
      def job = jenkins.model.Jenkins.instance.getItemByFullName(myDepJob)
      def building = job?.isBuilding() || job?.isInQueue()
      if(building) {
        triggerDependee = false
        println("Not triggering ${dependee} as ${myDepJob} is still building.")
      }
    }

    // trigger it
    if(triggerDependee) {
      build job: dependencyToJenkinsProject(dependee), propagate: false, wait: false
    }
  }
}

/**********************************************************************************************************************/

def doPublishBuild(ArrayList<String> builds) {
  // Scan for compiler warnings. This scans the entire build logs for all labels and build types.
  recordIssues filters: [excludeMessage('.*-Wstrict-aliasing.*')],
               qualityGates: [[threshold: 1, type: 'TOTAL', unstable: true]],
               tools: [gcc()]
}
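
// The "builds" list passed to doPublishBuild()/doPublishAnalysis() is expected to contain one "<label>-<buildType>"
// entry per build, e.g. (hypothetical) ["ubuntu2004-Debug", "ubuntu2004-Release"]; doPublishAnalysis() below splits
// each entry at the '-' and unstashes the matching "cobertura-<label>-<buildType>" stash created by doCoverage().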

/**********************************************************************************************************************/

def doPublishAnalysis(ArrayList<String> builds) {
  // Note: this part runs only once per project, not for each branch!

  // unstash result files into subdirectories
  builds.each {
    dir("${it}") {
      def (label, buildType) = it.tokenize('-')

      // get cobertura coverage result (only Debug)
      if(buildType == "Debug") {
        try {
          unstash "cobertura-${it}"
        }
        catch(all) {
          echo("Could not retrieve stashed cobertura results for ${it}")
          currentBuild.result = 'FAILURE'
        }
      }
    }
  }

  // publish cobertura result
  cobertura autoUpdateHealth: false, autoUpdateStability: false, coberturaReportFile: "*/coverage.xml",
            conditionalCoverageTargets: '70, 0, 0', failNoReports: false, failUnhealthy: false, failUnstable: false,
            lineCoverageTargets: '80, 0, 0', maxNumberOfBuilds: 0, methodCoverageTargets: '80, 0, 0',
            onlyStable: false, sourceEncoding: 'ASCII'
}
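
/**********************************************************************************************************************/

// Hypothetical call sites, assuming this file is load()-ed as "steps" from buildTestDeploy (sketch only, not the
// actual buildTestDeploy implementation):
//   steps.doBuildAndDeploy(dependencyList, label, buildType, gitUrl)   // build jobs
//   steps.doTesting(label, buildType)                                  // -testing jobs
//   steps.doAnalysis(label, buildType)                                 // -analysis jobs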