From 1fd1ef62f0b67d40a9697014ae813e54f9ec7ceb Mon Sep 17 00:00:00 2001
From: Martin Hierholzer <martin.hierholzer@desy.de>
Date: Wed, 9 Feb 2022 13:50:29 +0100
Subject: [PATCH] (re)introduce testing

---
 vars/autojob.groovy                           |  10 +-
 ...estDeploy.groovy => buildAndDeploy.groovy} |  15 +-
 vars/buildDoocsLibrary.groovy                 |   1 +
 vars/helper.groovy                            | 133 +++++++-----------
 vars/testing.groovy                           |  93 ++++++++++++
 5 files changed, 153 insertions(+), 99 deletions(-)
 rename vars/{buildTestDeploy.groovy => buildAndDeploy.groovy} (92%)
 create mode 100644 vars/testing.groovy

diff --git a/vars/autojob.groovy b/vars/autojob.groovy
index cb33d3b..d9e99fe 100644
--- a/vars/autojob.groovy
+++ b/vars/autojob.groovy
@@ -14,18 +14,18 @@ def call(ArrayList<String> dependencyList, String gitUrl='',
 //                                   'tumbleweed-Release']) {
 
 
-  def (organisation, job_type, project) = env.JOB_NAME.tokenize('/')
+  def (organisation, job_type, project, branch) = env.JOB_NAME.tokenize('/')
   env.ORGANISATION = organisation
   env.JOB_TYPE = job_type
   env.PROJECT = project
+  env.BRANCH = branch
 
   if(job_type == 'fasttrack') {
     env.DISABLE_TEST=true
-    buildTestDeploy(dependencyList, gitUrl, ['focal-Debug'])
+    buildAndDeploy(dependencyList, gitUrl, ['focal-Debug'])
   }
-  else if(job_type == 'fasttrack-test') {
-    env.DISABLE_TEST=true
-    buildTestDeploy(dependencyList, gitUrl, ['focal-Debug'])
+  else if(job_type == 'fasttrack-testing') {
+    testing()
   }
   else {
     echo("Unknown job type: ${job_type}")
diff --git a/vars/buildTestDeploy.groovy b/vars/buildAndDeploy.groovy
similarity index 92%
rename from vars/buildTestDeploy.groovy
rename to vars/buildAndDeploy.groovy
index 25dd25e..c6b131a 100644
--- a/vars/buildTestDeploy.groovy
+++ b/vars/buildAndDeploy.groovy
@@ -6,13 +6,7 @@
 
 // This is the function called from the .jenkinsfile
 // The last optional argument is the list of builds to be run. Format must be "<docker_image_name>-<cmake_build_type>"
-def call(ArrayList<String> dependencyList, String gitUrl='',
-         ArrayList<String> builds=['focal-Debug',
-                                   'focal-Release',
-                                   'focal-tsan',
-                                   'focal-asan']) {
-//                                   'tumbleweed-Debug',
-//                                   'tumbleweed-Release']) {
+def call(ArrayList<String> dependencyList, String gitUrl, ArrayList<String> builds) {
 
   def dependencyJobList = new ArrayList<String>()
 
@@ -70,11 +64,12 @@ def call(ArrayList<String> dependencyList, String gitUrl='',
 
     // setup build trigger
     triggers {
-      pollSCM('H/5 * * * *')
+      pollSCM('H/1 * * * *')
       upstream(upstreamProjects: dependencies, threshold: hudson.model.Result.UNSTABLE)
     }
     options {
       //disableConcurrentBuilds()
+      quietPeriod(0)
       copyArtifactPermission('*')
       buildDiscarder(logRotator(numToKeepStr: '15', artifactNumToKeepStr: '2'))
     }
@@ -127,7 +122,7 @@ def call(ArrayList<String> dependencyList, String gitUrl='',
       always {
         node('Docker') {
           script {
-            helper.doPublishBuildTestDeploy(builds)
+            helper.doPublishBuild(builds)
           }
         }
         script {
@@ -156,7 +151,7 @@ def transformIntoStep(ArrayList<String> dependencyList, String buildName, String
         def dockerArgs = "-u 0 --privileged --device=/dev/mtcadummys0 --device=/dev/mtcadummys1 --device=/dev/mtcadummys2 --device=/dev/mtcadummys3 --device=/dev/llrfdummys4 --device=/dev/noioctldummys5 --device=/dev/pcieunidummys6 -v /var/run/lock/mtcadummy:/var/run/lock/mtcadummy -v /opt/matlab_R2016b:/opt/matlab_R2016b"
         docker.image("builder:${label}").inside(dockerArgs) {
           script {
-            helper.doBuildTestDeploy(dependencyList, label, buildType, gitUrl)
+            helper.doBuildAndDeploy(dependencyList, label, buildType, gitUrl)
           }
         }
       }
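Since the default argument values have been removed from call(), a .jenkinsfile that invokes buildAndDeploy() directly now has to pass the git URL and the build list explicitly (autojob passes ['focal-Debug'] for fasttrack jobs). A rough sketch of such a call; the dependency, repository URL and build list are illustrative assumptions:

    // hypothetical .jenkinsfile: the build list must be given explicitly,
    // using the "<docker_image_name>-<cmake_build_type>" format
    buildAndDeploy(['ChimeraTK/DeviceAccess'],
                   'https://github.com/example/my-project.git',
                   ['focal-Debug', 'focal-Release'])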
diff --git a/vars/buildDoocsLibrary.groovy b/vars/buildDoocsLibrary.groovy
index dfd75b1..b851aba 100644
--- a/vars/buildDoocsLibrary.groovy
+++ b/vars/buildDoocsLibrary.groovy
@@ -42,6 +42,7 @@ def call(String libraryName, ArrayList<String> dependencyList) {
     }
     options {
       disableConcurrentBuilds()
+      quietPeriod(180)
       copyArtifactPermission('*')
       buildDiscarder(logRotator(numToKeepStr: '30', artifactNumToKeepStr: '10'))
     }
diff --git a/vars/helper.groovy b/vars/helper.groovy
index 0cb1d05..92bb095 100644
--- a/vars/helper.groovy
+++ b/vars/helper.groovy
@@ -9,9 +9,13 @@
 // helper function, convert dependency name as listed in the .jenkinsfile into a Jenkins project name
 def dependencyToJenkinsProject(String dependency) {
   def dependencyProjectName = dependency
-  def (dependencyFolder, dependencyProject) = dependencyProjectName.split('/')
+  def (dependencyFolder, dependencyProject) = dependencyProjectName.split('/',2)
+  dependencyProject = dependencyProject.replace('/','%2F')
+  def jobType = env.JOB_TYPE
+  jobType = jobType.minus("-testing")
+  jobType = jobType.minus("-analysis")
   if(dependencyFolder != "DOOCS") {
-    dependencyProjectName = "${dependencyFolder}/${env.JOB_TYPE}/${dependencyProject}/master"
+    dependencyProjectName = "${dependencyFolder}/${jobType}/${dependencyProject}/master"
   }
   else {
     dependencyProjectName = "${dependencyFolder}/${dependencyProject}"
@@ -39,7 +43,7 @@ def gatherDependenciesDeep(ArrayList<String> dependencyList) {
 
 /**********************************************************************************************************************/
 
-def doBuildTestDeploy(ArrayList<String> dependencyList, String label, String buildType, String gitUrl) {
+def doBuildAndDeploy(ArrayList<String> dependencyList, String label, String buildType, String gitUrl) {
 
   // prepare source directory and dependencies
   doPrepare(true, gitUrl)
@@ -48,19 +52,28 @@ def doBuildTestDeploy(ArrayList<String> dependencyList, String label, String bui
   // add inactivity timeout of 30 minutes (build will be interrupted if 30 minutes no log output has been produced)
   timeout(activity: true, time: 30) {
  
-    // start build and tests, then generate artefact
+    // perform build and generate build artefact
     doBuild(label, buildType)
-    if(buildType != "asan" && buildType != "tsan" && !env.DISABLE_TEST) {
-      // tests for asan and tsan are run in the analysis jobs
-      doTest(label, buildType)
-    }
 
-    // Run cppcheck only for focal-debug
-    //if((!env.DISABLE_CPPCHECK || env.DISABLE_CPPCHECK == '') && buildType == "Debug") {
-    //    doCppcheck(label, buildType)
-    //}
+    // deploy and generate deployment artefact
+    doDeploy(label, buildType)
+
+  }
+}
+
+/**********************************************************************************************************************/
+
+def doTesting(String label, String buildType) {
+
+  // prepare source directory and dependencies
+  doPrepare(false)
+  doBuilddirArtefact(label, buildType)
 
-    doInstall(label, buildType)
+  // add inactivity timeout of 30 minutes (the build will be interrupted if no log output has been produced for 30 minutes)
+  timeout(activity: true, time: 30) {
+ 
+    // run tests
+    doRunTests(label, buildType)
 
   }
 }
@@ -211,8 +224,9 @@ def doBuilddirArtefact(String label, String buildType) {
   
   // obtain artefacts of dependencies
   script {
-    def parentJob = env.JOBNAME_CLEANED[0..-10]     // remove "-analysis" from the job name, which is 9 chars long
-    copyArtifacts filter: "build-${parentJob}-${label}-${buildType}.tgz", fingerprintArtifacts: true, projectName: "${parentJob}", selector: lastSuccessful(), target: "artefacts"
+    def buildJob = env.BUILD_JOB
+    def buildJob_cleaned = buildJob.replace('/','_')
+    copyArtifacts filter: "build-${buildJob_cleaned}-${label}-${buildType}.tgz", fingerprintArtifacts: true, projectName: "${buildJob}", selector: lastSuccessful(), target: "artefacts"
 
     // Unpack artefact into the Docker system root (should only write files to /scratch, which is writable by msk_jenkins).
     // Then obtain artefacts of dependencies (from /scratch/artefact.list)
@@ -227,7 +241,9 @@ def doBuilddirArtefact(String label, String buildType) {
     myFile = readFile(env.WORKSPACE+"/artefact.list")
     myFile.split("\n").each {
       if( it != "" ) {
-        copyArtifacts filter: "install-${it}-${label}-${buildType}.tgz", fingerprintArtifacts: true, projectName: "${it}", selector: lastSuccessful(), target: "artefacts"
+        def dependency = dependencyToJenkinsProject(it)
+        def dependency_cleaned = dependency.replace('/','_')
+        copyArtifacts filter: "install-${dependency_cleaned}-${label}-${buildType}.tgz", fingerprintArtifacts: true, projectName: "${dependency}", selector: lastSuccessful(), target: "artefacts"
       }
     }
   }
@@ -240,6 +256,11 @@ def doBuilddirArtefact(String label, String buildType) {
       done
     fi
   """
+    
+  // fix ownership
+  sh """
+    chown -R msk_jenkins /scratch
+  """
 
 }
 
@@ -300,19 +321,24 @@ EOF
 
 /**********************************************************************************************************************/
 
-def doTest(String label, String buildType) {
+def doRunTests(String label, String buildType) {
   if (env.SKIP_TESTS) {
     currentBuild.result = 'UNSTABLE'
     return
   }
 
+  def buildJob = env.BUILD_JOB
+  def buildJob_cleaned = buildJob.replace('/','_')
+
   // Run the tests via ctest
   // Prefix test names with label and buildType, so we can distinguish them later
   // Copy test results files to the workspace, otherwise they are not available to the xunit plugin
   sh """
     cat > /scratch/script <<EOF
 #!/bin/bash
-cd /scratch/build-${JOBNAME_CLEANED}
+cd /scratch/build-${buildJob_cleaned}
+cmake . -DBUILD_TESTS=ON
+ninja \${MAKEOPTS}
 if [ -z "\${CTESTOPTS}" ]; then
   CTESTOPTS="\${MAKEOPTS}"
 fi
@@ -322,7 +348,8 @@ done
 ctest --no-compress-output \${CTESTOPTS} -T Test -V || true
 echo sed -i Testing/*/Test.xml -e 's|\\(^[[:space:]]*<Name>\\)\\(.*\\)\\(</Name>\\)\$|\\1${label}.${buildType}.\\2\\3|'
 sed -i Testing/*/Test.xml -e 's|\\(^[[:space:]]*<Name>\\)\\(.*\\)\\(</Name>\\)\$|\\1${label}.${buildType}.\\2\\3|'
-cp -r /scratch/build-${JOBNAME_CLEANED}/Testing "${WORKSPACE}"
+rm -rf "${WORKSPACE}/Testing"
+cp -r /scratch/build-${buildJob_cleaned}/Testing "${WORKSPACE}"
 EOF
     cat /scratch/script
     chmod +x /scratch/script
@@ -462,7 +489,7 @@ EOF
 
 /**********************************************************************************************************************/
 
-def doInstall(String label, String buildType) {
+def doDeploy(String label, String buildType) {
 
   // Install, but redirect files into the install directory (instead of installing into the system)
   // Generate tar ball of install directory - this will be the artefact used by our dependents
@@ -484,15 +511,7 @@ def doInstall(String label, String buildType) {
 
 /**********************************************************************************************************************/
 
-def doPublishBuildTestDeploy(ArrayList<String> builds) {
-
-  // Note: this part runs only once per project, not for each branch!
-
-  // Run cppcheck and publish the result. Since this is a static analysis, we don't have to run it for each label
-  //if(!env.DISABLE_CPPCHECK || env.DISABLE_CPPCHECK == '') {
-  //  unstash "cppcheck.xml"
-  //  publishCppcheck pattern: 'cppcheck.xml'
-  //}
+def doPublishBuild(ArrayList<String> builds) {
 
   // Scan for compiler warnings. This is scanning the entire build logs for all labels and build types  
   recordIssues filters: [excludeMessage('.*-Wstrict-aliasing.*')], qualityGates: [[threshold: 1, type: 'TOTAL', unstable: true]], tools: [gcc()]
@@ -521,65 +540,11 @@ def doPublishAnalysis(ArrayList<String> builds) {
         }
       }
       
-      // get valgrind result (only Debug)
-      // -> disable for now
-      /*if(buildType == "Debug") {
-        try {
-          unstash "valgrind-${it}"
-        }
-        catch(all) {
-          echo("Could not retreive stashed valgrind results for ${it}")
-          currentBuild.result = 'FAILURE'
-        }
-      }*/
-      
     }
   }
-/*  
-  // publish valgrind result
-  publishValgrind (
-    failBuildOnInvalidReports: true,
-    failBuildOnMissingReports: true,
-    failThresholdDefinitelyLost: '',
-    failThresholdInvalidReadWrite: '',
-    failThresholdTotal: '',
-    pattern: '* / *.valgrind',
-    publishResultsForAbortedBuilds: false,
-    publishResultsForFailedBuilds: false,
-    sourceSubstitutionPaths: '',
-    unstableThresholdDefinitelyLost: '',
-    unstableThresholdInvalidReadWrite: '',
-    unstableThresholdTotal: '0'
-  )
-  */
+
   // publish cobertura result
   cobertura autoUpdateHealth: false, autoUpdateStability: false, coberturaReportFile: "*/coverage.xml", conditionalCoverageTargets: '70, 0, 0', failNoReports: false, failUnhealthy: false, failUnstable: false, lineCoverageTargets: '80, 0, 0', maxNumberOfBuilds: 0, methodCoverageTargets: '80, 0, 0', onlyStable: false, sourceEncoding: 'ASCII'
   
 }
 
-/**********************************************************************************************************************/
-
-def doCppcheck(String label, String buildType) {
-  // Generate coverage report as HTML and also convert it into cobertura XML file
-  sh """
-    chown msk_jenkins -R /scratch
-    cat > /scratch/script <<EOF
-#!/bin/bash
-cd /scratch/build-${JOBNAME_CLEANED}-${label}-${buildType}
-for VAR in \${JOB_VARIABLES} \${TEST_VARIABLES}; do
-   export \\`eval echo \\\${VAR}\\`
-done
-if [ -e compile_commands.json ]; then
-    cppcheck --inline-suppr --enable=all --xml --xml-version=2  --project=compile_commands.json 2>cppcheck.xml
-else
-    cppcheck --inline-suppr --enable=all --xml --xml-version=2  -ibuild -Iinclude /scratch/source 2>cppcheck.xml
-fi
-cp cppcheck.xml ${WORKSPACE} || true
-EOF
-    cat /scratch/script
-    chmod +x /scratch/script
-    sudo -H -E -u msk_jenkins /scratch/script
-  """
-
-  stash allowEmpty: true, includes: "cppcheck.xml", name: "cppcheck.xml"
-}
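The reworked dependencyToJenkinsProject() splits a dependency only at the first '/', escapes any further slashes for use in a Jenkins job name, and strips the "-testing"/"-analysis" suffix from the job type so that test and analysis jobs resolve their dependencies against the corresponding build jobs. A sketch of the resulting mapping, assuming env.JOB_TYPE is 'fasttrack-testing'; the dependency names are illustrative:

    // assuming env.JOB_TYPE == 'fasttrack-testing'
    assert dependencyToJenkinsProject('ChimeraTK/DeviceAccess') ==
           'ChimeraTK/fasttrack/DeviceAccess/master'        // '-testing' stripped from the job type
    assert dependencyToJenkinsProject('ChimeraTK/folder/Nested') ==
           'ChimeraTK/fasttrack/folder%2FNested/master'     // nested path escaped with %2F
    assert dependencyToJenkinsProject('DOOCS/serverlib') ==
           'DOOCS/serverlib'                                // DOOCS dependencies keep their plain name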
diff --git a/vars/testing.groovy b/vars/testing.groovy
new file mode 100644
index 0000000..241d12c
--- /dev/null
+++ b/vars/testing.groovy
@@ -0,0 +1,93 @@
+/***********************************************************************************************************************
+
+  testing() is called from autojob for fasttrack-testing jobs after the corresponding fasttrack job has completed
+
+***********************************************************************************************************************/
+
+// This is the function called from autojob() for testing jobs
+def call() {
+
+  // name of the job which ran the build
+  env.BUILD_JOB = "${env.ORGANISATION}/fasttrack/${env.PROJECT}/${env.BRANCH}"
+  
+  ArrayList<String> builds
+
+  pipeline {
+    agent none
+
+    // setup build trigger
+    triggers {
+      upstream(upstreamProjects: BUILD_JOB, threshold: hudson.model.Result.UNSTABLE)
+    }
+    options {
+      disableConcurrentBuilds()
+      copyArtifactPermission('*')
+      buildDiscarder(logRotator(numToKeepStr: '15', artifactNumToKeepStr: '2'))
+    }
+
+    stages {
+      stage('prepare') {
+        steps {
+          script {
+            node('Docker') {
+              // fetch list of build types
+              copyArtifacts filter: "builds.txt", fingerprintArtifacts: true, projectName: BUILD_JOB, selector: lastSuccessful(), target: "artefacts"  
+              def myFile = readFile(env.WORKSPACE+"/artefacts/builds.txt")
+              builds = myFile.split("\n")
+            }
+          }
+        }
+      }
+      stage('test') {
+        // Run the test stages for all labels + build types in parallel, each in a separate docker container
+        steps {
+          script {
+            parallel builds.collectEntries { ["${it}" : transformIntoStep(it)] }
+          }
+        }
+      } // stage test
+    } // end stages
+    post {
+      failure {
+        emailext body: '$DEFAULT_CONTENT', recipientProviders: [brokenTestsSuspects(), brokenBuildSuspects(), developers()], subject: '[Jenkins] $DEFAULT_SUBJECT', to: env.MAILTO
+        //mattermostSend channel: env.JOB_NAME, color: "danger", message: "Build of ${env.JOB_NAME} failed."
+        //mattermostSend channel: "Jenkins", color: "danger", message: "Build of ${env.JOB_NAME} failed."
+      }
+      always {
+        script {
+          if (currentBuild?.getPreviousBuild()?.result == 'FAILURE') {
+            if (!currentBuild.resultIsWorseOrEqualTo(currentBuild.getPreviousBuild().result)) {
+              //mattermostSend channel: env.JOB_NAME, color: "good", message: "Build of ${env.JOB_NAME} is good again."
+              //mattermostSend channel: "Jenkins", color: "good", message: "Build of ${env.JOB_NAME} is good again."
+            }
+          }
+        }
+      } // end always
+    } // end post
+  } // end pipeline
+}
+
+/**********************************************************************************************************************/
+
+def transformIntoStep(String buildName) {
+  // split the build name at the '-'
+  def (label, buildType) = buildName.tokenize('-')
+  // we need to return a closure here, which is then passed to parallel() for execution
+  return {
+    stage(buildName) {
+      node('Docker') {
+        // we need root access inside the container and access to the dummy pcie devices of the host
+        def dockerArgs = "-u 0 --privileged --device=/dev/mtcadummys0 --device=/dev/mtcadummys1 --device=/dev/mtcadummys2 --device=/dev/mtcadummys3 --device=/dev/llrfdummys4 --device=/dev/noioctldummys5 --device=/dev/pcieunidummys6 -v /var/run/lock/mtcadummy:/var/run/lock/mtcadummy -v /opt/matlab_R2016b:/opt/matlab_R2016b"
+        docker.image("builder:${label}").inside(dockerArgs) {
+          script {
+            helper.doTesting(label, buildType)
+          }
+        }
+      }
+    }
+  }
+}
+
+/**********************************************************************************************************************/
+
+
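The prepare stage reads builds.txt from the build job's artefacts and the test stage turns each entry into a parallel node running helper.doTesting(). transformIntoStep() splits each entry at the '-' into the docker label and the CMake build type. A short sketch of that mapping; the builds.txt content is an assumed example following the "<docker_image_name>-<cmake_build_type>" convention used elsewhere in this library:

    // assumed contents of builds.txt: "focal-Debug\nfocal-Release"
    def builds = ['focal-Debug', 'focal-Release']
    builds.each {
      def (label, buildType) = it.tokenize('-')
      // e.g. label == 'focal', buildType == 'Debug'
      // -> docker.image("builder:${label}") runs helper.doTesting(label, buildType) inside the container
    }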
-- 
GitLab