diff --git a/vars/analysis.groovy b/vars/analysis.groovy
index cd868ea2fdf86a27858cb3fbe5264e9f2c7584f0..d0eab37deff204bd33e81da0cb2d403292614c1d 100644
--- a/vars/analysis.groovy
+++ b/vars/analysis.groovy
@@ -14,8 +14,9 @@ def call() {
   // Run for all -Debug builds of the main job
   script {
     node('Docker') {
-      copyArtifacts filter: "builds.txt", fingerprintArtifacts: true, projectName: parentJob, selector: lastSuccessful(), target: "artefacts"
-      myFile = readFile(env.WORKSPACE+"/artefacts/builds.txt")
+      def JobNameAsDependency = helper.jekinsProjectToDependency(JOB_NAME)
+      def JobNameAsDependencyCleaned = JobNameAsDependency.replace("/","_")
+      def myFile = readFile("/home/msk_jenkins/dependency-database/buildnames/${JobNameAsDependencyCleaned}")
       builds = myFile.split("\n").toList()
       def builds_temp = builds.clone()
       builds_temp.each {
diff --git a/vars/buildAndDeploy.groovy b/vars/buildAndDeploy.groovy
index 2d4316dfd6a63d6d1892d42f103832aaaa19258f..9b9e83a8654e714d28e20c89d33173d4e5226243 100644
--- a/vars/buildAndDeploy.groovy
+++ b/vars/buildAndDeploy.groovy
@@ -8,15 +8,6 @@
 // The last optional argument is the list of builds to be run. Format must be "<docker_image_name>-<cmake_build_type>"
 def call(ArrayList<String> dependencyList, String gitUrl, ArrayList<String> builds) {
 
-  def dependencyJobList = new ArrayList<String>()
-  
-  // Temporary work around for branches builds: Since build artefacts would be used from wrong builds if multiple
-  // branches are build/tested at the same time, we block here until no other branch is currently build/tested.
-  // The resourceToLock initially contains a unique name so the lock will not be effective by default. Only if this
-  // build is the first branches build the name will be changed below into 'branches-build', such that no concurrent
-  // build can occur.
-  def resourceToLock = "not-a-lock-${JOB_NAME}@${BUILD_NUMBER}"
-
   script {
 
     // if branches job type, add parameter BRANCH_UNDER_TEST
@@ -29,16 +20,15 @@ def call(ArrayList<String> dependencyList, String gitUrl, ArrayList<String> buil
       ])
 
       helper.BUILD_PLAN = new groovy.json.JsonSlurper().parseText(params.BUILD_PLAN)
+      helper.BRANCH_UNDER_TEST = params.BRANCH_UNDER_TEST
      
       println("helper.BUILD_PLAN = ${helper.BUILD_PLAN}")
-      println("params.BRANCH_UNDER_TEST = ${params.BRANCH_UNDER_TEST}")
-  
-      if(params.BRANCH_UNDER_TEST == JOB_NAME) {
-        println("This is the main job in a branches build. Exclude concurrent builds!")
-        resourceToLock = "branches-build"
-      }
+      println("helper.BRANCH_UNDER_TEST = ${helper.BRANCH_UNDER_TEST}")
 
     }
+    else {
+      helper.BRANCH_UNDER_TEST = ""
+    }
     
     node('Docker') {
 
@@ -59,14 +49,10 @@ def call(ArrayList<String> dependencyList, String gitUrl, ArrayList<String> buil
         def projectCorrected = project.replace('/','%2F')
         def dependency = "${folder}/${projectCorrected}"
         dependencyListCorrected.add(dependency)
-        
-        // generate job name from dependency name
-        def dependencyProjectName = helper.dependencyToJenkinsProject(dependency)
-        dependencyJobList.add(dependencyProjectName)
   
         // obtain list of builds for the dependency
-        copyArtifacts filter: "builds.txt", fingerprintArtifacts: true, projectName: dependencyProjectName, selector: lastSuccessful(), target: "artefacts"
-        myFile = readFile(env.WORKSPACE+"/artefacts/builds.txt")
+        def dependencyCleaned = dependency.replace('/','_')
+        myFile = readFile("/home/msk_jenkins/dependency-database/buildnames/${dependencyCleaned}")
         def depBuilds = myFile.split("\n")
         def curBuilds = builds.clone()
         
@@ -78,29 +64,26 @@ def call(ArrayList<String> dependencyList, String gitUrl, ArrayList<String> buil
         }
       } // dependencyList.each
 
+      // compute names used below
+      def JobNameAsDependency = helper.jekinsProjectToDependency(JOB_NAME)
+      def JobNameAsDependencyCleaned = JobNameAsDependency.replace("/","_")
+
       // publish our list of builds as artefact for our downstream builds
-      writeFile file: "builds.txt", text: builds.join("\n")
-      archiveArtifacts artifacts: "builds.txt", onlyIfSuccessful: false
-      
-      // publish our list of direct dependencies for our downstream builds
-      writeFile file: "dependencyList.txt", text:dependencyListCorrected.join("\n")
-      archiveArtifacts artifacts: "dependencyList.txt", onlyIfSuccessful: false
+      writeFile file: "/home/msk_jenkins/dependency-database/buildnames/${JobNameAsDependencyCleaned}", text: builds.join("\n")
       
       // record our dependencies in central "data base" for explicit dependency triggering
+      writeFile file: "/home/msk_jenkins/dependency-database/reverse/${JobNameAsDependencyCleaned}", text:dependencyListCorrected.join("\n")
       def dependencyListJoined = dependencyListCorrected.join(" ").replace("/","_")
-      def JobNameAsDependency = helper.jekinsProjectToDependency(JOB_NAME)
-      def JobNameAsDependencyCleaned = JobNameAsDependency.replace("/","_")
       sh """
         for dependency in ${dependencyListJoined}; do
           mkdir -p "/home/msk_jenkins/dependency-database/forward/\${dependency}"
           echo "${JobNameAsDependency}" > "/home/msk_jenkins/dependency-database/forward/\${dependency}/${JobNameAsDependencyCleaned}"
         done
-        cp "${WORKSPACE}/dependencyList.txt" "/home/msk_jenkins/dependency-database/reverse/${JobNameAsDependencyCleaned}"
       """
       
       println("============ HIER 1")
 
-      if(env.JOB_TYPE == "branches" && params.BRANCH_UNDER_TEST == JOB_NAME) {
+      if(env.JOB_TYPE == "branches" && helper.BRANCH_UNDER_TEST == JOB_NAME) {
         println("============ HIER 2")
         // first branches-typed build: create build plan
         helper.BUILD_PLAN = helper.generateBuildPlan()
@@ -111,121 +94,186 @@ def call(ArrayList<String> dependencyList, String gitUrl, ArrayList<String> buil
     
   } // script
 
-  // form comma-separated list of dependencies as needed for the trigger configuration
-  def dependencies = dependencyJobList.join(',')
-  if(dependencies == "") {
-    dependencies = "Create Docker Images"
-  }
-  
-  lock(resource: resourceToLock) {
+  pipeline {
+    agent none
 
-    pipeline {
-      agent none
+    // setup build trigger
+    // Note: do not trigger automatically by dependencies, since this is implemented explicitly to have more control.
+    // The dependencies are tracked above in the scripts section in a central "database" and used to trigger downstream
+    // build jobs after the build.
+    triggers {
+      pollSCM('* * * * *')
+    }
+    options {
+      //disableConcurrentBuilds()
+      quietPeriod(0)
+      copyArtifactPermission('*')
+      buildDiscarder(logRotator(numToKeepStr: '15', artifactNumToKeepStr: '2'))
+    }
 
-      // setup build trigger
-      // Note: do not trigger automatically by dependencies, since this is implemented explicitly to have more control.
-      // The dependencies are tracked above in the scripts section in a central "database" and used to trigger downstream
-      // build jobs after the build.
-      triggers {
-        pollSCM('* * * * *')
-      }
-      options {
-        //disableConcurrentBuilds()
-        quietPeriod(0)
-        copyArtifactPermission('*')
-        buildDiscarder(logRotator(numToKeepStr: '15', artifactNumToKeepStr: '2'))
-      }
+    stages {
+      // apply changes from project-template
+      stage('preprocess') {
+        steps {
+          script {
 
-      stages {
-        // apply changes from project-template
-        stage('preprocess') {
-          steps {
-            script {
-              node('Docker') {
-                if (env.BRANCH_NAME && env.BRANCH_NAME != '') {
-                  git branch: env.BRANCH_NAME, url: gitUrl
-                } else {
-                  git gitUrl
-                }
-                sh """
-                  git reset --hard
-                  git clean -f -d -x
-                  git config credential.helper store
-                  git remote add project-template "https://github.com/ChimeraTK/project-template" || true
-                  git remote set-url origin `echo ${gitUrl} | sed -e 's_http://doocs-git.desy.de/cgit/_git@doocs-git.desy.de:_' -e 's_/\$__'`
-                  git remote update
-                  git merge -X theirs --no-edit project-template/master && \
-                  git push --all || \
-                  true
-                """
-                // We could also apply the clang-format style here, but this should be discussed first.
-                //  find \( -name '*.cc' -o -name '*.cxx' -o -name '*.c' -o -name '*.cpp' -o -name '*.h' -o -name '*.hpp' -o -name '*.hxx' -o -name '*.hh' \) -exec clang-format-6.0 -style=file -i \{\} \;
-                //  git commit -a -m "Automated commit: apply clang-format" && git push --all || true
-              }
+            // if this job has no dependency, make sure it gets triggered when docker images are renewed
+            if(dependencyList.isEmpty()) {
+              properties([pipelineTriggers([upstream('Create Docker Images')])])
             }
-          }
-        } // stage preprocess
-        
-        stage('build') {
-          // Run the build stages for all labels + build types in parallel, each in a separate docker container
-          steps {
-            script {
-              parallel builds.collectEntries { ["${it}" : transformIntoStep(dependencyList, it, gitUrl)] }
-            }
-          }
-        } // stage build
-        
-        stage('downstream-builds') {
-          when {
-            expression { return params.BRANCH_UNDER_TEST == JOB_NAME }
-          }
-          steps {
-            script {
-              helper.BUILD_PLAN.each { buildGroup ->
-                parallel buildGroup.collectEntries { ["${it}" : {
-                  def theJob = helper.dependencyToJenkinsProject(it, true)
-                  
-                  def r = build(job: theJob, propagate: false, wait: true, parameters: [
-                    string(name: 'BRANCH_UNDER_TEST', value: params.BRANCH_UNDER_TEST),
-                    string(name: 'BUILD_PLAN', value: groovy.json.JsonOutput.toJson(helper.BUILD_PLAN))
-                  ])
-                  currentBuild.result = hudson.model.Result.combine(hudson.model.Result.fromString(currentBuild.currentResult), hudson.model.Result.fromString(r.result))
-
-                  build(job: theJob.replace("/branches/", "/branches-testing/"), propagate: true, wait: false, parameters: [
-                    string(name: 'BRANCH_UNDER_TEST', value: params.BRANCH_UNDER_TEST),
-                    string(name: 'BUILD_PLAN', value: groovy.json.JsonOutput.toJson(helper.BUILD_PLAN))
-                  ])
-                }] }
+
+            node('Docker') {
+              if (env.BRANCH_NAME && env.BRANCH_NAME != '') {
+                git branch: env.BRANCH_NAME, url: gitUrl
+              } else {
+                git gitUrl
               }
+              sh """
+                git reset --hard
+                git clean -f -d -x
+                git config credential.helper store
+                git remote add project-template "https://github.com/ChimeraTK/project-template" || true
+                git remote set-url origin `echo ${gitUrl} | sed -e 's_http://doocs-git.desy.de/cgit/_git@doocs-git.desy.de:_' -e 's_/\$__'`
+                git remote update
+                git merge -X theirs --no-edit project-template/master && \
+                git push --all || \
+                true
+              """
+              // We could also apply the clang-format style here, but this should be discussed first.
+              //  find \( -name '*.cc' -o -name '*.cxx' -o -name '*.c' -o -name '*.cpp' -o -name '*.h' -o -name '*.hpp' -o -name '*.hxx' -o -name '*.hh' \) -exec clang-format-6.0 -style=file -i \{\} \;
+              //  git commit -a -m "Automated commit: apply clang-format" && git push --all || true
             }
           }
-        } // stage downstream-builds
-      } // end stages
-      post {
-        failure {
-          emailext body: '$DEFAULT_CONTENT', recipientProviders: [brokenTestsSuspects(), brokenBuildSuspects(), developers()], subject: '[Jenkins] $DEFAULT_SUBJECT', to: env.MAILTO
-          //mattermostSend channel: env.JOB_NAME, color: "danger", message: "Build of ${env.JOB_NAME} failed."
-          //mattermostSend channel: "Jenkins", color: "danger", message: "Build of ${env.JOB_NAME} failed."
         }
-        always {
-          node('Docker') {
-            script {
-              helper.doPublishBuild(builds)
+      } // stage preprocess
+      
+      stage('build') {
+        // Run the build stages for all labels + build types in parallel, each in a separate docker container
+        steps {
+          script {
+            parallel builds.collectEntries { ["${it}" : transformIntoStep(dependencyList, it, gitUrl)] }
+          }
+        }
+      } // stage build
+      
+      stage('downstream-builds') {
+        when {
+          expression { return helper.BRANCH_UNDER_TEST == JOB_NAME }
+        }
+        steps {
+          script {
+            def buildList = helper.BUILD_PLAN.flatten()
+            
+            // buildDone: map of condition variables to signal when build terminates
+            def buildDone = [:]
+            // buildStatus: map of build statuses (true = ok, false = failed)
+            def buildStatus = [:]
+            buildList.each {
+              buildDone[it] = createCondition()
             }
+            buildDone[helper.jekinsProjectToDependency(JOB_NAME)] = createCondition()
+            buildStatus[helper.jekinsProjectToDependency(JOB_NAME)] = true
+            
+            parallel buildList.collectEntries { ["${it}" : {
+              def theJob = helper.dependencyToJenkinsProject(it, true)
+              
+              // signal downstream builds (waiting in parallel) when finished
+              signalAll(buildDone[it]) {
+
+                // wait until all dependencies which are also build here (in parallel) are done
+                def myDeps
+                node {
+                  myDeps = helper.gatherDependenciesDeep([it])
+                }
+                def failedDeps = false
+                myDeps.each { dep ->
+                  // myDeps contains the job itself -> ignore it
+                  if(dep == it) return
+                // ignore dependencies which are not built within this job
+                  if(!buildDone.containsKey(dep)) return
+                  // if build status not yet set, wait for notification
+                  // Attention: There is a potential race condition if the notification happens between the check of the
+                  // build status and the call to awaitCondition()! Unclear how to solve this. For now we just add a
+                  // sleep between setting the build status and sending the notification below.
+                  if(!buildStatus.containsKey(dep)) {
+                    echo("Waiting for dependency ${dep}...")
+                    awaitCondition(buildDone[dep])
+                  }
+                  if(!buildStatus[dep]) {
+                    echo("Dependency ${dep} has failed, not triggering downstream build...")
+                    failedDeps = true
+                  }
+                }
+                if(failedDeps) {
+                  echo("Not proceeding with downstream build due to failed dependencies.")
+                  return
+                }
+
+                // trigger the build and wait until done
+                // Note: propagate=true would abort+fail even if downstream build result is unstable. Also in case of
+                // a failure we first need to set the buildStatus before failing...
+                def r = build(job: theJob, propagate: false, wait: true, parameters: [
+                  string(name: 'BRANCH_UNDER_TEST', value: helper.BRANCH_UNDER_TEST),
+                  string(name: 'BUILD_PLAN', value: groovy.json.JsonOutput.toJson(helper.BUILD_PLAN))
+                ])
+                
+                echo("r.result = ${r.result}")
+                def result =  hudson.model.Result.fromString(r.result)
+                
+                if(result == Result.SUCCESS) {
+                  buildStatus[it] = true
+                  sleep(5) // mitigate race condition, see above
+                  echo("Build result of ${it} is SUCCESS.")
+                }
+                else if(result == Result.UNSTABLE) {
+                  buildStatus[it] = true
+                  sleep(5) // mitigate race condition, see above
+                  unstable(message: "Build result of ${it} is UNSTABLE.")
+                }
+                else {
+                  buildStatus[it] = false
+                  sleep(5) // mitigate race condition, see above
+                  error(message: "Build result of ${it} is FAILURE (or ABORTED etc.).")
+                }
+
+              } // <-- signal downstream projects waiting for this build
+
+              // trigger the test and wait until done
+              // propagate=true is ok here, since we do not do anything further downstream in this parallel stage.
+              build(job: theJob.replace("/branches/", "/branches-testing/"), propagate: true, wait: true, parameters: [
+                string(name: 'BRANCH_UNDER_TEST', value: helper.BRANCH_UNDER_TEST),
+                string(name: 'BUILD_PLAN', value: groovy.json.JsonOutput.toJson(helper.BUILD_PLAN))
+              ])
+
+            }] }
           }
+        }
+      } // stage downstream-builds
+    } // end stages
+    post {
+      failure {
+        emailext body: '$DEFAULT_CONTENT', recipientProviders: [brokenTestsSuspects(), brokenBuildSuspects(), developers()], subject: '[Jenkins] $DEFAULT_SUBJECT', to: env.MAILTO
+        //mattermostSend channel: env.JOB_NAME, color: "danger", message: "Build of ${env.JOB_NAME} failed."
+        //mattermostSend channel: "Jenkins", color: "danger", message: "Build of ${env.JOB_NAME} failed."
+      }
+      always {
+        node('Docker') {
           script {
-            if (currentBuild?.getPreviousBuild()?.result == 'FAILURE') {
-              if (!currentBuild.resultIsWorseOrEqualTo(currentBuild.getPreviousBuild().result)) {
-                //mattermostSend channel: env.JOB_NAME, color: "good", message: "Build of ${env.JOB_NAME} is good again."
-                //mattermostSend channel: "Jenkins", color: "good", message: "Build of ${env.JOB_NAME} is good again."
-              }
+            helper.doPublishBuild(builds)
+          }
+        }
+        script {
+          if (currentBuild?.getPreviousBuild()?.result == 'FAILURE') {
+            if (!currentBuild.resultIsWorseOrEqualTo(currentBuild.getPreviousBuild().result)) {
+              //mattermostSend channel: env.JOB_NAME, color: "good", message: "Build of ${env.JOB_NAME} is good again."
+              //mattermostSend channel: "Jenkins", color: "good", message: "Build of ${env.JOB_NAME} is good again."
             }
           }
-        } // end always
-      } // end post
-    } // end pipeline
-  
-  } // end lock
+        }
+      } // end always
+    } // end post
+  } // end pipeline
+
 }
 
 /**********************************************************************************************************************/
diff --git a/vars/buildDoocsLibrary.groovy b/vars/buildDoocsLibrary.groovy
index 2af9396c834ec30f5c38eead83d9cce2ce48fb6d..524913fb985ff1a8e3435364b94224038bf38a98 100644
--- a/vars/buildDoocsLibrary.groovy
+++ b/vars/buildDoocsLibrary.groovy
@@ -19,13 +19,14 @@ def call(String libraryName, ArrayList<String> dependencyList) {
 
   script {
     node('Docker') {
+      def JobNameAsDependency = JOB_NAME
+      def JobNameAsDependencyCleaned = JobNameAsDependency.replace("/","_")
+
       // publish our list of builds as artefact for our downstream builds
-      writeFile file: "builds.txt", text: builds.join("\n")
-      archiveArtifacts artifacts: "builds.txt", onlyIfSuccessful: false
+      writeFile file: "/home/msk_jenkins/dependency-database/buildnames/${JobNameAsDependencyCleaned}", text: builds.join("\n")
  
       // publish our list of direct dependencies for our downstream builds
-      writeFile file: "dependencyList.txt", text: dependencyList.join("\n")
-      archiveArtifacts artifacts: "dependencyList.txt", onlyIfSuccessful: false
+      writeFile file: "/home/msk_jenkins/dependency-database/reverse/${JobNameAsDependencyCleaned}", text: dependencyList.join("\n")
 
       // form comma-separated list of dependencies as needed for the trigger configuration
       dependencies = dependencyList.join(',')
@@ -35,14 +36,11 @@ def call(String libraryName, ArrayList<String> dependencyList) {
       
       // record our dependencies in central "data base" for explicit dependency triggering
       def dependencyListJoined = dependencyList.join(" ").replace("/","_")
-      def JobNameAsDependency = JOB_NAME
-      def JobNameAsDependencyCleaned = JobNameAsDependency.replace("/","_")
       sh """
         for dependency in ${dependencyListJoined}; do
           mkdir -p "/home/msk_jenkins/dependency-database/forward/\${dependency}"
           echo "${JobNameAsDependency}" > "/home/msk_jenkins/dependency-database/forward/\${dependency}/${JobNameAsDependencyCleaned}"
         done
-        cp "${WORKSPACE}/dependencyList.txt" "/home/msk_jenkins/dependency-database/reverse/${JobNameAsDependencyCleaned}"
       """
 
     }
@@ -58,7 +56,7 @@ def call(String libraryName, ArrayList<String> dependencyList) {
     }
     options {
       disableConcurrentBuilds()
-      quietPeriod(180)
+      //quietPeriod(180)
       copyArtifactPermission('*')
       buildDiscarder(logRotator(numToKeepStr: '30', artifactNumToKeepStr: '10'))
     }
@@ -98,7 +96,7 @@ def transformIntoStep(String libraryName, ArrayList<String> dependencyList, Stri
           git gitUrl
         }
         // we need root access inside the container
-        def dockerArgs = "-u 0 --privileged"
+        def dockerArgs = "-u 0 --privileged -v /home/msk_jenkins:/home/msk_jenkins"
         docker.image("builder:${label}").inside(dockerArgs) {
           script {
             sh '''
@@ -107,6 +105,9 @@ def transformIntoStep(String libraryName, ArrayList<String> dependencyList, Stri
             '''
             helper.doDependencyArtefacts(dependencyList, label, buildType)
 
+            // Compute name where to put the install artifact
+            def installArtifactFile = helper.getArtefactName(false, "install.tgz", label, buildType, JOB_NAME)
+
             // We don't care that in gitlab the repository structure is different. Those project only work with meson builds anyway, and form them the path does not matter.
             sh """
               mkdir -p /export/doocs/library/${libraryName}
@@ -161,9 +162,8 @@ def transformIntoStep(String libraryName, ArrayList<String> dependencyList, Stri
               touch mv /scratch/artefact.list
               mv /scratch/artefact.list /scratch/dependencies.${JOBNAME_CLEANED}.list
               echo /scratch/dependencies.${JOBNAME_CLEANED}.list >> export.list.installed
-              sudo -H -u msk_jenkins tar cf install-${JOBNAME_CLEANED}-${label}-${buildType}.tgz --files-from export.list.installed --use-compress-program="pigz -9 -p32"
+              sudo -H -u msk_jenkins tar cf ${installArtifactFile} --files-from export.list.installed --use-compress-program="pigz -9 -p32"
             """
-            archiveArtifacts artifacts: "install-${JOBNAME_CLEANED}-${label}-${buildType}.tgz", onlyIfSuccessful: false
           }
         }
       }
diff --git a/vars/helper.groovy b/vars/helper.groovy
index 89256a1cad588c711babc9a479b5d98c6da79b80..6c791edb31388a834a8fe92a279c16f07c3be1a1 100644
--- a/vars/helper.groovy
+++ b/vars/helper.groovy
@@ -5,12 +5,17 @@
 ***********************************************************************************************************************/
 
 ArrayList<String> BUILD_PLAN = []
+String BRANCH_UNDER_TEST = ""
 
 /**********************************************************************************************************************/
 
 // helper function, convert dependency name as listed in the .jenkinsfile into a Jenkins project name
 def dependencyToJenkinsProject(String dependency, boolean forceBranches = false) {
   def dependencyProjectName = dependency
+  if(dependency.contains('@')) {
+    // get rid of build number if specified
+    dependency = dependency.split('@')[0]
+  }
   def (dependencyFolder, dependencyProject) = dependencyProjectName.split('/',2)
   dependencyProject = dependencyProject.replace('/','%2F')
   def jobType = env.JOB_TYPE
@@ -24,19 +29,19 @@ def dependencyToJenkinsProject(String dependency, boolean forceBranches = false)
     println("Special branches check: ${dependency}")
   
     println(JOB_NAME)
-    println(params.BRANCH_UNDER_TEST)
+    println(BRANCH_UNDER_TEST)
     println(BUILD_PLAN)
     
     println(BUILD_PLAN.flatten().size())
     println(BUILD_PLAN.flatten().indexOf(dependency))
-    def dependencyUnderTest = jekinsProjectToDependency(params.BRANCH_UNDER_TEST)
+    def dependencyUnderTest = jekinsProjectToDependency(BRANCH_UNDER_TEST)
     println("dependencyUnderTest = ${dependencyUnderTest}")
 
-    if(!forceBranches && (BUILD_PLAN.flatten().indexOf(dependency) < 0 || params.BRANCH_UNDER_TEST == JOB_NAME) && dependencyUnderTest != dependency) {
+    if(!forceBranches && (BUILD_PLAN.flatten().indexOf(dependency) < 0 || BRANCH_UNDER_TEST == JOB_NAME) && dependencyUnderTest != dependency) {
       jobType = "fasttrack"
     }
-    else {
-      def (butFolder, butType, butProject, butBranch) = params.BRANCH_UNDER_TEST.split('/')
+    else { 
+      def (butFolder, butType, butProject, butBranch) = BRANCH_UNDER_TEST.split('/')
       if(butFolder == dependencyFolder && butType == jobType && butProject == dependencyProject) {
         branch = butBranch
         println("dependency matches branch under test.")
@@ -126,6 +131,84 @@ def generateBuildPlan() {
 
 /**********************************************************************************************************************/
 
+def getArtefactName(boolean forReading, String basename, String label, String buildType, String dependency = jekinsProjectToDependency(JOB_NAME)) {
+  // Compute name for artifact in local file storage on the build node. This saves time and is more flexible than using
+  // Jenkins archiveArtifact/copyArtifact, but won't work when using multiple build nodes.
+  //
+  // "basename" is the filename with extension but without path. It should not contain any job/build specific parts, as
+  // this will be taken care of in the path. Typical values are "build" and "install".
+  //
+  // This function also creates the directory if it does not yet exist.
+
+  def dependencyNoBuildNo = dependency
+  if(dependencyNoBuildNo.contains('@')) {
+    // get rid of build number if specified
+    dependencyNoBuildNo = dependency.split('@')[0]
+  }
+  
+  def jobName = dependencyToJenkinsProject(dependencyNoBuildNo)
+  def JOBNAME_CLEANED=jobName.replace('/','_')
+  
+  println("getArtefactName(${forReading}, ${basename}, ${label}, ${buildType}, ${dependency})")
+
+  println("jobName = ${jobName}")
+  println("JOBNAME_CLEANED = ${JOBNAME_CLEANED}")
+  
+  def path = "/home/msk_jenkins/artifacts/${JOBNAME_CLEANED}/${label}/${buildType}"
+  
+  def buildNumber = null
+  if(forReading) {
+
+    def upstreamCause = currentBuild.rawBuild.getCause(Cause.UpstreamCause)
+    println("upstreamCause = ${upstreamCause}")
+    if(upstreamCause) {
+      println("upstreamCause.getUpstreamProject() = ${upstreamCause.getUpstreamProject()}")
+    }
+
+    if(dependency.contains('@')) {
+      buildNumber = dependency.split('@',2)[1]
+      println("Build number from dependency name!")
+    }
+    else if(upstreamCause && upstreamCause.getUpstreamProject() == jobName) {
+      // looking for build name of job which triggered us
+      buildNumber = upstreamCause.getUpstreamBuild()
+      println("Build number from upstream trigger!")
+    }
+    else {
+      // determine latest available build
+      println("path = ${path}")
+      upstreamCause = null // the object may not be serializable which causes an exception when executing sh
+      buildNumber = sh ( script: "ls ${path} | sort -n | tail -n1", returnStdout: true ).trim()
+      println("Build number from latest build!")    
+    }
+  }
+  else {
+    buildNumber = BUILD_NUMBER
+  }
+
+  println("buildNumber = ${buildNumber}")
+  
+  path = path+"/"+buildNumber
+
+  sh """
+    mkdir -p ${path}
+    chown msk_jenkins:msk_jenkins -R ${path}
+  """
+
+  println("path = ${path}")
+
+  return "${path}/${basename}"
+}
+
+/**********************************************************************************************************************/
+
+def getBuildNumberFromArtefactFileName(String fileName) {
+  def components = fileName.split('/')
+  return components[components.size()-2]
+}
+
+/**********************************************************************************************************************/
+
 def doBuildAndDeploy(ArrayList<String> dependencyList, String label, String buildType, String gitUrl) {
 
   // prepare source directory and dependencies
@@ -266,17 +349,27 @@ def doDependencyArtefacts(ArrayList<String> dependencyList, String label, String
       obtainedArtefacts.add(dependency)
 
       // download the artefact
-      copyArtifacts filter: "install-${dependency_cleaned}-${label}-${buildType}.tgz", fingerprintArtifacts: true, projectName: "${dependency}", selector: lastSuccessful(), target: "artefacts"
+      //copyArtifacts filter: "install-${dependency_cleaned}-${label}-${buildType}.tgz", fingerprintArtifacts: true, projectName: "${dependency}", selector: lastSuccessful(), target: "artefacts"
 
       // unpack artefact
+      def theFile = getArtefactName(true, "install.tgz", label, buildType, it)
+      println("theFile = ${theFile}")
       sh """
-        tar xf \"artefacts/install-${dependency_cleaned}-${label}-${buildType}.tgz\" -C / --keep-directory-symlink --use-compress-program="pigz -9 -p32"
+        #tar xf \"artefacts/install-${dependency_cleaned}-${label}-${buildType}.tgz\" -C / --keep-directory-symlink --use-compress-program="pigz -9 -p32"
+        tar xf \"${theFile}\" -C / --keep-directory-symlink --use-compress-program="pigz -9 -p32"
       """
 
       // keep track of dependencies to download - used when dependees need to resolve our dependencies
+      def depBuildNo = getBuildNumberFromArtefactFileName(theFile)
+      println("it = ${it}")
+      println("depBuildNo = ${depBuildNo}")
       sh """
         touch /scratch/artefact.list
-        echo "${it}" >> /scratch/artefact.list
+        if [[ "${it}" == *"@"* ]]; then
+          echo "${it}" >> /scratch/artefact.list
+        else
+          echo "${it}@${depBuildNo}" >> /scratch/artefact.list
+        fi
       """
 
       // process dependencies of the dependency we just downloaded
@@ -308,14 +401,17 @@ def doBuilddirArtefact(String label, String buildType) {
   
     def buildJob = env.BUILD_JOB
     def buildJob_cleaned = buildJob.replace('/','_')
-    copyArtifacts filter: "build-${buildJob_cleaned}-${label}-${buildType}.tgz", fingerprintArtifacts: true, projectName: "${buildJob}", selector: lastSuccessful(), target: "artefacts"
+    //copyArtifacts filter: "build-${buildJob_cleaned}-${label}-${buildType}.tgz", fingerprintArtifacts: true, projectName: "${buildJob}", selector: lastSuccessful(), target: "artefacts"
+    
+    def theFile = getArtefactName(true, "build.tgz", label, buildType)
 
     // Unpack artefact into the Docker system root (should only write files to /scratch, which is writable by msk_jenkins).
     // Then obtain artefacts of dependencies (from /scratch/artefact.list)
     sh """
-      for a in artefacts/build-*-${label}-${buildType}.tgz ; do
-        sudo -H -E -u msk_jenkins tar xf \"\${a}\" -C / --use-compress-program="pigz -9 -p32"
-      done
+      #for a in artefacts/build-*-${label}-${buildType}.tgz ; do
+      #  sudo -H -E -u msk_jenkins tar xf \"\${a}\" -C / --use-compress-program="pigz -9 -p32"
+      #done
+      sudo -H -E -u msk_jenkins tar xf \"${theFile}\" -C / --use-compress-program="pigz -9 -p32"
 
       touch /scratch/artefact.list
       cp /scratch/artefact.list ${WORKSPACE}/artefact.list
@@ -325,18 +421,24 @@ def doBuilddirArtefact(String label, String buildType) {
       if( it != "" ) {
         def dependency = dependencyToJenkinsProject(it)
         def dependency_cleaned = dependency.replace('/','_')
-        copyArtifacts filter: "install-${dependency_cleaned}-${label}-${buildType}.tgz", fingerprintArtifacts: true, projectName: "${dependency}", selector: lastSuccessful(), target: "artefacts"
+        // copyArtifacts filter: "install-${dependency_cleaned}-${label}-${buildType}.tgz", fingerprintArtifacts: true, projectName: "${dependency}", selector: lastSuccessful(), target: "artefacts"
+
+        theFile = getArtefactName(true, "install.tgz", label, buildType, it)
+        sh """
+          tar xf \"${theFile}\" -C / --use-compress-program="pigz -9 -p32"
+        """
+
       }
     }
   }
 
   // unpack artefacts of dependencies into the Docker system root
   sh """
-    if ls artefacts/install-*-${label}-${buildType}.tgz 1>/dev/null 2>&1; then
-      for a in artefacts/install-*-${label}-${buildType}.tgz ; do
-        tar xf \"\${a}\" -C / --use-compress-program="pigz -9 -p32"
-      done
-    fi
+    #if ls artefacts/install-*-${label}-${buildType}.tgz 1>/dev/null 2>&1; then
+    #  for a in artefacts/install-*-${label}-${buildType}.tgz ; do
+    #    tar xf \"\${a}\" -C / --use-compress-program="pigz -9 -p32"
+    #  done
+    #fi
   """
     
   // fix ownership
@@ -386,11 +488,12 @@ EOF
     """
   }
   script {
-    // generate and archive artefact from build directory (used for the analysis job)
+    // generate and archive artefact from build directory (used e.g. for the analysis job)
+    def theFile = getArtefactName(false, "build.tgz", label, buildType)
     sh """
-      sudo -H -E -u msk_jenkins tar cf build-${JOBNAME_CLEANED}-${label}-${buildType}.tgz /scratch --use-compress-program="pigz -9 -p32"
+      sudo -H -E -u msk_jenkins tar cf \"${theFile}\" /scratch --use-compress-program="pigz -9 -p32"
     """
-    archiveArtifacts artifacts: "build-${JOBNAME_CLEANED}-${label}-${buildType}.tgz", onlyIfSuccessful: false
+    //archiveArtifacts artifacts: "build-${JOBNAME_CLEANED}-${label}-${buildType}.tgz", onlyIfSuccessful: false
   }
 }
 
@@ -510,6 +613,7 @@ def doDeploy(String label, String buildType) {
 
   // Install, but redirect files into the install directory (instead of installing into the system)
   // Generate tar ball of install directory - this will be the artefact used by our dependents
+  def theFile = getArtefactName(false, "install.tgz", label, buildType)
   sh """
     cd /scratch/build-${JOBNAME_CLEANED}
     sudo -H -E -u msk_jenkins bash -c 'DESTDIR=../install ninja install'
@@ -519,11 +623,9 @@ def doDeploy(String label, String buildType) {
     if [ -e /scratch/artefact.list ]; then
       cp /scratch/artefact.list scratch/dependencies.${JOBNAME_CLEANED}.list
     fi
-    sudo -H -E -u msk_jenkins tar cf ${WORKSPACE}/install-${JOBNAME_CLEANED}-${label}-${buildType}.tgz . --use-compress-program="pigz -9 -p32"
+    #sudo -H -E -u msk_jenkins tar cf ${WORKSPACE}/install-${JOBNAME_CLEANED}-${label}-${buildType}.tgz . --use-compress-program="pigz -9 -p32"
+    sudo -H -E -u msk_jenkins tar cf ${theFile} . --use-compress-program="pigz -9 -p32"
   """
-  
-  // Archive the artefact tar ball (even if other branches of this build failed - TODO: do we really want to do that?)
-  archiveArtifacts artifacts: "install-${JOBNAME_CLEANED}-${label}-${buildType}.tgz", onlyIfSuccessful: false
 
   // Downstream projects are triggered in buildAndDeply of the first project only.
   if(env.JOB_TYPE == "branches") return
diff --git a/vars/testing.groovy b/vars/testing.groovy
index a6b46bd46fd50e055150ce43ed98740ac23c9e9a..ac2953e1dd9fee1a9b569ba97fc5fe8e9850f518 100644
--- a/vars/testing.groovy
+++ b/vars/testing.groovy
@@ -55,8 +55,9 @@ def call() {
           script {
             node('Docker') {
               // fetch list of build types
-              copyArtifacts filter: "builds.txt", fingerprintArtifacts: true, projectName: BUILD_JOB, selector: lastSuccessful(), target: "artefacts"  
-              def myFile = readFile(env.WORKSPACE+"/artefacts/builds.txt")
+              def JobNameAsDependency = helper.jekinsProjectToDependency(JOB_NAME)
+              def JobNameAsDependencyCleaned = JobNameAsDependency.replace("/","_")
+              def myFile = readFile("/home/msk_jenkins/dependency-database/buildnames/${JobNameAsDependencyCleaned}")
               builds = myFile.split("\n")
             }
           }
@@ -101,7 +102,7 @@ def transformIntoStep(String buildName) {
     stage(buildName) {
       node('Docker') {
         // we need root access inside the container and access to the dummy pcie devices of the host
-        def dockerArgs = "-u 0 --privileged --device=/dev/mtcadummys0 --device=/dev/mtcadummys1 --device=/dev/mtcadummys2 --device=/dev/mtcadummys3 --device=/dev/llrfdummys4 --device=/dev/noioctldummys5 --device=/dev/pcieunidummys6 -v /var/run/lock/mtcadummy:/var/run/lock/mtcadummy -v /opt/matlab_R2016b:/opt/matlab_R2016b"
+        def dockerArgs = "-u 0 --privileged --device=/dev/mtcadummys0 --device=/dev/mtcadummys1 --device=/dev/mtcadummys2 --device=/dev/mtcadummys3 --device=/dev/llrfdummys4 --device=/dev/noioctldummys5 --device=/dev/pcieunidummys6 -v /var/run/lock/mtcadummy:/var/run/lock/mtcadummy -v /opt/matlab_R2016b:/opt/matlab_R2016b -v /home/msk_jenkins:/home/msk_jenkins"
         docker.image("builder:${label}").inside(dockerArgs) {
           script {
             helper.doTesting(label, buildType)