diff --git a/CMakeModules/prepare_asapo.cmake b/CMakeModules/prepare_asapo.cmake
index 17681e5bd0645f497429addf73575c4728c531d6..99e4f0a58fa1964e7eeb44373b1277653e798a25 100644
--- a/CMakeModules/prepare_asapo.cmake
+++ b/CMakeModules/prepare_asapo.cmake
@@ -5,12 +5,22 @@ function(prepare_asapo)
     get_target_property(AUTHORIZER_FULLPATH asapo-authorizer EXENAME)
     get_target_property(BROKER_FULLPATH asapo-broker EXENAME)
     set(WORK_DIR ${CMAKE_CURRENT_BINARY_DIR})
+
+    if(NOT DEFINED RECEIVER_USE_CACHE)
+        set(RECEIVER_USE_CACHE true)
+    endif()
+
+    if(NOT DEFINED RECEIVER_WRITE_TO_DISK)
+        set(RECEIVER_WRITE_TO_DISK true)
+    endif()
+
     if (WIN32)
-        configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/receiver.json.tpl.win receiver.json.tpl COPYONLY)
+        configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/receiver.json.tpl.win.in receiver.json.tpl @ONLY)
     else()
-        configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/receiver.json.tpl.lin receiver.json.tpl COPYONLY)
+        configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/receiver.json.tpl.lin.in receiver.json.tpl @ONLY)
     endif()
-    configure_file(${CMAKE_SOURCE_DIR}/config/nomad/receiver.nmd.in  receiver.nmd @ONLY)
+
+    configure_file(${CMAKE_SOURCE_DIR}/config/nomad/receiver.nmd.in  receiver.nmd @ONLY)
     configure_file(${CMAKE_SOURCE_DIR}/config/nomad/discovery.nmd.in  discovery.nmd @ONLY)
     configure_file(${CMAKE_SOURCE_DIR}/config/nomad/authorizer.nmd.in  authorizer.nmd @ONLY)
     configure_file(${CMAKE_SOURCE_DIR}/config/nomad/broker.nmd.in  broker.nmd @ONLY)
diff --git a/config/bamboo/bamboo.java b/config/bamboo/bamboo.java
new file mode 100644
index 0000000000000000000000000000000000000000..df6225a699647e503c535ab50eebe12f4081c1b6
--- /dev/null
+++ b/config/bamboo/bamboo.java
@@ -0,0 +1,407 @@
+import com.atlassian.bamboo.specs.api.BambooSpec;
+import com.atlassian.bamboo.specs.api.builders.BambooKey;
+import com.atlassian.bamboo.specs.api.builders.BambooOid;
+import com.atlassian.bamboo.specs.api.builders.Variable;
+import com.atlassian.bamboo.specs.api.builders.applink.ApplicationLink;
+import com.atlassian.bamboo.specs.api.builders.notification.Notification;
+import com.atlassian.bamboo.specs.api.builders.permission.PermissionType;
+import com.atlassian.bamboo.specs.api.builders.permission.Permissions;
+import com.atlassian.bamboo.specs.api.builders.permission.PlanPermissions;
+import com.atlassian.bamboo.specs.api.builders.plan.Job;
+import com.atlassian.bamboo.specs.api.builders.plan.Plan;
+import com.atlassian.bamboo.specs.api.builders.plan.PlanIdentifier;
+import com.atlassian.bamboo.specs.api.builders.plan.Stage;
+import com.atlassian.bamboo.specs.api.builders.plan.artifact.Artifact;
+import com.atlassian.bamboo.specs.api.builders.plan.branches.BranchCleanup;
+import com.atlassian.bamboo.specs.api.builders.plan.branches.PlanBranchManagement;
+import com.atlassian.bamboo.specs.api.builders.plan.configuration.ConcurrentBuilds;
+import com.atlassian.bamboo.specs.api.builders.project.Project;
+import com.atlassian.bamboo.specs.api.builders.repository.VcsChangeDetection;
+import com.atlassian.bamboo.specs.api.builders.requirement.Requirement;
+import com.atlassian.bamboo.specs.builders.notification.FirstFailedJobNotification;
+import com.atlassian.bamboo.specs.builders.notification.ResponsibleRecipient;
+import com.atlassian.bamboo.specs.builders.repository.bitbucket.server.BitbucketServerRepository;
+import com.atlassian.bamboo.specs.builders.repository.viewer.BitbucketServerRepositoryViewer;
+import com.atlassian.bamboo.specs.builders.task.ArtifactDownloaderTask;
+import com.atlassian.bamboo.specs.builders.task.CheckoutItem;
+import com.atlassian.bamboo.specs.builders.task.CommandTask;
+import com.atlassian.bamboo.specs.builders.task.DockerBuildImageTask;
+import com.atlassian.bamboo.specs.builders.task.DockerPushImageTask;
+import com.atlassian.bamboo.specs.builders.task.DockerRunContainerTask;
+import com.atlassian.bamboo.specs.builders.task.DownloadItem;
+import com.atlassian.bamboo.specs.builders.task.InjectVariablesTask;
+import com.atlassian.bamboo.specs.builders.task.ScriptTask;
+import com.atlassian.bamboo.specs.builders.task.TestParserTask;
+import com.atlassian.bamboo.specs.builders.task.VcsCheckoutTask;
+import com.atlassian.bamboo.specs.builders.trigger.BitbucketServerTrigger;
+import com.atlassian.bamboo.specs.model.task.InjectVariablesScope;
+import com.atlassian.bamboo.specs.model.task.ScriptTaskProperties;
+import com.atlassian.bamboo.specs.model.task.TestParserTaskProperties;
+import com.atlassian.bamboo.specs.util.BambooServer;
+
+@BambooSpec
+public class PlanSpec {
+
+    public Plan plan() {
+        final Plan plan = new Plan(new Project()
+                .oid(new BambooOid("yfhea8w6cete"))
+                .key(new BambooKey("BHID2"))
+                .name("ASAPO"),
+            "ASAPO Main",
+            new BambooKey("BT"))
+            .oid(new BambooOid("yf7p2niyiryb"))
+            .description("Build ASAPO software and run all tests")
+            .pluginConfigurations(new ConcurrentBuilds()
+                    .useSystemWideDefault(false)
+                    .maximumNumberOfConcurrentBuilds(3))
+            .stages(new Stage("Build Debug And Test")
+                    .jobs(new Job("Linux - Debug",
+                            new BambooKey("BUILD"))
+                            .description("Builds ASAPO and runs tests")
+                            .artifacts(new Artifact()
+                                    .name("libcommon.so")
+                                    .copyPattern("libcommon.so")
+                                    .location("build/common/cpp/"),
+                                new Artifact()
+                                    .name("inotify-event-detector-cpp")
+                                    .copyPattern("inotify-event-detector-cpp")
+                                    .location("build/producer/inotify-event-detector-cpp/"),
+                                new Artifact()
+                                    .name("Documentation")
+                                    .copyPattern("**/*")
+                                    .location("doxygen/html"),
+                                new Artifact()
+                                    .name("libproducer-api.so")
+                                    .copyPattern("libproducer-api.so")
+                                    .location("build/producer/api/"),
+                                new Artifact()
+                                    .name("Coverage-Producer")
+                                    .copyPattern("**/*")
+                                    .location("build/coverage-producer-api"),
+                                new Artifact()
+                                    .name("Coverage-Worker")
+                                    .copyPattern("**/*")
+                                    .location("build/coverage-hidra2-worker"),
+                                new Artifact()
+                                    .name("Coverage-Broker")
+                                    .copyPattern("coverage.html")
+                                    .location("build/broker"))
+                            .tasks(new VcsCheckoutTask()
+                                    .description("Checkout Default Repository")
+                                    .checkoutItems(new CheckoutItem().defaultRepository())
+                                    .cleanCheckout(true),
+                                new CommandTask()
+                                    .description("get submodules")
+                                    .executable("bash")
+                                    .argument("-c \"git submodule init && git submodule update\""),
+                                new CommandTask()
+                                    .description("recreate build folder")
+                                    .executable("bash")
+                                    .argument("-c \"mkdir build\""),
+                                new CommandTask()
+                                    .description("get go modules for broker")
+                                    .executable("bash")
+                                    .argument("-c \"go get ./...\" || echo go modules not updated")
+                                    .environmentVariables("PATH=$PATH:/usr/local/go/bin GOPATH=/opt/asapo/go:${bamboo.build.working.directory}/broker:${bamboo.build.working.directory}/common/go")
+                                    .workingSubdirectory("broker"),
+                                new CommandTask()
+                                    .description("get go modules for discovery service")
+                                    .executable("bash")
+                                    .argument("-c \"go get ./...\" || echo go modules not updated")
+                                    .environmentVariables("PATH=$PATH:/usr/local/go/bin GOPATH=/opt/asapo/go:${bamboo.build.working.directory}/discovery:${bamboo.build.working.directory}/common/go")
+                                    .workingSubdirectory("discovery"),
+                                new CommandTask()
+                                    .description("build")
+                                    .executable("bash")
+                                    .argument("-c \"/opt/asapo/cmake-3.7.2/bin/cmake -DLIBCURL_DIR=/opt/asapo/libcurl -DCMAKE_BUILD_TYPE=Debug -DBUILD_TESTS=ON -DBUILD_DOCS=ON -DBUILD_INTEGRATION_TESTS=ON -DBUILD_EXAMPLES=ON -DBUILD_WORKER_TOOLS=ON -DBUILD_BROKER=ON .. && make\"")
+                                    .environmentVariables("PATH=$PATH:/usr/local/go/bin GOPATH=/opt/asapo/go GOROOT=/usr/local/go")
+                                    .workingSubdirectory("build"),
+                                new CommandTask()
+                                    .description("doxygen")
+                                    .enabled(false)
+                                    .executable("Doxygen")
+                                    .argument("doxygen.ini"),
+                                new CommandTask()
+                                    .description("Make documentation")
+                                    .executable("bash")
+                                    .argument("-c \"ls -la .. && make documentation && ls -la ../doxygen/html\"")
+                                    .workingSubdirectory("build"),
+                                new CommandTask()
+                                    .description("ls after build")
+                                    .executable("bash")
+                                    .argument("-c \"pwd; ls -la\"")
+                                    .workingSubdirectory("build"),
+                                new DockerRunContainerTask()
+                                    .enabled(false)
+                                    .imageName("docker.io/utgarda/debian-build-essentials-git-cmake:latest")
+                                    .serviceURLPattern("http://localhost:${docker.port}")
+                                    .containerCommand("cmake")
+                                    .containerWorkingDirectory("/data")
+
+                                    .clearVolumeMappings()
+                                    .appendVolumeMapping("${bamboo.working.directory}", "/data"),
+                                new CommandTask()
+                                    .description("run all tests")
+                                    .executable("bash")
+                                    .argument("-c \"/opt/asapo/cmake-3.7.2/bin/ctest --no-compress-output -T Test -V\"")
+                                    .environmentVariables("PATH=$PATH:/usr/local/go/bin GOPATH=/opt/asapo/go")
+                                    .workingSubdirectory("build"))
+                            .finalTasks(new CommandTask()
+                                    .description("convert tests")
+                                    .executable("bash")
+                                    .argument("-c \"python3 ../3d_party/ctest_junit_convert/convert.py -x ../3d_party/ctest_junit_convert/conv.xsl -t . > Testing/JUnitTestResults.xml\"")
+                                    .workingSubdirectory("build"),
+                                new TestParserTask(TestParserTaskProperties.TestType.JUNIT)
+                                    .resultDirectories("**/Testing/*.xml"))
+                            .requirements(new Requirement("system.docker.executable")),
+                        new Job("Windows - Debug",
+                            new BambooKey("BOW"))
+                            .description("Builds ASAPO and runs tests")
+                            .tasks(new VcsCheckoutTask()
+                                    .description("Checkout Default Repository")
+                                    .checkoutItems(new CheckoutItem().defaultRepository())
+                                    .cleanCheckout(true),
+                                new ScriptTask()
+                                    .description("Create build folder")
+                                    .interpreter(ScriptTaskProperties.Interpreter.BINSH_OR_CMDEXE)
+                                    .inlineBody("mkdir build"),
+                                new ScriptTask()
+                                    .description("Go modules")
+                                    .interpreter(ScriptTaskProperties.Interpreter.BINSH_OR_CMDEXE)
+                                    .inlineBody("cd discovery\nSET GOPATH=\"c:\\GoPath\";\"${bamboo.build.working.directory}\\discovery\";\"${bamboo.build.working.directory}\\common\\go\"\ngo get ./... || echo go modules not updated\ncd ../broker\nSET GOPATH=\"c:\\GoPath\";\"${bamboo.build.working.directory}\\broker\";\"${bamboo.build.working.directory}\\common\\go\"\ngo get ./... || echo go modules not updated"),
+                                new ScriptTask()
+                                    .description("build with CMake")
+                                    .interpreter(ScriptTaskProperties.Interpreter.BINSH_OR_CMDEXE)
+                                    .inlineBody("SET GOPATH=\"c:\\GoPath\"\n\"c:\\Program Files\\CMake\\bin\\cmake\" -DLIBCURL_DIR=c:/Curl -Dgtest_SOURCE_DIR=c:/googletest -DCMAKE_BUILD_TYPE=Debug -DBUILD_TESTS=ON -DBUILD_DOCS=ON -DBUILD_INTEGRATION_TESTS=ON -DBUILD_EXAMPLES=ON -DBUILD_WORKER_TOOLS=ON -DBUILD_BROKER=ON -Dlibmongoc-static-1.0_DIR=\"c:\\mongo-c-driver\\lib\\cmake\\libmongoc-static-1.0\" -Dlibbson-static-1.0_DIR=\"c:\\mongo-c-driver\\lib\\cmake\\libbson-static-1.0\" ..\n\"c:\\Program Files\\CMake\\bin\\cmake\" --build .")
+                                    .workingSubdirectory("build"),
+                                new ScriptTask()
+                                    .description("Run tests")
+                                    .interpreter(ScriptTaskProperties.Interpreter.BINSH_OR_CMDEXE)
+                                    .inlineBody("\"c:\\Program Files\\CMake\\bin\\ctest\" -C Debug -T test --no-compress-output -V")
+                                    .workingSubdirectory("build"))
+                            .finalTasks(new ScriptTask()
+                                    .inlineBody("\"c:\\Program Files (x86)\\Python\\python.exe\" ..\\3d_party\\ctest_junit_convert\\convert.py -x ..\\3d_party\\ctest_junit_convert/conv.xsl -t . > Testing\\JUnitTestResults.xml")
+                                    .workingSubdirectory("build"),
+                                new TestParserTask(TestParserTaskProperties.TestType.JUNIT)
+                                    .resultDirectories("**/Testing/*.xml"))
+                            .requirements(new Requirement("system.builder.devenv.Visual Studio 2015 CE"))),
+                new Stage("Build Release")
+                    .jobs(new Job("Windows - Release",
+                            new BambooKey("WR"))
+                            .artifacts(new Artifact()
+                                    .name("Dummy Producer Windows")
+                                    .copyPattern("dummy-data-producer.exe")
+                                    .location("build/examples/producer/dummy-data-producer")
+                                    .shared(true),
+                                new Artifact()
+                                    .name("File Monitor Producer Windows")
+                                    .copyPattern("asapo-eventmon-producer.exe")
+                                    .location("build/producer/event_monitor_producer")
+                                    .shared(true))
+                            .tasks(new VcsCheckoutTask()
+                                    .description("Checkout Default Repository")
+                                    .checkoutItems(new CheckoutItem().defaultRepository())
+                                    .cleanCheckout(true),
+                                new ScriptTask()
+                                    .description("Create build folder")
+                                    .interpreter(ScriptTaskProperties.Interpreter.BINSH_OR_CMDEXE)
+                                    .inlineBody("mkdir build"),
+                                new ScriptTask()
+                                    .description("build with CMake")
+                                    .interpreter(ScriptTaskProperties.Interpreter.BINSH_OR_CMDEXE)
+                                    .inlineBody("SET GOPATH=\"c:\\GoPath\"\n\"c:\\Program Files\\CMake\\bin\\cmake\" -DLIBCURL_DIR=c:/Curl -Dgtest_SOURCE_DIR=c:/googletest -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTS=OFF -DBUILD_DOCS=OFF -DBUILD_INTEGRATION_TESTS=OFF -DBUILD_EXAMPLES=ON -DBUILD_WORKER_TOOLS=ON -DBUILD_BROKER=ON -Dlibmongoc-static-1.0_DIR=\"c:\\mongo-c-driver\\lib\\cmake\\libmongoc-static-1.0\" -Dlibbson-static-1.0_DIR=\"c:\\mongo-c-driver\\lib\\cmake\\libbson-static-1.0\" ..\n\"c:\\Program Files\\CMake\\bin\\cmake\" --build .")
+                                    .workingSubdirectory("build"),
+                                new ScriptTask()
+                                    .description("Run tests")
+                                    .interpreter(ScriptTaskProperties.Interpreter.BINSH_OR_CMDEXE)
+                                    .inlineBody("\"c:\\Program Files\\CMake\\bin\\ctest\" -C Debug -T test --no-compress-output -V")
+                                    .workingSubdirectory("build"),
+                                new ScriptTask()
+                                    .description("Push binary")
+                                    .enabled(false)
+                                    .interpreter(ScriptTaskProperties.Interpreter.WINDOWS_POWER_SHELL)
+                                    .inlineBody("git checkout -B ${bamboo.planRepository.branchName}\ngit push -u origin ${bamboo.planRepository.branchName}\ncopy ../build/examples/producer/dummy-data-producer/dummy-producer.exe .\ngit add -A .\ngit commit -m \"update from ${bamboo.buildNumber}\"\ngit push")
+                                    .workingSubdirectory("bin"))
+                            .finalTasks(new ScriptTask()
+                                    .inlineBody("\"c:\\Program Files (x86)\\Python\\python.exe\" ..\\3d_party\\ctest_junit_convert\\convert.py -x ..\\3d_party\\ctest_junit_convert/conv.xsl -t . > Testing\\JUnitTestResults.xml")
+                                    .workingSubdirectory("build"),
+                                new TestParserTask(TestParserTaskProperties.TestType.JUNIT)
+                                    .resultDirectories("**/Testing/*.xml"))
+                            .requirements(new Requirement("system.builder.devenv.Visual Studio 2015 CE")),
+                        new Job("Linux - Release",
+                            new BambooKey("LIN"))
+                            .artifacts(new Artifact()
+                                    .name("Receiver")
+                                    .copyPattern("receiver")
+                                    .location("build_release/receiver")
+                                    .shared(true),
+                                new Artifact()
+                                    .name("Authorizer")
+                                    .copyPattern("asapo-authorizer")
+                                    .location("build_release/authorizer")
+                                    .shared(true),
+                                new Artifact()
+                                    .name("Discovery")
+                                    .copyPattern("asapo-discovery")
+                                    .location("build_release/discovery")
+                                    .shared(true),
+                                new Artifact()
+                                    .name("Broker")
+                                    .copyPattern("asapo-broker")
+                                    .location("build_release/broker")
+                                    .shared(true),
+                                new Artifact()
+                                    .name("Dummy Producer Linux")
+                                    .copyPattern("dummy-data-producer")
+                                    .location("build_release/examples/producer/dummy-data-producer")
+                                    .shared(true),
+                                new Artifact()
+                                    .name("File Monitor Producer Linux")
+                                    .copyPattern("asapo-eventmon-producer")
+                                    .location("build_release/producer/event_monitor_producer")
+                                    .shared(true),
+                                new Artifact()
+                                    .name("Worker Linux")
+                                    .copyPattern("getnext_broker")
+                                    .location("build_release/examples/worker/getnext_broker")
+                                    .shared(true))
+                            .tasks(new VcsCheckoutTask()
+                                    .checkoutItems(new CheckoutItem().defaultRepository())
+                                    .cleanCheckout(true),
+                                new CommandTask()
+                                    .description("recreate build folder")
+                                    .executable("bash")
+                                    .argument("-c \"mkdir build_release\""),
+                                new CommandTask()
+                                    .description("build")
+                                    .executable("bash")
+                                    .argument("-c \"/opt/asapo/cmake-3.7.2/bin/cmake -DLIBCURL_DIR=/opt/asapo/libcurl -DCMAKE_BUILD_TYPE=Release  -DBUILD_EXAMPLES=ON -DBUILD_WORKER_TOOLS=ON -DBUILD_BROKER=ON .. && make\"")
+                                    .environmentVariables("PATH=$PATH:/usr/local/go/bin GOPATH=/opt/asapo/go GOROOT=/usr/local/go")
+                                    .workingSubdirectory("build_release"),
+                                new CommandTask()
+                                    .description("run all tests")
+                                    .executable("bash")
+                                    .argument("-c \"/opt/asapo/cmake-3.7.2/bin/ctest --no-compress-output -T Test -V\"")
+                                    .environmentVariables("PATH=$PATH:/usr/local/go/bin GOPATH=/opt/asapo/go")
+                                    .workingSubdirectory("build"))
+                            .finalTasks(new CommandTask()
+                                    .description("convert tests")
+                                    .executable("bash")
+                                    .argument("-c \"python3 ../3d_party/ctest_junit_convert/convert.py -x ../3d_party/ctest_junit_convert/conv.xsl -t . > Testing/JUnitTestResults.xml\"")
+                                    .workingSubdirectory("build"),
+                                new TestParserTask(TestParserTaskProperties.TestType.JUNIT)
+                                    .resultDirectories("**/Testing/*.xml"))),
+                new Stage("Create Docker Containers")
+                    .jobs(new Job("Create Docker",
+                            new BambooKey("DOC"))
+                            .tasks(new VcsCheckoutTask()
+                                    .checkoutItems(new CheckoutItem().defaultRepository()),
+                                new ScriptTask()
+                                    .description("prepare version file")
+                                    .inlineBody("set -e\ntag=`git describe --tags --dirty`\nif [ \"${bamboo.planRepository.branchName}\" = \"master\" ]; then\necho asapo_tag=$tag > version\necho asapo_dev=\"\" >> version\nelif [ \"${bamboo.planRepository.branchName}\" = \"develop\" ]; then\necho asapo_tag=${bamboo.planRepository.branchName}.${tag} > version\necho asapo_dev=\"-dev\" >> version\nelse\necho asapo_tag=${bamboo.planRepository.branchName}.latest > version\necho asapo_dev=\"-dev\" >> version\nfi"),
+                                new InjectVariablesTask()
+                                    .description("Set docker tag")
+                                    .path("version")
+                                    .namespace("inject")
+                                    .scope(InjectVariablesScope.RESULT),
+                                new ArtifactDownloaderTask()
+                                    .description("Copy All")
+                                    .artifacts(new DownloadItem()
+                                            .artifact("Broker")
+                                            .path("broker/docker"),
+                                        new DownloadItem()
+                                            .artifact("Authorizer")
+                                            .path("authorizer/docker"),
+                                        new DownloadItem()
+                                            .artifact("Discovery")
+                                            .path("discovery/docker"),
+                                        new DownloadItem()
+                                            .artifact("Receiver")
+                                            .path("receiver/docker")),
+                                new DockerBuildImageTask()
+                                    .description("Build image")
+                                    .enabled(false)
+                                    .workingSubdirectory("receiver/docker")
+                                    .imageName("yakser/asapo-receiver${bamboo.inject.asapo_dev}:${bamboo.inject.asapo_tag}")
+                                    .useCache(true)
+                                    .dockerfileInWorkingDir(),
+                                new DockerPushImageTask()
+                                    .dockerHubImage("yakser/asapo-receiver${bamboo.inject.asapo_dev}:${bamboo.inject.asapo_tag}")
+                                    .authentication("yakubov", /*FIXME put your password here*/),
+                                new ScriptTask()
+                                    .description("Build Dockers")
+                                    .inlineBody("set -e\n\nservices=\"broker authorizer discovery receiver\"\n\ndocker login -u=${bamboo.docker_username} -p=${bamboo.docker_userpassword}\n\n\nfor service in $services\ndo\n cd ${service}/docker\n docker build -t yakser/asapo-${service}${bamboo.inject.asapo_dev}:${bamboo.inject.asapo_tag} .\n docker push yakser/asapo-${service}${bamboo.inject.asapo_dev}:${bamboo.inject.asapo_tag}\n cd -\ndone\n\ndocker logout"))
+                            .requirements(new Requirement("system.docker.executable"))),
+                new Stage("Push Binaries")
+                    .jobs(new Job("Push Binaries",
+                            new BambooKey("PWB"))
+                            .tasks(new ArtifactDownloaderTask()
+                                    .description("Get Binaries")
+                                    .artifacts(new DownloadItem()
+                                            .artifact("Dummy Producer Linux"),
+                                        new DownloadItem()
+                                            .artifact("Worker Linux"),
+                                        new DownloadItem()
+                                            .artifact("File Monitor Producer Windows"),
+                                        new DownloadItem()
+                                            .artifact("File Monitor Producer Linux"),
+                                        new DownloadItem()
+                                            .artifact("Dummy Producer Windows")),
+                                new ScriptTask()
+                                    .description("Push binaries")
+                                    .inlineBody("#!/bin/bash\nset -e\n\nfor path in *; do\n if [ -f \"${path}\" ]; then\n  root=\"${path#.}\";root=\"${path%\"$root\"}${root%.*}\"\n  ext=\"${path#\"$root\"}\"\n  echo $path\n  mv \"${path}\" \"${root}-${bamboo.inject.asapo_tag}${ext}\"\n fi\ndone\n\nrsync -avr . mirror@it-fs5:/\nwget -q -O- http://it-fs5.desy.de/cgi-bin/asapoSync.cgi\n\nrm *"))
+                            .requirements(new Requirement("system.docker.executable"))))
+            .planRepositories(new BitbucketServerRepository()
+                    .name("ASAPO")
+                    .oid(new BambooOid("yfbuqh970z6t"))
+                    .repositoryViewer(new BitbucketServerRepositoryViewer())
+                    .server(new ApplicationLink()
+                            .name("DESY Stash")
+                            .id("6a33db2c-8d71-3528-b029-8c5fcbe62101"))
+                    .projectKey("ASAPO")
+                    .repositorySlug("asapo")
+                    .sshPublicKey("ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCZHqvrcOMwtyVynQZMymalz2kKb5iaU9fhXYZp8EdM+OBFJeSTOeSksRhpNYrXSKwGH8H6yuRuEu2DF81ETJB32FVftV27KIg4i0bxd2jYPBGOf2aji7FdJkIc5e6VtSfUHKa7FiZLeXrkUjihIhQhXc+UUzvzHEpZBbYNoYf/2MztJU7djIQ/L0dXjfK0hemBzhbPmzPAQf8308PzbQIY7Fiba+LKpgJHWkQEluptBj8xJSt4l7akA6yXS3KIk3CsoCobCOKbtjK32U8OZCOdU+6Ne4ZgwHnjxH/BlZB4DpZzxx/PA+zNyzO9gl4h6OSoHFpszYxFmP7dkdVhNjLp https://bamboo.desy.de")
+                    .sshPrivateKey("-----BEGIN RSA PRIVATE KEY-----\nMIIEpAIBAAKCAQEAmR6r63DjMLclcp0GTMpmpc9pCm+YmlPX4V2GafBHTPjgRSXk\nkznkpLEYaTWK10isBh/B+srkbhLtgxfNREyQd9hVX7VduyiIOItG8Xdo2DwRjn9m\no4uxXSZCHOXulbUn1BymuxYmS3l65FI4oSIUIV3PlFM78xxKWQW2DaGH/9jM7SVO\n3YyEPy9HV43ytIXpgc4Wz5szwEH/N9PD820CGOxYm2viyqYCR1pEBJbqbQY/MSUr\neJe2pAOsl0tyiJNwrKAqGwjim7Yyt9lPDmQjnVPujXuGYMB548R/wZWQeA6Wc8cf\nzwPszcszvYJeIejkqBxabM2MRZj+3ZHVYTYy6QIDAQABAoIBAAb8XT+aoOLiGDVI\ncLckLNa3xcUSHlY8KQdBKxa+PaKHYEQHXIxTAdl5svFp3fHAgQiufj0g8JcNCgUH\nGvhYveaZ2htJvQMC8GQUBqKRnhjvdKWZrmcbwnXLfwyueNofr7d/ryOt+QVMf4zX\nK3o+gcib7RKJUZzf14yENDAfBINpoKJ2yQe42F80KagKGIxKsUqdw6EngICH61o+\n0L3aqKytCw3xwPqVqREKEAjrwkFUgZVU/LhRAfG7hV1AYaKBOV/xJw5SIzjcz8rK\nGVzfrKeI9RnjC852z9E1rkFTY6OC3Lgb+EGqWw7wEydt7Y0NsZCrqa+Qc1anU9DE\nKiQKNAECgYEA1R1PJMHB5RbueiCqINPg4jfq94CLj0BkOlSN84/hBt3WEf736fw9\nU0k7IiKOiotfd2jwUMKboB0pyaCjlq9zRqX5Aw3ssOLFDcJx3BdUY78jO6H9zdOh\nD1aTp/qXYrJpE3lCuGCFHWznuIFLC9mHlQAGlZ6tG/GaM531pcHftSkCgYEAt+61\ngF5p3U/vOAydj+r74azxfDZOWEvGJfcfM3pfahQRrO01OE5wlyF4mPrhO1CMVOVE\nmo5zwrpwe0x3qkjewwMu28B2/Tz2kWLViq8NkFTkGe67ZIP+g7i6ZHLGRdbmDGYq\npO/mARkLB4VpwKWZFOQVkShbcDvLNhe2pnGWh8ECgYEAq0/2MOv4O3nllhLv75ei\nrPaaUP7qMOtoJmOWAHZmQ6jLzoeRLmxvt6WkfVoeD0zeHxUiKSlnBJys3MHe/uBm\n4CHHPCdTXxXLpbXq5Stz0QLzBZrAdVZrn/LOmzebveEBCoBtm90q5G6JDw4QD6R7\nktEEef2l1lKuzFNsBiUE+ZECgYBjqFX1MLKhc+8EamlwkfxZwW+yQmZelufMqEHi\njXpnKmqNTJUaZf3BFSSXls80aScv1G1AZNC6AHRzifIIdKUl+nKIJJcUDNT33xoe\no0xxGF3i4yPriUz8p7luNXXSX2aT70NJzKXNkHkWYSX0eIUh+Zbp6HjqraskKuMO\n+dp6wQKBgQCddiXABL0nbUigPEfrQRAIwHMD0RpoRlHeAVjZ0XYKxuSt0glxbAP8\nDFUM845WcRcAQ4OPIGDAvN2B/u9kp+0djOWdfQOHUIKQUisgEvg7mbMhsiD2uW1F\nsSEfQkBNz2aES82dTR0aGzNE6nWihI8RofFEMa03+S6Czc3CU0QJjg==\n-----END RSA PRIVATE KEY-----\n")
+                    .sshCloneUrl("ssh://git@stash.desy.de:7999/asapo/asapo.git")
+                    .branch("develop")
+                    .remoteAgentCacheEnabled(false)
+                    .changeDetection(new VcsChangeDetection()))
+
+            .triggers(new BitbucketServerTrigger())
+            .variables(new Variable("docker_username",
+                    "yakser"),
+                new Variable("docker_userpassword",
+                    "BAMSCRT@0@0@SXl1He3DrKr2Dso1VPIn2g=="))
+            .planBranchManagement(new PlanBranchManagement()
+                    .createForVcsBranch()
+                    .delete(new BranchCleanup()
+                        .whenRemovedFromRepositoryAfterDays(7))
+                    .notificationForCommitters())
+            .notifications(new Notification()
+                    .type(new FirstFailedJobNotification())
+                    .recipients(new ResponsibleRecipient()));
+        return plan;
+    }
+
+    public PlanPermissions planPermission() {
+        final PlanPermissions planPermission = new PlanPermissions(new PlanIdentifier("BHID2", "BT"))
+            .permissions(new Permissions()
+                    .userPermissions("tcallsen", PermissionType.VIEW, PermissionType.EDIT)
+                    .userPermissions("yakubov", PermissionType.EDIT, PermissionType.VIEW, PermissionType.ADMIN, PermissionType.CLONE, PermissionType.BUILD)
+                    .userPermissions("cpatzke", PermissionType.VIEW, PermissionType.EDIT, PermissionType.BUILD, PermissionType.CLONE, PermissionType.ADMIN)
+                    .anonymousUserPermissionView());
+        return planPermission;
+    }
+
+    public static void main(String... argv) {
+        // By default, credentials are read from the '.credentials' file.
+        BambooServer bambooServer = new BambooServer("https://bamboo.desy.de");
+        final PlanSpec planSpec = new PlanSpec();
+
+        final Plan plan = planSpec.plan();
+        bambooServer.publish(plan);
+
+        final PlanPermissions planPermission = planSpec.planPermission();
+        bambooServer.publish(planPermission);
+    }
+}
diff --git a/config/nomad/authorizer.nmd.in b/config/nomad/authorizer.nmd.in
index e75992113d54a93c17de3d20b28d84d6289e509e..fd38b4c8e0b1b9578fbfcdf7861c418fa811d8b6 100644
--- a/config/nomad/authorizer.nmd.in
+++ b/config/nomad/authorizer.nmd.in
@@ -6,7 +6,7 @@ job "authorizer" {
   group "group" {
     count = 1
 
-    task "service" {
+    task "authorizer" {
       driver = "raw_exec"
 
       config {
diff --git a/config/nomad/broker.nmd.in b/config/nomad/broker.nmd.in
index 2d1ad5c1d42202ea3074690328f1c51346ba563c..dc2a629f07df368d1d05ff9cc6854396916ec7e9 100644
--- a/config/nomad/broker.nmd.in
+++ b/config/nomad/broker.nmd.in
@@ -6,7 +6,7 @@ job "broker" {
   group "group" {
     count = 1
 
-    task "service" {
+    task "broker" {
       driver = "raw_exec"
 
       config {
diff --git a/config/nomad/discovery.nmd.in b/config/nomad/discovery.nmd.in
index 60a8a174e1f5013ea963bd090684c6b9866cd504..cf39f034849d0e93c1435e3bc4bed6f13cfdfe0f 100644
--- a/config/nomad/discovery.nmd.in
+++ b/config/nomad/discovery.nmd.in
@@ -6,7 +6,7 @@ job "discovery" {
   group "group" {
     count = 1
 
-    task "service" {
+    task "discovery" {
       driver = "raw_exec"
 
       config {
diff --git a/config/nomad/receiver.nmd.in b/config/nomad/receiver.nmd.in
index 9642abb2314c77eb557a070c5e37ea1ef4dc30cd..a27372d18709ad38d81e73d74c07913b7672da4b 100644
--- a/config/nomad/receiver.nmd.in
+++ b/config/nomad/receiver.nmd.in
@@ -6,7 +6,7 @@ job "receiver" {
   group "group" {
     count = 1
 
-    task "service" {
+    task "receiver" {
       driver = "raw_exec"
 
       config {
diff --git a/examples/worker/getnext_broker/check_linux.sh b/examples/worker/getnext_broker/check_linux.sh
index c5a45345d7630048d8e11740bc550bdaf80f4f85..e50f058ad3d5d61202060ae22857f40459cf5092 100644
--- a/examples/worker/getnext_broker/check_linux.sh
+++ b/examples/worker/getnext_broker/check_linux.sh
@@ -1,5 +1,6 @@
 #!/usr/bin/env bash
 
+source_path=dummy
 database_name=test_run
 token_test_run=K38Mqc90iRv8fC7prcFHd994mF_wfUiJnWBfIjIzieo=
 
@@ -26,7 +27,7 @@ done
 
 sleep 1
 
-$@ 127.0.0.1:8400 $database_name 2 $token_test_run 1000 1 | grep "Processed 3 file(s)"
+$@ 127.0.0.1:8400 $source_path $database_name 2 $token_test_run 1000 1 | grep "Processed 3 file(s)"
 
 
 
diff --git a/examples/worker/getnext_broker/check_windows.bat b/examples/worker/getnext_broker/check_windows.bat
index c9f45b73079028ce6e64de2eb701ddb99b423f97..eaabc479ac42e9bfbbcdd6b9710a44a23de75c3d 100644
--- a/examples/worker/getnext_broker/check_windows.bat
+++ b/examples/worker/getnext_broker/check_windows.bat
@@ -1,4 +1,6 @@
+SET source_path=dummy
 SET database_name=test_run
+
 SET mongo_exe="c:\Program Files\MongoDB\Server\3.6\bin\mongo.exe"
 set token_test_run=K38Mqc90iRv8fC7prcFHd994mF_wfUiJnWBfIjIzieo=
 
@@ -11,7 +13,7 @@ ping 1.0.0.0 -n 10 -w 100 > nul
 for /l %%x in (1, 1, 3) do echo db.data.insert({"_id":%%x,"size":100,"name":"%%x","lastchange":1,"source":"none","buf_id":0}) | %mongo_exe% %database_name%  || goto :error
 
 
-"%1" 127.0.0.1:8400 %database_name% 1 %token_test_run% 1000 1 | findstr /c:"Processed 3 file" || goto :error
+"%1" 127.0.0.1:8400 %source_path% %database_name% 1 %token_test_run% 1000 1 | findstr /c:"Processed 3 file" || goto :error
 goto :clean
 
 :error
diff --git a/examples/worker/getnext_broker/getnext_broker.cpp b/examples/worker/getnext_broker/getnext_broker.cpp
index e69aef1aca864bbc104e8b5b48eb8b336048fb36..6e8707a79512a90e09b79c6f307deb27def97a58 100644
--- a/examples/worker/getnext_broker/getnext_broker.cpp
+++ b/examples/worker/getnext_broker/getnext_broker.cpp
@@ -14,6 +14,7 @@ using asapo::Error;
 
 struct Params {
     std::string server;
+    std::string file_path;
     std::string beamtime_id;
     std::string token;
     int timeout_ms;
@@ -41,11 +42,12 @@ std::vector<std::thread> StartThreads(const Params& params, std::vector<int>* nf
     auto exec_next = [&params, nfiles, errors](int i) {
         asapo::FileInfo fi;
         Error err;
-        auto broker = asapo::DataBrokerFactory::CreateServerBroker(params.server, params.beamtime_id, params.token, &err);
+        auto broker = asapo::DataBrokerFactory::CreateServerBroker(params.server, params.file_path, params.beamtime_id,
+                      params.token, &err);
         broker->SetTimeout(params.timeout_ms);
         asapo::FileData data;
         while ((err = broker->GetNext(&fi, params.read_data ? &data : nullptr)) == nullptr) {
-            if (params.read_data && (*nfiles)[i] < 10) {
+            if (params.read_data && (*nfiles)[i] < 10 && fi.size < 100) {
                 std::string s(reinterpret_cast<char const*>(data.get()));
                 std::cout << "Received: " << s << std::endl;
             }
@@ -86,18 +88,20 @@ int ReadAllData(const Params& params, uint64_t* duration_ms) {
 
 int main(int argc, char* argv[]) {
     asapo::ExitAfterPrintVersionIfNeeded("GetNext Broker Example", argc, argv);
-    if (argc != 7) {
-        std::cout << "Usage: " + std::string{argv[0]} +" <server> <run_name> <nthreads> <token> <timeout ms> <metaonly>" <<
+    if (argc != 8) {
+        std::cout << "Usage: " + std::string{argv[0]} +" <server> <files_path> <run_name> <nthreads> <token> <timeout ms> <metaonly>"
+                  <<
                   std::endl;
         exit(EXIT_FAILURE);
     }
     Params params;
     params.server = std::string{argv[1]};
-    params.beamtime_id = std::string{argv[2]};
-    params.nthreads = atoi(argv[3]);
-    params.token = std::string{argv[4]};
-    params.timeout_ms = atoi(argv[5]);
-    params.read_data = atoi(argv[6]) != 1;
+    params.file_path = std::string{argv[2]};
+    params.beamtime_id = std::string{argv[3]};
+    params.nthreads = atoi(argv[4]);
+    params.token = std::string{argv[5]};
+    params.timeout_ms = atoi(argv[6]);
+    params.read_data = atoi(argv[7]) != 1;
 
     uint64_t duration_ms;
     auto nfiles = ReadAllData(params, &duration_ms);
diff --git a/producer/api/src/request_handler_filesystem.cpp b/producer/api/src/request_handler_filesystem.cpp
index c5270e4bc9e1f6040c91002f201f94c43b294107..dcd27a114943b303ba3e6965861f819a16b314d7 100644
--- a/producer/api/src/request_handler_filesystem.cpp
+++ b/producer/api/src/request_handler_filesystem.cpp
@@ -14,7 +14,6 @@ namespace asapo {
 RequestHandlerFilesystem::RequestHandlerFilesystem(std::string destination_folder, uint64_t thread_id):
     io__{GenerateDefaultIO()}, log__{GetDefaultProducerLogger()}, destination_folder_{std::move(destination_folder)},
     thread_id_{thread_id} {
-
 }
 
 Error RequestHandlerFilesystem::ProcessRequestUnlocked(GenericRequest* request) {
diff --git a/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat b/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat
index 0d5ca0cda286ab2a8e089c7e829290b639a26a5c..c5b9762d7fad46fdc247acee6fe445debcbcad95 100644
--- a/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat
+++ b/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat
@@ -51,7 +51,7 @@ ping 1.0.0.0 -n 10 -w 100 > nul
 
 
 REM worker
-"%2" %proxy_address% %beamtime_id% 2 %token% 1000 1 | findstr /c:"Processed 3 file(s)"  || goto :error
+"%2" %proxy_address% %receiver_folder% %beamtime_id% 2 %token% 1000 1 | findstr /c:"Processed 3 file(s)"  || goto :error
 
 
 goto :clean
diff --git a/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp b/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp
index 89fccb0fdb6511e30b1ad6ac4090ab0eefc36e6f..a75f4dfeb0e846ab5e7d58515e8bb205e6437f4d 100644
--- a/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp
+++ b/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp
@@ -32,7 +32,7 @@ int main(int argc, char* argv[]) {
     auto args = GetArgs(argc, argv);
 
     asapo::Error err;
-    auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.uri, "", "", &err);
+    auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.uri, "", "", "", &err);
     auto server_broker = static_cast<asapo::ServerDataBroker*>(broker.get());
 
     asapo::HttpCode code;
diff --git a/tests/automatic/full_chain/CMakeLists.txt b/tests/automatic/full_chain/CMakeLists.txt
index a4baadb24c0ec93b77bdb904d9c1995bb9decf52..0b813e3898f37c65a48e2b9063b788fbde3a954f 100644
--- a/tests/automatic/full_chain/CMakeLists.txt
+++ b/tests/automatic/full_chain/CMakeLists.txt
@@ -1,4 +1,5 @@
 add_subdirectory(simple_chain)
 add_subdirectory(two_beamlines)
 add_subdirectory(simple_chain_filegen)
-add_subdirectory(simple_chain_filegen_readdata)
+add_subdirectory(simple_chain_filegen_readdata_cache)
+add_subdirectory(simple_chain_filegen_readdata_file)
diff --git a/tests/automatic/full_chain/simple_chain/check_linux.sh b/tests/automatic/full_chain/simple_chain/check_linux.sh
index e62d00189e772a08022b24f94a0f479e4ba49e64..47b94d971cbb3d55a0b312a13451a9403d52a968 100644
--- a/tests/automatic/full_chain/simple_chain/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain/check_linux.sh
@@ -44,4 +44,4 @@ $1 localhost:8400 ${beamtime_id} 100 1000 4 0 100 &
 #producerid=`echo $!`
 
 
-$2 ${proxy_address} ${beamtime_id} 2 $token 1000 1 | grep "Processed 1000 file(s)"
+$2 ${proxy_address} ${receiver_folder} ${beamtime_id} 2 $token 1000 1 | grep "Processed 1000 file(s)"
diff --git a/tests/automatic/full_chain/simple_chain/check_windows.bat b/tests/automatic/full_chain/simple_chain/check_windows.bat
index 37b6b5cfeb1c91aa2dd52ebbf86938438c7fbd8b..4701e42d90d2ebc8a4b900b551cc552addaf2405 100644
--- a/tests/automatic/full_chain/simple_chain/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain/check_windows.bat
@@ -26,7 +26,7 @@ start /B "" "%1" %proxy_address% %beamtime_id% 100 1000 4 0 100
 ping 1.0.0.0 -n 1 -w 100 > nul
 
 REM worker
-"%2" %proxy_address% %beamtime_id% 2 %token% 1000 1 | findstr /c:"Processed 1000 file(s)"  || goto :error
+"%2" %proxy_address% %receiver_folder% %beamtime_id% 2 %token% 1000 1 | findstr /c:"Processed 1000 file(s)"  || goto :error
 
 
 goto :clean
diff --git a/tests/automatic/full_chain/simple_chain_filegen/CMakeLists.txt b/tests/automatic/full_chain/simple_chain_filegen/CMakeLists.txt
index 2df30fa61e0c07b7d80307d4f08a0f1013395ac3..768562c03d2341c99607114de84028cef78ac9c6 100644
--- a/tests/automatic/full_chain/simple_chain_filegen/CMakeLists.txt
+++ b/tests/automatic/full_chain/simple_chain_filegen/CMakeLists.txt
@@ -1,4 +1,4 @@
-set(TARGET_NAME full_chain_simple_chain)
+set(TARGET_NAME full_chain_simple_chain_filegen_producer)
 
 ################################
 # Testing
diff --git a/tests/automatic/full_chain/simple_chain_filegen/check_linux.sh b/tests/automatic/full_chain/simple_chain_filegen/check_linux.sh
index 35cbb39ea19528f0b1f442330541c68f0c1bb255..7d0145df104d38a38e54a90bcc6622334743eefc 100644
--- a/tests/automatic/full_chain/simple_chain_filegen/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_filegen/check_linux.sh
@@ -51,4 +51,4 @@ echo hello > /tmp/asapo/test_in/test1/file1
 echo hello > /tmp/asapo/test_in/test1/file2
 echo hello > /tmp/asapo/test_in/test2/file2
 
-$2 ${proxy_address} ${beamtime_id} 2 $token 1000 1 | grep "Processed 3 file(s)"
+$2 ${proxy_address} ${receiver_folder} ${beamtime_id} 2 $token 1000 1 | grep "Processed 3 file(s)"
diff --git a/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat
index 1ccd90e131199a0e39a50a1b6d3d4a68dd78e207..b92529afea36cac93da1cbdc0b4495b26031c2b5 100644
--- a/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat
@@ -40,7 +40,7 @@ ping 1.0.0.0 -n 10 -w 100 > nul
 
 
 REM worker
-"%2" %proxy_address% %beamtime_id% 2 %token% 1000  1 | findstr /c:"Processed 3 file(s)"  || goto :error
+"%2" %proxy_address% %receiver_folder% %beamtime_id% 2 %token% 1000  1 | findstr /c:"Processed 3 file(s)"  || goto :error
 
 
 goto :clean
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/CMakeLists.txt b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..7acab0298869b8bd336b8c6d5dd610aebd638451
--- /dev/null
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/CMakeLists.txt
@@ -0,0 +1,16 @@
+set(TARGET_NAME full_chain_simple_chain_withdata_fromcache)
+
+################################
+# Testing
+################################
+set(RECEIVER_WRITE_TO_DISK false)
+prepare_asapo()
+if (UNIX)
+    set (ROOT_PATH "/tmp/asapo/")
+else()
+    set (ROOT_PATH "c:\\\\tmp\\\\asapo\\\\")
+endif()
+
+configure_file(test.json.in test.json @ONLY)
+
+add_script_test("${TARGET_NAME}" "$<TARGET_FILE:event-monitor-producer-bin> $<TARGET_FILE:getnext_broker> $<TARGET_PROPERTY:asapo,EXENAME>" nomem)
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata/check_linux.sh b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_linux.sh
similarity index 94%
rename from tests/automatic/full_chain/simple_chain_filegen_readdata/check_linux.sh
rename to tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_linux.sh
index 33b50a3b79eda29686bf3633dcb773640516c6e3..5635097a37a0104889b31d47754768126047dc26 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_linux.sh
@@ -52,7 +52,7 @@ echo -n hello1 > /tmp/asapo/test_in/test1/file1
 echo -n hello2 > /tmp/asapo/test_in/test1/file2
 echo -n hello3 > /tmp/asapo/test_in/test2/file2
 
-$2 ${proxy_address} ${beamtime_id} 2 $token 1000 0 > out.txt
+$2 ${proxy_address} ${receiver_folder} ${beamtime_id} 2 $token 1000 0 > out.txt
 cat out.txt
 grep "Processed 3 file(s)" out.txt
 grep "Received: hello1" out.txt
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_windows.bat
new file mode 100644
index 0000000000000000000000000000000000000000..31dfcfea0f27d06d988578aa6fe795b250271b36
--- /dev/null
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_windows.bat
@@ -0,0 +1,72 @@
+
+
+SET mongo_exe="c:\Program Files\MongoDB\Server\3.6\bin\mongo.exe"
+SET beamtime_id=asapo_test
+SET beamline=test
+SET receiver_root_folder=c:\tmp\asapo\receiver\files
+SET receiver_folder="%receiver_root_folder%\%beamline%\%beamtime_id%"
+
+set producer_short_name="%~nx1"
+
+
+"%3" token -secret broker_secret.key %beamtime_id% > token
+set /P token=< token
+
+set proxy_address="127.0.0.1:8400"
+
+echo db.%beamtime_id%.insert({dummy:1}) | %mongo_exe% %beamtime_id%
+
+c:\opt\consul\nomad run receiver.nmd
+c:\opt\consul\nomad run authorizer.nmd
+c:\opt\consul\nomad run discovery.nmd
+c:\opt\consul\nomad run broker.nmd
+c:\opt\consul\nomad run nginx.nmd
+
+ping 1.0.0.0 -n 10 -w 100 > nul
+
+REM producer
+mkdir %receiver_folder%
+mkdir  c:\tmp\asapo\test_in\test1
+mkdir  c:\tmp\asapo\test_in\test2
+start /B "" "%1" test.json
+
+ping 1.0.0.0 -n 3 -w 100 > nul
+
+echo hello1 > c:\tmp\asapo\test_in\test1\file1
+echo hello2 > c:\tmp\asapo\test_in\test1\file2
+echo hello3 > c:\tmp\asapo\test_in\test2\file2
+
+ping 1.0.0.0 -n 10 -w 100 > nul
+
+
+REM worker
+"%2" %proxy_address%  %receiver_folder% %beamtime_id% 2 %token% 1000 0 > out.txt
+type out.txt
+findstr /i /l /c:"Processed 3 file(s)" out.txt || goto :error
+findstr /i /l /c:"Received: hello1" out.txt || goto :error
+findstr /i /l /c:"Received: hello2" out.txt || goto :error
+findstr /i /l /c:"Received: hello3" out.txt || goto :error
+
+
+goto :clean
+
+:error
+call :clean
+exit /b 1
+
+:clean
+c:\opt\consul\nomad stop receiver
+c:\opt\consul\nomad stop discovery
+c:\opt\consul\nomad stop broker
+c:\opt\consul\nomad stop authorizer
+c:\opt\consul\nomad stop nginx
+rmdir /S /Q %receiver_root_folder%
+rmdir /S /Q c:\tmp\asapo\test_in\test1
+rmdir /S /Q c:\tmp\asapo\test_in\test2
+Taskkill /IM "%producer_short_name%" /F
+del /f out.txt
+
+del /f token
+echo db.dropDatabase() | %mongo_exe% %beamtime_id%
+
+
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata/test.json.in b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/test.json.in
similarity index 100%
rename from tests/automatic/full_chain/simple_chain_filegen_readdata/test.json.in
rename to tests/automatic/full_chain/simple_chain_filegen_readdata_cache/test.json.in
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata/CMakeLists.txt b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/CMakeLists.txt
similarity index 81%
rename from tests/automatic/full_chain/simple_chain_filegen_readdata/CMakeLists.txt
rename to tests/automatic/full_chain/simple_chain_filegen_readdata_file/CMakeLists.txt
index 56fa6565e797d3aeaa6eab913c4897fad0b46209..b5bfac21ef5109e54f34d314bcf07a4f9b7f9f93 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata/CMakeLists.txt
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/CMakeLists.txt
@@ -1,8 +1,9 @@
-set(TARGET_NAME full_chain_simple_chain_withdata)
+set(TARGET_NAME full_chain_simple_chain_withdata_fromfile)
 
 ################################
 # Testing
 ################################
+set(RECEIVER_USE_CACHE false)
 prepare_asapo()
 if (UNIX)
     set (ROOT_PATH "/tmp/asapo/")
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_linux.sh b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_linux.sh
new file mode 100644
index 0000000000000000000000000000000000000000..5635097a37a0104889b31d47754768126047dc26
--- /dev/null
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_linux.sh
@@ -0,0 +1,61 @@
+#!/usr/bin/env bash
+
+set -e
+
+trap Cleanup EXIT
+
+beamtime_id=asapo_test
+token=`$3 token -secret broker_secret.key $beamtime_id`
+
+monitor_database_name=db_test
+proxy_address=127.0.0.1:8400
+
+beamline=test
+receiver_root_folder=/tmp/asapo/receiver/files
+receiver_folder=${receiver_root_folder}/${beamline}/${beamtime_id}
+
+mkdir -p /tmp/asapo/test_in/test1/
+mkdir -p /tmp/asapo/test_in/test2/
+
+Cleanup() {
+    echo cleanup
+    kill $producerid
+    rm -rf /tmp/asapo/test_in/test1
+    rm -rf /tmp/asapo/test_in/test2
+    nomad stop nginx
+    nomad stop receiver
+    nomad stop discovery
+    nomad stop broker
+    nomad stop authorizer
+    echo "db.dropDatabase()" | mongo ${beamtime_id}
+    rm out.txt
+}
+
+echo "db.${beamtime_id}.insert({dummy:1})" | mongo ${beamtime_id}
+
+nomad run nginx.nmd
+nomad run authorizer.nmd
+nomad run receiver.nmd
+nomad run discovery.nmd
+nomad run broker.nmd
+
+sleep 1
+
+#producer
+mkdir -p ${receiver_folder}
+$1 test.json &
+producerid=`echo $!`
+
+sleep 1
+
+echo -n hello1 > /tmp/asapo/test_in/test1/file1
+echo -n hello2 > /tmp/asapo/test_in/test1/file2
+echo -n hello3 > /tmp/asapo/test_in/test2/file2
+
+$2 ${proxy_address} ${receiver_folder} ${beamtime_id} 2 $token 1000 0 > out.txt
+cat out.txt
+grep "Processed 3 file(s)" out.txt
+grep "Received: hello1" out.txt
+grep "Received: hello2" out.txt
+grep "Received: hello3" out.txt
+
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_windows.bat
similarity index 95%
rename from tests/automatic/full_chain/simple_chain_filegen_readdata/check_windows.bat
rename to tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_windows.bat
index 17bc19dcad2694aaa8b3a1557c4710fffa10f4ab..96baf4c0dd41bbddd19878a3e816d08204c0669a 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_windows.bat
@@ -40,7 +40,7 @@ ping 1.0.0.0 -n 10 -w 100 > nul
 
 
 REM worker
-"%2" %proxy_address% %beamtime_id% 2 %token% 1000 0 > out.txt
+"%2" %proxy_address% %receiver_folder% %beamtime_id% 2 %token% 1000 0 > out.txt
 type out.txt
 findstr /i /l /c:"Processed 3 file(s)" out.txt || goto :error
 findstr /i /l /c:"Received: hello1" out.txt || goto :error
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/test.json.in b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/test.json.in
new file mode 100644
index 0000000000000000000000000000000000000000..f072140a4ea55aa15aae487ce7d61cd80219fe31
--- /dev/null
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/test.json.in
@@ -0,0 +1,12 @@
+{
+ "AsapoEndpoint":"localhost:8400",
+ "Tag":"test_tag",
+ "BeamtimeID":"asapo_test",
+ "Mode":"tcp",
+ "NThreads":1,
+ "LogLevel":"debug",
+ "RootMonitoredFolder":"@ROOT_PATH@test_in",
+ "MonitoredSubFolders":["test1","test2"],
+ "IgnoreExtentions":["tmp"],
+ "RemoveAfterSend":true
+}
diff --git a/tests/automatic/full_chain/two_beamlines/check_linux.sh b/tests/automatic/full_chain/two_beamlines/check_linux.sh
index 1376767eb9c5dc87ce3d1263fa1a59c20765f777..c688445e93c279a292921b82366d700b91971511 100644
--- a/tests/automatic/full_chain/two_beamlines/check_linux.sh
+++ b/tests/automatic/full_chain/two_beamlines/check_linux.sh
@@ -53,5 +53,5 @@ $1 localhost:8400 ${beamtime_id2} 100 900 4 0 100 &
 #producerid=`echo $!`
 
 #workers
-$2 ${proxy_address} ${beamtime_id1} 2 $token1 2000 1  | tee /dev/stderr | grep "Processed 1000 file(s)"
-$2 ${proxy_address} ${beamtime_id2} 2 $token2 2000 1 | tee /dev/stderr | grep "Processed 900 file(s)"
+$2 ${proxy_address} ${receiver_folder1} ${beamtime_id1} 2 $token1 2000 0  | tee /dev/stderr | grep "Processed 1000 file(s)"
+$2 ${proxy_address} ${receiver_folder2} ${beamtime_id2} 2 $token2 2000 0 | tee /dev/stderr | grep "Processed 900 file(s)"
diff --git a/tests/automatic/full_chain/two_beamlines/check_windows.bat b/tests/automatic/full_chain/two_beamlines/check_windows.bat
index 48d86faaaa0bc8f713a096691bd52241a60a4b55..328ec06742ec4d222d543b80831040d459d107fc 100644
--- a/tests/automatic/full_chain/two_beamlines/check_windows.bat
+++ b/tests/automatic/full_chain/two_beamlines/check_windows.bat
@@ -35,8 +35,8 @@ start /B "" "%1" %proxy_address% %beamtime_id2% 100 900 4 0 100
 ping 1.0.0.0 -n 1 -w 100 > nul
 
 REM worker
-"%2" %proxy_address% %beamtime_id1% 2 %token1% 1000 1 | findstr /c:"Processed 1000 file(s)"  || goto :error
-"%2" %proxy_address% %beamtime_id2% 2 %token2% 1000 1 | findstr /c:"Processed 900 file(s)"  || goto :error
+"%2" %proxy_address% %receiver_folder1% %beamtime_id1% 2 %token1% 1000 0 | findstr /c:"Processed 1000 file(s)"  || goto :error
+"%2" %proxy_address% %receiver_folder2% %beamtime_id2% 2 %token2% 1000 0 | findstr /c:"Processed 900 file(s)"  || goto :error
 
 
 goto :clean
diff --git a/tests/automatic/high_avail/broker_mongo_restart/CMakeLists.txt b/tests/automatic/high_avail/broker_mongo_restart/CMakeLists.txt
index 7ec07a04e360a84d57bcc234422db38336b42a86..0ba1d0de4810af2b0cdc35cbb688b7a7e2f8f043 100644
--- a/tests/automatic/high_avail/broker_mongo_restart/CMakeLists.txt
+++ b/tests/automatic/high_avail/broker_mongo_restart/CMakeLists.txt
@@ -3,5 +3,6 @@ set(TARGET_NAME broker_mongo_restart)
 ################################
 # Testing
 ################################
+set(RECEIVER_WRITE_TO_DISK false)
 prepare_asapo()
 add_script_test("${TARGET_NAME}" "$<TARGET_FILE:dummy-data-producer> $<TARGET_FILE:getnext_broker> $<TARGET_PROPERTY:asapo,EXENAME>" nomem)
diff --git a/tests/automatic/high_avail/broker_mongo_restart/check_linux.sh b/tests/automatic/high_avail/broker_mongo_restart/check_linux.sh
index 95114f8c6ec2ef8aaea5ffcf1dd8f5ccc4d03f55..415bef108f3b07a5846a808318d8f7ecab208a10 100644
--- a/tests/automatic/high_avail/broker_mongo_restart/check_linux.sh
+++ b/tests/automatic/high_avail/broker_mongo_restart/check_linux.sh
@@ -55,7 +55,6 @@ Cleanup() {
 influx -execute "create database ${monitor_database_name}"
 
 sed -i 's/27017/27016/g' receiver.json.tpl
-sed -i 's/"WriteToDisk":true/"WriteToDisk":false/g' receiver.json.tpl
 sed -i 's/27017/27016/g' broker.json.tpl
 sed -i 's/info/debug/g' broker.json.tpl
 
@@ -82,7 +81,7 @@ $1 localhost:8400 ${beamtime_id} 100 1000 4 0 100 &
 
 wait
 
-$2 ${proxy_address} ${beamtime_id} 2 $token 10000 1 &> output.txt &
+$2 ${proxy_address} ${receiver_folder} ${beamtime_id} 2 $token 10000 0 &> output.txt &
 
 sleep 2
 
diff --git a/tests/automatic/high_avail/receiver_mongo_restart/check_linux.sh b/tests/automatic/high_avail/receiver_mongo_restart/check_linux.sh
index e7a5dc5e57a84e402d2e62ca07c0847c3c066c48..e1513ab11800672b03220965135b8fe66266b509 100644
--- a/tests/automatic/high_avail/receiver_mongo_restart/check_linux.sh
+++ b/tests/automatic/high_avail/receiver_mongo_restart/check_linux.sh
@@ -55,7 +55,6 @@ wait_mongo
 echo "db.${beamtime_id}.insert({dummy:1})" | mongo --port 27016 ${beamtime_id}
 
 sed -i 's/27017/27016/g' receiver.json.tpl
-#sed -i 's/"WriteToDisk":true/"WriteToDisk":false/g' receiver.json.tpl
 
 
 nomad run authorizer.nmd
diff --git a/tests/automatic/high_avail/services_restart/CMakeLists.txt b/tests/automatic/high_avail/services_restart/CMakeLists.txt
index b76e24f603dd672e65a59a6cc16ec261d61d84e2..5f7029bf9725d9a2c23e59c6556c73f02c3d0a73 100644
--- a/tests/automatic/high_avail/services_restart/CMakeLists.txt
+++ b/tests/automatic/high_avail/services_restart/CMakeLists.txt
@@ -3,6 +3,7 @@ set(TARGET_NAME service_restart)
 ################################
 # Testing
 ################################
+set(RECEIVER_WRITE_TO_DISK false)
 prepare_asapo()
 add_script_test("${TARGET_NAME}-all" "$<TARGET_FILE:dummy-data-producer> $<TARGET_FILE:getnext_broker> $<TARGET_PROPERTY:asapo,EXENAME> broker 1000 998" nomem)
 add_script_test("${TARGET_NAME}-all-but-broker" "$<TARGET_FILE:dummy-data-producer> $<TARGET_FILE:getnext_broker> $<TARGET_PROPERTY:asapo,EXENAME> receiver 1000 1000" nomem)
diff --git a/tests/automatic/high_avail/services_restart/check_linux.sh b/tests/automatic/high_avail/services_restart/check_linux.sh
index c63b99c112a67a2d5e36cc9ef96706c03464fb37..61ca663cc93e6225408f6cfc39d5f85778fe06c4 100644
--- a/tests/automatic/high_avail/services_restart/check_linux.sh
+++ b/tests/automatic/high_avail/services_restart/check_linux.sh
@@ -25,7 +25,6 @@ Cleanup() {
 
 influx -execute "create database ${monitor_database_name}"
 
-sed -i 's/"WriteToDisk":true/"WriteToDisk":false/g' receiver.json.tpl
 sed -i 's/info/debug/g' broker.json.tpl
 
 nomad run nginx.nmd
@@ -46,7 +45,7 @@ $1 localhost:8400 ${beamtime_id} 100 $5 4 0 100 &
 
 
 #worker
-$2 ${proxy_address} ${beamtime_id} 2 $token 30000 1 &> output.txt &
+$2 ${proxy_address} dummy_path ${beamtime_id} 2 $token 30000 1 &> output.txt &
 
 sleep 1
 
diff --git a/tests/automatic/settings/receiver.json.tpl.lin b/tests/automatic/settings/receiver.json.tpl.lin.in
similarity index 83%
rename from tests/automatic/settings/receiver.json.tpl.lin
rename to tests/automatic/settings/receiver.json.tpl.lin.in
index b8a63465a70376d1fa3c7beee7bbbe30f5d580eb..e662a64df737054b00632879f8e50db284a15e0a 100644
--- a/tests/automatic/settings/receiver.json.tpl.lin
+++ b/tests/automatic/settings/receiver.json.tpl.lin.in
@@ -7,7 +7,7 @@
     "ListenPort": 23123
   },
   "DataCache": {
-    "Use": true,
+    "Use": @RECEIVER_USE_CACHE@,
     "SizeGB": 1,
     "ReservedShare": 10
   },
@@ -15,8 +15,8 @@
   "AuthorizationInterval": 10000,
   "ListenPort": {{ env "NOMAD_PORT_recv" }},
   "Tag": "{{ env "NOMAD_ADDR_recv" }}",
-  "WriteToDisk":true,
-  "WriteToDb":true,
+  "WriteToDisk": @RECEIVER_WRITE_TO_DISK@,
+  "WriteToDb": true,
   "LogLevel" : "debug",
   "RootFolder" : "/tmp/asapo/receiver/files"
 }
diff --git a/tests/automatic/settings/receiver.json.tpl.win b/tests/automatic/settings/receiver.json.tpl.win.in
similarity index 83%
rename from tests/automatic/settings/receiver.json.tpl.win
rename to tests/automatic/settings/receiver.json.tpl.win.in
index a8fd5eec65934a148d91f96e837a556582317e49..511c2cda7e297f59f5593535e4cbc588a3ad7df5 100644
--- a/tests/automatic/settings/receiver.json.tpl.win
+++ b/tests/automatic/settings/receiver.json.tpl.win.in
@@ -10,13 +10,13 @@
     "ListenPort": 23123
   },
   "DataCache": {
-    "Use": true,
+    "Use": @RECEIVER_USE_CACHE@,
     "SizeGB": 1,
     "ReservedShare": 10
   },
   "Tag": "{{ env "NOMAD_ADDR_recv" }}",
-  "WriteToDisk":true,
-  "WriteToDb":true,
+  "WriteToDisk": @RECEIVER_WRITE_TO_DISK@,
+  "WriteToDb": true,
   "LogLevel" : "debug",
   "RootFolder" : "c:\\tmp\\asapo\\receiver\\files"
 }
diff --git a/tests/automatic/worker/next_multithread_broker/next_multithread_broker.cpp b/tests/automatic/worker/next_multithread_broker/next_multithread_broker.cpp
index ffd3f90f5b5ab791586d89f3045ac60032b2d47e..d4592188276de1cd5899cdc8b0019debb9b54a19 100644
--- a/tests/automatic/worker/next_multithread_broker/next_multithread_broker.cpp
+++ b/tests/automatic/worker/next_multithread_broker/next_multithread_broker.cpp
@@ -49,7 +49,7 @@ Args GetArgs(int argc, char* argv[]) {
 
 void GetAllFromBroker(const Args& args) {
     asapo::Error err;
-    auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.server, args.run_name, args.token, &err);
+    auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.server, "dummy", args.run_name, args.token, &err);
 
     std::vector<asapo::FileInfos>file_infos(args.nthreads);
     auto exec_next = [&](int i) {
diff --git a/worker/api/cpp/include/worker/data_broker.h b/worker/api/cpp/include/worker/data_broker.h
index d7eb5ed592e633af0ce1d0094a54f7e3a7aeee9f..6f6e468b1ceda55668f045edad5f3f3a577aaf92 100644
--- a/worker/api/cpp/include/worker/data_broker.h
+++ b/worker/api/cpp/include/worker/data_broker.h
@@ -55,7 +55,7 @@ class DataBrokerFactory {
   public:
     static std::unique_ptr<DataBroker> CreateFolderBroker(const std::string& source_name,
             Error* error) noexcept;
-    static std::unique_ptr<DataBroker> CreateServerBroker(std::string server_name,
+    static std::unique_ptr<DataBroker> CreateServerBroker(std::string server_name, std::string source_path,
             std::string beamtime_id, std::string token,
             Error* error) noexcept;
 
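Usage sketch (illustrative, not part of the patch): CreateServerBroker now takes the folder the receiver writes to in addition to the beamtime id; this is the ${receiver_folder} argument the updated test scripts pass. Assuming the public header worker/data_broker.h and the asapo::FileInfo/FileData types it brings in, a minimal caller could look like the following; the endpoint, folder, beamtime id and token values are placeholders:

    #include "worker/data_broker.h"

    int main() {
        asapo::Error err;
        // server endpoint, folder the receiver writes files to, beamtime id, access token
        auto broker = asapo::DataBrokerFactory::CreateServerBroker(
            "localhost:8400", "/tmp/asapo/receiver/files", "asapo_test", "token", &err);
        if (err != nullptr) {
            return 1;
        }
        asapo::FileInfo info;
        asapo::FileData data;
        // data comes from the receiver buffer when info.buf_id > 0, otherwise it is
        // read from <source_path>/<file name> on disk (see GetDataIfNeeded below)
        err = broker->GetNext(&info, &data);
        return err == nullptr ? 0 : 1;
    }
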
diff --git a/worker/api/cpp/src/data_broker.cpp b/worker/api/cpp/src/data_broker.cpp
index 0e833b2ac2982de2c858f4ae8c5278403e2869c3..f6eac739dfdcb0897d63bafca64f77259379cdfa 100644
--- a/worker/api/cpp/src/data_broker.cpp
+++ b/worker/api/cpp/src/data_broker.cpp
@@ -31,10 +31,11 @@ std::unique_ptr<DataBroker> DataBrokerFactory::CreateFolderBroker(const std::str
     return Create<FolderDataBroker>(source_name, error);
 };
 
-std::unique_ptr<DataBroker> DataBrokerFactory::CreateServerBroker(std::string server_name,
+std::unique_ptr<DataBroker> DataBrokerFactory::CreateServerBroker(std::string server_name, std::string source_path,
         std::string beamtime_id, std::string token,
         Error* error) noexcept {
-    return Create<ServerDataBroker>(std::move(server_name), error, std::move(beamtime_id), std::move(token));
+    return Create<ServerDataBroker>(std::move(server_name), error, std::move(source_path), std::move(beamtime_id),
+                                    std::move(token));
 }
 
 
diff --git a/worker/api/cpp/src/server_data_broker.cpp b/worker/api/cpp/src/server_data_broker.cpp
index 5fa37016e9f6a5f846efd052378e31978e685113..05a5da7782a4e323fb33ac1cd84178781e931fcc 100644
--- a/worker/api/cpp/src/server_data_broker.cpp
+++ b/worker/api/cpp/src/server_data_broker.cpp
@@ -43,11 +43,12 @@ Error HttpCodeToWorkerError(const HttpCode& code) {
 }
 
 ServerDataBroker::ServerDataBroker(std::string server_uri,
+                                   std::string source_path,
                                    std::string source_name,
                                    std::string token) :
     io__{GenerateDefaultIO()}, httpclient__{DefaultHttpClient()},
     net_client__{new TcpClient()},
-server_uri_{std::move(server_uri)}, source_name_{std::move(source_name)}, token_{std::move(token)} {
+server_uri_{std::move(server_uri)}, source_path_{std::move(source_path)}, source_name_{std::move(source_name)}, token_{std::move(token)} {
 }
 
 Error ServerDataBroker::Connect() {
@@ -180,13 +181,28 @@ Error ServerDataBroker::GetDataIfNeeded(FileInfo* info, FileData* data) {
         return nullptr;
     }
 
+    if (DataCanBeInBuffer(info)) {
+        if (TryGetDataFromBuffer(info, data) == nullptr) {
+            return nullptr;
+        }
+    }
+
+    Error error;
+    *data = io__->GetDataFromFile(info->FullName(source_path_), &info->size, &error);
+    return error;
+}
+
+bool ServerDataBroker::DataCanBeInBuffer(const FileInfo* info) {
+    return info->buf_id > 0;
+}
+
+Error ServerDataBroker::TryGetDataFromBuffer(const FileInfo* info, FileData* data) {
     auto error = net_client__->GetData(info, data);
     if (error) {
         std::cout << "error from net client: " << error->Explain() << std::endl;
-        *data = io__->GetDataFromFile(info->FullName(""), &info->size, &error);
-    }
-
+    }
     return error;
+
 }
 
 }
diff --git a/worker/api/cpp/src/server_data_broker.h b/worker/api/cpp/src/server_data_broker.h
index 5398167586c6c4e2fa697d3db3d66b6f13bcdc0d..34f8eebd3aee1e7311e28aff4ed7a5ebd739c771 100644
--- a/worker/api/cpp/src/server_data_broker.h
+++ b/worker/api/cpp/src/server_data_broker.h
@@ -17,7 +17,7 @@ enum class GetImageServerOperation {
 
 class ServerDataBroker final : public asapo::DataBroker {
   public:
-    explicit ServerDataBroker(std::string server_uri, std::string source_name, std::string token);
+    explicit ServerDataBroker(std::string server_uri, std::string source_path, std::string source_name, std::string token);
     Error Connect() override;
     Error GetNext(FileInfo* info, FileData* data) override;
     Error GetLast(FileInfo* info, FileData* data) override;
@@ -34,9 +34,12 @@ class ServerDataBroker final : public asapo::DataBroker {
     void ProcessServerError(Error* err, const std::string& response, std::string* redirect_uri);
     Error ProcessRequest(std::string* response, std::string request_uri);
     Error GetImageFromServer(GetImageServerOperation op, FileInfo* info, FileData* data);
+    bool DataCanBeInBuffer(const FileInfo* info);
+    Error TryGetDataFromBuffer(const FileInfo* info, FileData* data);
     std::string OpToUriCmd(GetImageServerOperation op);
     std::string server_uri_;
     std::string current_broker_uri_;
+    std::string source_path_;
     std::string source_name_;
     std::string token_;
     uint64_t timeout_ms_ = 0;
diff --git a/worker/api/cpp/src/tcp_client.cpp b/worker/api/cpp/src/tcp_client.cpp
index ca09e8021b2c2f42bdd84ea8e89afcfab46f0daa..137fec28692219dc88b68fb83db10ad442a10dfc 100644
--- a/worker/api/cpp/src/tcp_client.cpp
+++ b/worker/api/cpp/src/tcp_client.cpp
@@ -13,6 +13,7 @@ Error TcpClient::SendGetDataRequest(SocketDescriptor sd, const FileInfo* info) c
     GenericRequestHeader request_header{kOpcodeGetBufferData, info->buf_id, info->size};
     io__->Send(sd, &request_header, sizeof(request_header), &err);
     if (err) {
+        connection_pool__->ReleaseConnection(sd);
         io__->CloseSocket(sd, nullptr);
     }
     return err;
@@ -35,6 +36,7 @@ Error TcpClient::ReceiveResponce(SocketDescriptor sd) const noexcept {
     io__->Receive(sd, &Response, sizeof(Response), &err);
     if(err != nullptr) {
         io__->CloseSocket(sd, nullptr);
+        connection_pool__->ReleaseConnection(sd);
         return err;
     }
     switch (Response.error_code) {
@@ -67,12 +69,15 @@ Error TcpClient::ReceiveData(SocketDescriptor sd, const FileInfo* info, FileData
     try {
         data_array = new uint8_t[info->size];
     } catch (...) {
+        connection_pool__->ReleaseConnection(sd);
         return ErrorTemplates::kMemoryAllocationError.Generate();
     }
     io__->Receive(sd, data_array, info->size, &err);
+    connection_pool__->ReleaseConnection(sd);
     if (!err) {
         *data = FileData{data_array};
     } else {
+        io__->CloseSocket(sd, nullptr);
         delete[] data_array;
     }
     return err;
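Design note on the tcp_client.cpp changes (illustration only): each exit path of SendGetDataRequest, ReceiveResponce and ReceiveData now hands the socket back via connection_pool__->ReleaseConnection(sd), and closes it on failure. The same invariant could also be expressed with a small scope guard; the sketch below is hypothetical and not part of the patch, and ConnectionPool is a placeholder for whatever concrete class sits behind connection_pool__:

    // Hypothetical guard: returns a pooled connection when the scope ends, so
    // early error returns cannot leak connections back to the pool.
    template <class ConnectionPool>
    class PoolGuard {
      public:
        PoolGuard(ConnectionPool* pool, asapo::SocketDescriptor sd) : pool_{pool}, sd_{sd} {}
        ~PoolGuard() { pool_->ReleaseConnection(sd_); }
        PoolGuard(const PoolGuard&) = delete;
        PoolGuard& operator=(const PoolGuard&) = delete;
      private:
        ConnectionPool* pool_;
        asapo::SocketDescriptor sd_;
    };
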
diff --git a/worker/api/cpp/unittests/test_server_broker.cpp b/worker/api/cpp/unittests/test_server_broker.cpp
index e616add439b9362da1e574a82d3e1a92eb45ece4..ec2c856d506fc51e1cd8be0c0a1889e9047faa4a 100644
--- a/worker/api/cpp/unittests/test_server_broker.cpp
+++ b/worker/api/cpp/unittests/test_server_broker.cpp
@@ -12,7 +12,6 @@
 #include "mocking.h"
 #include "../src/tcp_client.h"
 
-
 using asapo::DataBrokerFactory;
 using asapo::DataBroker;
 using asapo::ServerDataBroker;
@@ -42,7 +41,8 @@ using testing::AllOf;
 namespace {
 
 TEST(FolderDataBroker, Constructor) {
-    auto data_broker = std::unique_ptr<ServerDataBroker> {new ServerDataBroker("test", "beamtime_id", "token")};
+    auto data_broker =
+    std::unique_ptr<ServerDataBroker> {new ServerDataBroker("test", "path", "beamtime_id", "token")};
     ASSERT_THAT(dynamic_cast<asapo::SystemIO*>(data_broker->io__.get()), Ne(nullptr));
     ASSERT_THAT(dynamic_cast<asapo::CurlHttpClient*>(data_broker->httpclient__.get()), Ne(nullptr));
     ASSERT_THAT(dynamic_cast<asapo::TcpClient*>(data_broker->net_client__.get()), Ne(nullptr));
@@ -58,9 +58,14 @@ class ServerDataBrokerTests : public Test {
     std::string expected_server_uri = "test:8400";
     std::string expected_broker_uri = "broker:5005";
     std::string expected_token = "token";
+    std::string expected_path = "/tmp/beamline/beamtime";
+    std::string expected_filename = "filename";
+    std::string expected_full_path = std::string("/tmp/beamline/beamtime") + asapo::kPathSeparator + expected_filename;
 
     void SetUp() override {
-        data_broker = std::unique_ptr<ServerDataBroker> {new ServerDataBroker(expected_server_uri, "beamtime_id", expected_token)};
+        data_broker = std::unique_ptr<ServerDataBroker> {
+            new ServerDataBroker(expected_server_uri, expected_path, "beamtime_id", expected_token)
+        };
         data_broker->io__ = std::unique_ptr<IO> {&mock_io};
         data_broker->httpclient__ = std::unique_ptr<asapo::HttpClient> {&mock_http_client};
         data_broker->net_client__ = std::unique_ptr<asapo::NetClient> {&mock_netclient};
@@ -98,10 +103,18 @@ class ServerDataBrokerTests : public Test {
             return;
         }
 
-        EXPECT_CALL(mock_io, GetDataFromFile_t("name", testing::Pointee(100), _)).Times(times).
+        EXPECT_CALL(mock_io, GetDataFromFile_t(expected_full_path, testing::Pointee(100), _)).Times(times).
         WillRepeatedly(DoAll(SetArgPointee<2>(new asapo::SimpleError{"s"}), testing::Return(nullptr)));
     }
-
+    FileInfo CreateFI(uint64_t buf_id = 1) {
+        FileInfo fi;
+        fi.size = 100;
+        fi.id = 1;
+        fi.buf_id = buf_id;
+        fi.name = expected_filename;
+        fi.modify_date = std::chrono::system_clock::now();
+        return fi;
+    }
 };
 
 TEST_F(ServerDataBrokerTests, CanConnect) {
@@ -114,7 +127,6 @@ TEST_F(ServerDataBrokerTests, GetImageReturnsErrorOnWrongInput) {
     ASSERT_THAT(return_code->Explain(), Eq(asapo::WorkerErrorMessage::kWrongInput));
 }
 
-
 TEST_F(ServerDataBrokerTests, GetNextUsesCorrectUri) {
     MockGetBrokerUri();
 
@@ -137,7 +149,6 @@ TEST_F(ServerDataBrokerTests, GetLastUsesCorrectUri) {
     data_broker->GetLast(&info, nullptr);
 }
 
-
 TEST_F(ServerDataBrokerTests, GetImageReturnsEOFFromHttpClient) {
     MockGetBrokerUri();
 
@@ -166,7 +177,6 @@ TEST_F(ServerDataBrokerTests, GetImageReturnsNotAuthorized) {
     ASSERT_THAT(err->Explain(), HasSubstr("authorization"));
 }
 
-
 TEST_F(ServerDataBrokerTests, GetImageReturnsWrongResponseFromHttpClient) {
 
     MockGetBrokerUri();
@@ -207,8 +217,6 @@ TEST_F(ServerDataBrokerTests, GetImageReturnsIfBrokerUriEmpty) {
     ASSERT_THAT(err->Explain(), AllOf(HasSubstr("broker uri"), HasSubstr("cannot")));
 }
 
-
-
 TEST_F(ServerDataBrokerTests, GetDoNotCallBrokerUriIfAlreadyFound) {
     MockGetBrokerUri();
     MockGet("error_response");
@@ -222,7 +230,6 @@ TEST_F(ServerDataBrokerTests, GetDoNotCallBrokerUriIfAlreadyFound) {
     data_broker->GetNext(&info, nullptr);
 }
 
-
 TEST_F(ServerDataBrokerTests, GetBrokerUriAgainAfterConnectionError) {
     MockGetBrokerUri();
     MockGetError();
@@ -236,11 +243,9 @@ TEST_F(ServerDataBrokerTests, GetBrokerUriAgainAfterConnectionError) {
     data_broker->GetNext(&info, nullptr);
 }
 
-
 TEST_F(ServerDataBrokerTests, GetImageReturnsEOFFromHttpClientUntilTimeout) {
     MockGetBrokerUri();
 
-
     EXPECT_CALL(mock_http_client, Get_t(HasSubstr("next"), _, _)).WillOnce(DoAll(
                 SetArgPointee<1>(HttpCode::Conflict),
                 SetArgPointee<2>(nullptr),
@@ -252,24 +257,12 @@ TEST_F(ServerDataBrokerTests, GetImageReturnsEOFFromHttpClientUntilTimeout) {
                                                     SetArgPointee<2>(nullptr),
                                                     Return("{\"id\":1}")));
 
-
     data_broker->SetTimeout(100);
     auto err = data_broker->GetNext(&info, nullptr);
 
     ASSERT_THAT(err->Explain(), HasSubstr("timeout"));
 }
 
-
-
-FileInfo CreateFI() {
-    FileInfo fi;
-    fi.size = 100;
-    fi.id = 1;
-    fi.name = "name";
-    fi.modify_date = std::chrono::system_clock::now();
-    return fi;
-}
-
 TEST_F(ServerDataBrokerTests, GetImageReturnsFileInfo) {
     MockGetBrokerUri();
 
@@ -288,7 +281,6 @@ TEST_F(ServerDataBrokerTests, GetImageReturnsFileInfo) {
     ASSERT_THAT(info.modify_date, Eq(to_send.modify_date));
 }
 
-
 TEST_F(ServerDataBrokerTests, GetImageReturnsParseError) {
     MockGetBrokerUri();
     MockGet("error_response");
@@ -297,7 +289,6 @@ TEST_F(ServerDataBrokerTests, GetImageReturnsParseError) {
     ASSERT_THAT(err->Explain(), Eq(asapo::WorkerErrorMessage::kErrorReadingSource));
 }
 
-
 TEST_F(ServerDataBrokerTests, GetImageReturnsIfNoDtataNeeded) {
     MockGetBrokerUri();
     MockGet("error_response");
@@ -308,7 +299,6 @@ TEST_F(ServerDataBrokerTests, GetImageReturnsIfNoDtataNeeded) {
     data_broker->GetNext(&info, nullptr);
 }
 
-
 TEST_F(ServerDataBrokerTests, GetImageTriesToGetDataFromMemoryCache) {
     MockGetBrokerUri();
     auto to_send = CreateFI();
@@ -338,4 +328,21 @@ TEST_F(ServerDataBrokerTests, GetImageCallsReadFromFileIfCannotReadFromCache) {
     data_broker->GetNext(&info, &data);
 }
 
+TEST_F(ServerDataBrokerTests, GetImageCallsReadFromFileIfZeroBufId) {
+    MockGetBrokerUri();
+    auto to_send = CreateFI(0);
+    auto json = to_send.Json();
+    MockGet(json);
+
+    FileData data;
+
+
+    EXPECT_CALL(mock_netclient, GetData_t(_, _)).Times(0);
+
+    MockReadDataFromFile();
+
+    data_broker->GetNext(&info, &data);
+}
+
+
 }
diff --git a/worker/api/cpp/unittests/test_tcp_client.cpp b/worker/api/cpp/unittests/test_tcp_client.cpp
index 7f71a81c1990b7db3d41f24d279491e25be73ed9..e2e8881b01bb6948c9f8f1d543f424c844c833e8 100644
--- a/worker/api/cpp/unittests/test_tcp_client.cpp
+++ b/worker/api/cpp/unittests/test_tcp_client.cpp
@@ -104,6 +104,7 @@ class TcpClientTests : public Test {
             ));
         if (!ok) {
             EXPECT_CALL(mock_io, CloseSocket_t(sd, _));
+            EXPECT_CALL(mock_connection_pool, ReleaseConnection(sd));
         }
     }
 
@@ -118,6 +119,11 @@ class TcpClientTests : public Test {
                 A_WriteSendDataResponse(responce_code),
                 testing::ReturnArg<2>()
             ));
+        if (!ok) {
+            EXPECT_CALL(mock_io, CloseSocket_t(sd, _));
+            EXPECT_CALL(mock_connection_pool, ReleaseConnection(sd));
+        }
+
     }
 
     void ExpectGetData(asapo::SocketDescriptor sd, bool ok) {
@@ -128,6 +134,10 @@ class TcpClientTests : public Test {
                 testing::SetArgPointee<3>(ok ? nullptr : asapo::IOErrorTemplates::kTimeout.Generate().release()),
                 testing::Return(ok ? expected_size : -1)
             ));
+        if (!ok) {
+            EXPECT_CALL(mock_io, CloseSocket_t(sd, _));
+        }
+        EXPECT_CALL(mock_connection_pool, ReleaseConnection(sd));
     }
 };
 
@@ -173,7 +183,6 @@ TEST_F(TcpClientTests, GetResponceReturnsError) {
     ExpectNewConnection(false, true);
     ExpectSendDataRequest(expected_sd, true);
     ExpectGetResponce(expected_sd, false, asapo::kNetErrorNoError);
-    EXPECT_CALL(mock_io, CloseSocket_t(expected_sd, _));
 
     auto err = client->GetData(&info, &data);
 
@@ -196,7 +205,6 @@ TEST_F(TcpClientTests, GetResponceReturnsWrongRequest) {
     ExpectGetResponce(expected_sd, true, asapo::kNetErrorWrongRequest);
     EXPECT_CALL(mock_io, CloseSocket_t(expected_sd, _));
 
-
     auto err = client->GetData(&info, &data);
 
     ASSERT_THAT(err, Ne(nullptr));
diff --git a/worker/api/cpp/unittests/test_worker_api.cpp b/worker/api/cpp/unittests/test_worker_api.cpp
index 6af41b9530d4a7ec9acae32d23ed0e9d013698e8..c193314aa7b34b3451d3db82b5bdd55880e231c0 100644
--- a/worker/api/cpp/unittests/test_worker_api.cpp
+++ b/worker/api/cpp/unittests/test_worker_api.cpp
@@ -45,7 +45,7 @@ TEST_F(DataBrokerFactoryTests, FailCreateDataSourceWithEmptySource) {
 
 TEST_F(DataBrokerFactoryTests, CreateServerDataSource) {
 
-    auto data_broker = DataBrokerFactory::CreateServerBroker("server", "beamtime_id", "token", &error);
+    auto data_broker = DataBrokerFactory::CreateServerBroker("server", "path", "beamtime_id", "token", &error);
 
     ASSERT_THAT(error, Eq(nullptr));
     ASSERT_THAT(dynamic_cast<ServerDataBroker*>(data_broker.get()), Ne(nullptr));