diff --git a/CMakeModules/CodeCoverage.cmake b/CMakeModules/CodeCoverage.cmake
index c170a8a583bbf2ea89338fdfc664ddddfe8afef0..63f4a9815646a623b4af2942a5ee6c8146b592cf 100644
--- a/CMakeModules/CodeCoverage.cmake
+++ b/CMakeModules/CodeCoverage.cmake
@@ -12,7 +12,7 @@
 #    and/or other materials provided with the distribution.
 #
 # 3. Neither the name of the copyright holder nor the names of its contributors
-#    may be used to endorse or promote products derived from this software without
+#    may be used to endorse or promote products derived from this software without
 #    specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
diff --git a/authorizer/src/asapo_authorizer/server/authorize.go b/authorizer/src/asapo_authorizer/server/authorize.go
index 42ee3cd020546075e3a30e72a20fddd68eefe74c..4e68c8c2d913bf80f0bf819b33db47c723086e29 100644
--- a/authorizer/src/asapo_authorizer/server/authorize.go
+++ b/authorizer/src/asapo_authorizer/server/authorize.go
@@ -14,6 +14,7 @@ type SourceCredentials struct {
 	Beamline   string
 	Stream     string
 	Token      string
+	Type       string
 }
 
 type authorizationRequest struct {
@@ -24,10 +25,10 @@ type authorizationRequest struct {
 func getSourceCredentials(request authorizationRequest) (SourceCredentials, error) {
 	vals := strings.Split(request.SourceCredentials, "%")
 
-	if len(vals) != 4 {
+	if len(vals) != 5 {
 		return SourceCredentials{}, errors.New("cannot get source credentials from " + request.SourceCredentials)
 	}
-	creds := SourceCredentials{vals[0], vals[1], vals[2], vals[3]}
+	creds := SourceCredentials{vals[1], vals[2], vals[3], vals[4], vals[0]}
 	if creds.Stream == "" {
 		creds.Stream = "detector"
 	}
@@ -138,6 +139,7 @@ func alwaysAllowed(creds SourceCredentials) (beamtimeMeta, bool) {
 	for _, pair := range settings.AlwaysAllowedBeamtimes {
 		if pair.BeamtimeId == creds.BeamtimeId {
 			pair.Stream = creds.Stream
+			pair.Type = creds.Type
 			return pair, true
 		}
 	}
@@ -200,6 +202,10 @@ func findMeta(creds SourceCredentials) (beamtimeMeta, error) {
 		meta, err = findBeamtimeMetaFromBeamline(creds.Beamline)
 	}
 
+	if creds.Type == "processed" {
+		meta.OnlinePath = ""
+	}
+
 	if (err != nil) {
 		log.Error(err.Error())
 		return beamtimeMeta{}, err
@@ -243,8 +249,9 @@ func authorize(request authorizationRequest, creds SourceCredentials) (beamtimeM
 	}
 
 	meta.Stream = creds.Stream
+	meta.Type = creds.Type
 
-	log.Debug("authorized beamtime " + meta.BeamtimeId + " for " + request.OriginHost + " in " + meta.Beamline)
+	log.Debug("authorized beamtime " + meta.BeamtimeId + " for " + request.OriginHost + " in " + meta.Beamline+", type "+meta.Type)
 	return meta, nil
 }
 
diff --git a/authorizer/src/asapo_authorizer/server/authorize_test.go b/authorizer/src/asapo_authorizer/server/authorize_test.go
index ca2c21f7689272ce12281f597e91fcdda7bb4293..d90a2f90375439f9c24072ed4a3d3b39d17bc2d8 100644
--- a/authorizer/src/asapo_authorizer/server/authorize_test.go
+++ b/authorizer/src/asapo_authorizer/server/authorize_test.go
@@ -56,16 +56,16 @@ var credTests = [] struct {
 	ok bool
 	message string
 } {
-	{"asapo_test%auto%%", SourceCredentials{"asapo_test","auto","detector",""},true,"auto beamline, stream and no token"},
-	{"asapo_test%auto%%token", SourceCredentials{"asapo_test","auto","detector","token"},true,"auto beamline, stream"},
-	{"asapo_test%auto%stream%", SourceCredentials{"asapo_test","auto","stream",""},true,"auto beamline, no token"},
-	{"asapo_test%auto%stream%token", SourceCredentials{"asapo_test","auto","stream","token"},true,"auto beamline,stream, token"},
-	{"asapo_test%beamline%stream%token", SourceCredentials{"asapo_test","beamline","stream","token"},true,"all set"},
-	{"auto%beamline%stream%token", SourceCredentials{"auto","beamline","stream","token"},true,"auto beamtime"},
-	{"auto%auto%stream%token", SourceCredentials{},false,"auto beamtime and beamline"},
-	{"%beamline%stream%token", SourceCredentials{"auto","beamline","stream","token"},true,"empty beamtime"},
-	{"asapo_test%%stream%token", SourceCredentials{"asapo_test","auto","stream","token"},true,"empty bealine"},
-	{"%%stream%token", SourceCredentials{},false,"both empty"},
+	{"processed%asapo_test%auto%%", SourceCredentials{"asapo_test","auto","detector","","processed"},true,"auto beamline, stream and no token"},
+	{"processed%asapo_test%auto%%token", SourceCredentials{"asapo_test","auto","detector","token","processed"},true,"auto beamline, stream"},
+	{"processed%asapo_test%auto%stream%", SourceCredentials{"asapo_test","auto","stream","","processed"},true,"auto beamline, no token"},
+	{"processed%asapo_test%auto%stream%token", SourceCredentials{"asapo_test","auto","stream","token","processed"},true,"auto beamline,stream, token"},
+	{"processed%asapo_test%beamline%stream%token", SourceCredentials{"asapo_test","beamline","stream","token","processed"},true,"all set"},
+	{"processed%auto%beamline%stream%token", SourceCredentials{"auto","beamline","stream","token","processed"},true,"auto beamtime"},
+	{"raw%auto%auto%stream%token", SourceCredentials{},false,"auto beamtime and beamline"},
+	{"raw%%beamline%stream%token", SourceCredentials{"auto","beamline","stream","token","raw"},true,"empty beamtime"},
+	{"raw%asapo_test%%stream%token", SourceCredentials{"asapo_test","auto","stream","token","raw"},true,"empty bealine"},
+	{"raw%%%stream%token", SourceCredentials{},false,"both empty"},
 }
 
 func TestSplitCreds(t *testing.T) {
@@ -84,8 +84,8 @@ func TestSplitCreds(t *testing.T) {
 }
 
 func TestAuthorizeDefaultOK(t *testing.T) {
-	allowBeamlines([]beamtimeMeta{{"asapo_test","beamline","","2019","tf"}})
-	request :=  makeRequest(authorizationRequest{"asapo_test%%%","host"})
+	allowBeamlines([]beamtimeMeta{{"asapo_test","beamline","","2019","tf",""}})
+	request :=  makeRequest(authorizationRequest{"processed%asapo_test%%%","host"})
 	w := doPostRequest("/authorize",request)
 
 	body, _ := ioutil.ReadAll(w.Body)
@@ -93,6 +93,7 @@ func TestAuthorizeDefaultOK(t *testing.T) {
 	assert.Contains(t, string(body), "asapo_test", "")
 	assert.Contains(t, string(body), "beamline", "")
 	assert.Contains(t, string(body), "detector", "")
+	assert.Contains(t, string(body), "processed", "")
 
 	assert.Equal(t, http.StatusOK, w.Code, "")
 }
@@ -105,6 +106,7 @@ var beamtime_meta_online =`
 `
 
 var authTests = [] struct {
+	source_type string
 	beamtime_id string
 	beamline string
 	stream string
@@ -112,13 +114,14 @@ var authTests = [] struct {
 	status int
 	message string
 }{
-	{"test","auto","stream", prepareToken("test"),http.StatusOK,"user stream with correct token"},
-	{"test_online","auto","stream", prepareToken("test_online"),http.StatusOK,"with online path"},
-	{"test1","auto","stream", prepareToken("test1"),http.StatusUnauthorized,"correct token, beamtime not found"},
-	{"test","auto","stream", prepareToken("wrong"),http.StatusUnauthorized,"user stream with wrong token"},
-	{"test","auto","detector_aaa", prepareToken("test"),http.StatusUnauthorized,"detector stream with correct token and wroung source"},
-	{"test","bl1","stream", prepareToken("test"),http.StatusOK,"correct beamline given"},
-	{"test","bl2","stream", prepareToken("test"),http.StatusUnauthorized,"incorrect beamline given"},
+	{"processed","test","auto","stream", prepareToken("test"),http.StatusOK,"user stream with correct token"},
+	{"processed","test_online","auto","stream", prepareToken("test_online"),http.StatusOK,"with online path, processed type"},
+	{"raw","test_online","auto","stream", prepareToken("test_online"),http.StatusOK,"with online path, raw type"},
+	{"processed","test1","auto","stream", prepareToken("test1"),http.StatusUnauthorized,"correct token, beamtime not found"},
+	{"processed","test","auto","stream", prepareToken("wrong"),http.StatusUnauthorized,"user stream with wrong token"},
+	{"processed","test","auto","detector_aaa", prepareToken("test"),http.StatusUnauthorized,"detector stream with correct token and wroung source"},
+	{"processed","test","bl1","stream", prepareToken("test"),http.StatusOK,"correct beamline given"},
+	{"processed","test","bl2","stream", prepareToken("test"),http.StatusUnauthorized,"incorrect beamline given"},
 }
 func TestAuthorizeWithToken(t *testing.T) {
 	allowBeamlines([]beamtimeMeta{})
@@ -134,7 +137,7 @@ func TestAuthorizeWithToken(t *testing.T) {
 	defer 	os.RemoveAll("bl1")
 
 	for _, test := range authTests {
-		request :=  makeRequest(authorizationRequest{test.beamtime_id+"%"+test.beamline+"%"+test.stream+"%"+test.token,"host"})
+		request :=  makeRequest(authorizationRequest{test.source_type+"%"+test.beamtime_id+"%"+test.beamline+"%"+test.stream+"%"+test.token,"host"})
 		w := doPostRequest("/authorize",request)
 
 		body, _ := ioutil.ReadAll(w.Body)
@@ -142,17 +145,19 @@ func TestAuthorizeWithToken(t *testing.T) {
 			body_str:=string(body)
 			body_str = strings.Replace(body_str,string(os.PathSeparator),"/",-1)
 			body_str = strings.Replace(body_str,"//","/",-1)
-			assert.Contains(t, body_str, test.beamtime_id, "")
-			assert.Contains(t, body_str, "bl1", "")
-			assert.Contains(t, body_str, "stream", "")
-			assert.Contains(t, body_str, "tf/gpfs/bl1/2019/data/test", "")
-			if (test.beamtime_id == "test_online") {
-				assert.Contains(t, body_str, "tf/gpfs/bl1/2019/data/test_online", "")
-				assert.Contains(t, body_str, "bl1/current", "")
+			assert.Contains(t, body_str, test.beamtime_id, test.message)
+			assert.Contains(t, body_str, "bl1", test.message)
+			assert.Contains(t, body_str, "stream", test.message)
+			assert.Contains(t, body_str, "type", test.message)
+			assert.Contains(t, body_str, test.source_type, test.message)
+			assert.Contains(t, body_str, "tf/gpfs/bl1/2019/data/test", test.message)
+			if (test.beamtime_id == "test_online" && test.source_type == "raw") {
+				assert.Contains(t, body_str, "tf/gpfs/bl1/2019/data/test_online", test.message)
+				assert.Contains(t, body_str, "bl1/current", test.message)
 			} else {
-				assert.NotContains(t, body_str, "current", "")
+				assert.NotContains(t, body_str, "current", test.message)
 			}
-			assert.Contains(t, body_str, test.stream, "")
+			assert.Contains(t, body_str, test.stream, test.message)
 		}
 
 		assert.Equal(t, test.status, w.Code, test.message)
@@ -221,7 +226,7 @@ func TestAuthorizeBeamline(t *testing.T) {
 	defer 	os.RemoveAll("p07")
 
 	for _, test := range authBeamlineTests {
-		request :=  makeRequest(authorizationRequest{"auto%"+test.beamline+"%stream%"+test.token,"host"})
+		request :=  makeRequest(authorizationRequest{"raw%auto%"+test.beamline+"%stream%"+test.token,"host"})
 		w := doPostRequest("/authorize",request)
 
 		body, _ := ioutil.ReadAll(w.Body)
@@ -229,11 +234,13 @@ func TestAuthorizeBeamline(t *testing.T) {
 		body_str = strings.Replace(body_str,string(os.PathSeparator),"/",-1)
 		body_str = strings.Replace(body_str,"//","/",-1)
 		if test.status==http.StatusOK {
-			assert.Contains(t, body_str, test.beamtime_id, "")
-			assert.Contains(t, body_str, test.beamline, "")
-			assert.Contains(t, body_str, "asap3/petra3/gpfs/p07/2020/data/11111111", "")
-			assert.Contains(t, body_str, "p07/current", "")
-			assert.Contains(t, body_str, "stream", "")
+			assert.Contains(t, body_str, test.beamtime_id, test.message)
+			assert.Contains(t, body_str, test.beamline, test.message)
+			assert.Contains(t, body_str, "type", test.message)
+			assert.Contains(t, body_str, "raw", test.message)
+			assert.Contains(t, body_str, "asap3/petra3/gpfs/p07/2020/data/11111111", test.message)
+			assert.Contains(t, body_str, "p07/current", test.message)
+			assert.Contains(t, body_str, "stream", test.message)
 		}
 
 		assert.Equal(t, test.status, w.Code, test.message)
@@ -242,7 +249,7 @@ func TestAuthorizeBeamline(t *testing.T) {
 
 
 func TestNotAuthorized(t *testing.T) {
-	request :=  makeRequest(authorizationRequest{"any_id%%%","host"})
+	request :=  makeRequest(authorizationRequest{"raw%any_id%%%","host"})
 	w := doPostRequest("/authorize",request)
 	assert.Equal(t, http.StatusUnauthorized, w.Code, "")
 }
@@ -260,7 +267,7 @@ func TestAuthorizeWrongPath(t *testing.T) {
 }
 
 func TestDoNotAuthorizeIfNotInAllowed(t *testing.T) {
-	allowBeamlines([]beamtimeMeta{{"test","beamline","","2019","tf"}})
+	allowBeamlines([]beamtimeMeta{{"test","beamline","","2019","tf",""}})
 
 	request :=  authorizationRequest{"asapo_test%%","host"}
 	creds,_ := getSourceCredentials(request)
@@ -295,7 +302,7 @@ func TestAuthorizeWithFile(t *testing.T) {
 	ioutil.WriteFile("127.0.0.1", []byte("bl1"), 0644)
 
 
-	request := authorizationRequest{"11003924%%%","127.0.0.1"}
+	request := authorizationRequest{"raw%11003924%%%","127.0.0.1"}
 	w := doPostRequest("/authorize",makeRequest(request))
 
 	body, _ := ioutil.ReadAll(w.Body)
@@ -305,10 +312,11 @@ func TestAuthorizeWithFile(t *testing.T) {
 	assert.Contains(t,body_str,"tf/gpfs/bl1/2019/data/11003924")
 	assert.Contains(t, body_str, "11003924", "")
 	assert.Contains(t, body_str, "bl1", "")
+	assert.Contains(t, body_str, "raw", "")
 	assert.Contains(t, body_str, "detector", "")
 	assert.Equal(t, http.StatusOK, w.Code, "")
 
-	request = authorizationRequest{"wrong%%%","127.0.0.1"}
+	request = authorizationRequest{"raw%wrong%%%","127.0.0.1"}
 	w = doPostRequest("/authorize",makeRequest(request))
 	assert.Equal(t, http.StatusUnauthorized, w.Code, "")
 
diff --git a/authorizer/src/asapo_authorizer/server/folder_token.go b/authorizer/src/asapo_authorizer/server/folder_token.go
index 8c50ae06224924888ea1ce450a73e16e37731769..bb69d5b9b34a56169a596c8c2cbec13db7fc6fd7 100644
--- a/authorizer/src/asapo_authorizer/server/folder_token.go
+++ b/authorizer/src/asapo_authorizer/server/folder_token.go
@@ -66,7 +66,7 @@ func extractFolderTokenrequest(r *http.Request) (folderTokenRequest,error) {
 }
 
 func checkBeamtimeFolder(request folderTokenRequest) error {
-	beamtimeMeta, err := findMeta(SourceCredentials{request.BeamtimeId,"auto","",""})
+	beamtimeMeta, err := findMeta(SourceCredentials{request.BeamtimeId,"auto","","",""})
 	if err != nil {
 		log.Error("cannot get beamtime meta"+err.Error())
 		return err
diff --git a/authorizer/src/asapo_authorizer/server/server.go b/authorizer/src/asapo_authorizer/server/server.go
index c2fbe406905883e4e02a818d56880e8b0da71e43..40081a91930301ff593fffdc393f6e2d767590ff 100644
--- a/authorizer/src/asapo_authorizer/server/server.go
+++ b/authorizer/src/asapo_authorizer/server/server.go
@@ -10,6 +10,7 @@ type  beamtimeMeta struct {
 	Stream string       `json:"stream"`
 	OfflinePath string `json:"core-path"`
 	OnlinePath string `json:"beamline-path"`
+	Type string `json:"source-type"`
 }
 
 type serverSettings struct {
diff --git a/broker/src/asapo_broker/server/statistics.go b/broker/src/asapo_broker/server/statistics.go
index 1dff498f09d58576b1bf175acc60ddc130fcffbf..13b0103306968508b6c3141de9733d9d3cea7066 100644
--- a/broker/src/asapo_broker/server/statistics.go
+++ b/broker/src/asapo_broker/server/statistics.go
@@ -58,11 +58,9 @@ func (st *serverStatistics) WriteStatistic() (err error) {
 func (st *serverStatistics) Monitor() {
 	for {
 		time.Sleep(1000 * time.Millisecond)
-		logstr := "sending statistics to " + settings.PerformanceDbServer + ", dbname: " + settings.PerformanceDbName
 		if err := st.WriteStatistic(); err != nil {
+		    logstr := "sending statistics to " + settings.PerformanceDbServer + ", dbname: " + settings.PerformanceDbName
 			log.Error(logstr + " - " + err.Error())
-		} else {
-			log.Debug(logstr)
 		}
 		st.Reset()
 	}
diff --git a/common/cpp/include/common/data_structs.h b/common/cpp/include/common/data_structs.h
index ad749ede8580daafc9efbbb4202241775aceeb35..771b1a5fa32c984be7bfaf2b77b211315a15d754 100644
--- a/common/cpp/include/common/data_structs.h
+++ b/common/cpp/include/common/data_structs.h
@@ -58,14 +58,21 @@ struct DataSet {
 
 using SubDirList = std::vector<std::string>;
 
+enum class SourceType {
+  kProcessed,
+  kRaw
+};
+
+Error GetSourceTypeFromString(std::string stype,SourceType *type);
+std::string GetStringFromSourceType(SourceType type);
 
 struct SourceCredentials {
-    SourceCredentials(std::string beamtime, std::string beamline, std::string stream, std::string token):
+    SourceCredentials(SourceType type, std::string beamtime, std::string beamline, std::string stream, std::string token):
         beamtime_id{std::move(beamtime)},
         beamline{std::move(beamline)},
         stream{std::move(stream)},
-        user_token{std::move(token)} {
-    };
+        user_token{std::move(token)},
+        type{type}{};
     SourceCredentials() {};
     static const std::string kDefaultStream;
     static const std::string kDefaultBeamline;
@@ -74,8 +81,9 @@ struct SourceCredentials {
     std::string beamline;
     std::string stream;
     std::string user_token;
+    SourceType type = SourceType::kProcessed;
     std::string GetString() {
-        return beamtime_id + "%" + beamline + "%" + stream + "%" + user_token;
+        return GetStringFromSourceType(type) + "%" + beamtime_id + "%" + beamline + "%" + stream + "%" + user_token;
     };
 };
 
diff --git a/common/cpp/src/data_structs/data_structs.cpp b/common/cpp/src/data_structs/data_structs.cpp
index 31b82a2bee76df3b13b7f723649d99315aab4bd4..f9eea8b2bcda65a645cc46fd2b17d67bdf099f51 100644
--- a/common/cpp/src/data_structs/data_structs.cpp
+++ b/common/cpp/src/data_structs/data_structs.cpp
@@ -23,6 +23,25 @@ const std::string SourceCredentials::kDefaultStream = "detector";
 const std::string SourceCredentials::kDefaultBeamline = "auto";
 const std::string SourceCredentials::kDefaultBeamtimeId = "auto";
 
+std::string GetStringFromSourceType(SourceType type) {
+    if (type == SourceType::kRaw) {
+        return "raw";
+    }
+    return "processed";
+}
+
+Error GetSourceTypeFromString(std::string stype,SourceType *type) {
+    Error err;
+    if (stype=="raw") {
+        *type = SourceType::kRaw;
+        return nullptr;
+    } else if (stype=="processed") {
+        *type = SourceType::kProcessed;
+        return nullptr;
+    } else {
+        return TextError("cannot parse source type: "+stype);
+    }
+}
 
 std::string FileInfo::Json() const {
     auto nanoseconds_from_epoch = std::chrono::time_point_cast<std::chrono::nanoseconds>(modify_date).
diff --git a/common/cpp/src/system_io/system_io_windows.cpp b/common/cpp/src/system_io/system_io_windows.cpp
index 1fb5c03250d0a2e4a56e99bb29158e3d5a78f9d1..52cb799e5423e774e3286556b6880401090bd45f 100644
--- a/common/cpp/src/system_io/system_io_windows.cpp
+++ b/common/cpp/src/system_io/system_io_windows.cpp
@@ -61,6 +61,7 @@ Error IOErrorFromGetLastError() {
     case WSAECONNREFUSED:
         return IOErrorTemplates::kConnectionRefused.Generate();
     case ERROR_FILE_EXISTS:
+    case ERROR_ALREADY_EXISTS:
         return IOErrorTemplates::kFileAlreadyExists.Generate();
     default:
         std::cout << "[IOErrorFromGetLastError] Unknown error code: " << last_error << std::endl;
diff --git a/common/cpp/unittests/data_structs/test_data_structs.cpp b/common/cpp/unittests/data_structs/test_data_structs.cpp
index 6efe76354b5893ee97e0154dfb1034cdce35187c..ffd8d7f86aa725129775c91848a1f03ee599b0c8 100644
--- a/common/cpp/unittests/data_structs/test_data_structs.cpp
+++ b/common/cpp/unittests/data_structs/test_data_structs.cpp
@@ -7,6 +7,8 @@
 
 using asapo::FileInfo;
 using asapo::StreamInfo;
+using asapo::SourceType;
+using asapo::SourceCredentials;
 
 using ::testing::AtLeast;
 using ::testing::Eq;
@@ -198,5 +200,38 @@ TEST(StreamInfo, ConvertToJson) {
     ASSERT_THAT(expected_json, Eq(json));
 }
 
+TEST(SourceCredentials, ConvertToString) {
+    auto sc = SourceCredentials{SourceType::kRaw,"beamtime","beamline","stream","token"};
+    std::string expected1= "raw%beamtime%beamline%stream%token";
+    std::string expected2= "processed%beamtime%beamline%stream%token";
+
+    auto res1 = sc.GetString();
+    sc.type = asapo::SourceType::kProcessed;
+    auto res2 = sc.GetString();
+
+    ASSERT_THAT(res1, Eq(expected1));
+    ASSERT_THAT(res2, Eq(expected2));
+}
+
+TEST(SourceCredentials, SourceTypeFromString) {
+    SourceType type1,type2,type3;
+
+    auto err1=GetSourceTypeFromString("raw",&type1);
+    auto err2=GetSourceTypeFromString("processed",&type2);
+    auto err3=GetSourceTypeFromString("bla",&type3);
+
+    ASSERT_THAT(err1, Eq(nullptr));
+    ASSERT_THAT(type1, Eq(SourceType::kRaw));
+    ASSERT_THAT(err2, Eq(nullptr));
+    ASSERT_THAT(type2, Eq(SourceType::kProcessed));
+    ASSERT_THAT(err3, Ne(nullptr));
+}
+
+TEST(SourceCredentials, DefaultSourceTypeInSourceCreds) {
+    SourceCredentials sc;
+
+    ASSERT_THAT(sc.type, Eq(SourceType::kProcessed));
+}
+
 
 }
diff --git a/consumer/api/cpp/unittests/test_consumer_api.cpp b/consumer/api/cpp/unittests/test_consumer_api.cpp
index 90e5b0b0182c91f63a0a1b03d056a809b76c0d63..c09f0b2f470795e3b1c8361206233cdc56aa7691 100644
--- a/consumer/api/cpp/unittests/test_consumer_api.cpp
+++ b/consumer/api/cpp/unittests/test_consumer_api.cpp
@@ -27,7 +27,7 @@ class DataBrokerFactoryTests : public Test {
 
 TEST_F(DataBrokerFactoryTests, CreateServerDataSource) {
 
-    auto data_broker = DataBrokerFactory::CreateServerBroker("server", "path", false, asapo::SourceCredentials{"beamtime_id", "", "", "token"}, &error);
+    auto data_broker = DataBrokerFactory::CreateServerBroker("server", "path", false, asapo::SourceCredentials{asapo::SourceType::kProcessed,"beamtime_id", "", "", "token"}, &error);
 
     ASSERT_THAT(error, Eq(nullptr));
     ASSERT_THAT(dynamic_cast<ServerDataBroker*>(data_broker.get()), Ne(nullptr));
diff --git a/consumer/api/cpp/unittests/test_server_broker.cpp b/consumer/api/cpp/unittests/test_server_broker.cpp
index b19620c467582a64b400469adf053b09879f4911..0443b902168e127696a0f89ed372f147e083a712 100644
--- a/consumer/api/cpp/unittests/test_server_broker.cpp
+++ b/consumer/api/cpp/unittests/test_server_broker.cpp
@@ -46,7 +46,7 @@ namespace {
 TEST(FolderDataBroker, Constructor) {
     auto data_broker =
     std::unique_ptr<ServerDataBroker> {new ServerDataBroker("test", "path", false,
-                asapo::SourceCredentials{"beamtime_id", "", "", "token"})
+                asapo::SourceCredentials{asapo::SourceType::kProcessed,"beamtime_id", "", "", "token"})
     };
     ASSERT_THAT(dynamic_cast<asapo::SystemIO*>(data_broker->io__.get()), Ne(nullptr));
     ASSERT_THAT(dynamic_cast<asapo::CurlHttpClient*>(data_broker->httpclient__.get()), Ne(nullptr));
@@ -87,10 +87,10 @@ class ServerDataBrokerTests : public Test {
     void AssertSingleFileTransfer();
     void SetUp() override {
         data_broker = std::unique_ptr<ServerDataBroker> {
-            new ServerDataBroker(expected_server_uri, expected_path, true, asapo::SourceCredentials{expected_beamtime_id, "", expected_stream, expected_token})
+            new ServerDataBroker(expected_server_uri, expected_path, true, asapo::SourceCredentials{asapo::SourceType::kProcessed,expected_beamtime_id, "", expected_stream, expected_token})
         };
         fts_data_broker = std::unique_ptr<ServerDataBroker> {
-            new ServerDataBroker(expected_server_uri, expected_path, false, asapo::SourceCredentials{expected_beamtime_id, "", expected_stream, expected_token})
+            new ServerDataBroker(expected_server_uri, expected_path, false, asapo::SourceCredentials{asapo::SourceType::kProcessed,expected_beamtime_id, "", expected_stream, expected_token})
         };
         data_broker->io__ = std::unique_ptr<IO> {&mock_io};
         data_broker->httpclient__ = std::unique_ptr<asapo::HttpClient> {&mock_http_client};
@@ -177,7 +177,7 @@ TEST_F(ServerDataBrokerTests, DefaultStreamIsDetector) {
     data_broker->httpclient__.release();
     data_broker->net_client__.release();
     data_broker = std::unique_ptr<ServerDataBroker> {
-        new ServerDataBroker(expected_server_uri, expected_path, false, asapo::SourceCredentials{"beamtime_id", "", "", expected_token})
+        new ServerDataBroker(expected_server_uri, expected_path, false, asapo::SourceCredentials{asapo::SourceType::kProcessed,"beamtime_id", "", "", expected_token})
     };
     data_broker->io__ = std::unique_ptr<IO> {&mock_io};
     data_broker->httpclient__ = std::unique_ptr<asapo::HttpClient> {&mock_http_client};
diff --git a/examples/consumer/getnext_broker/getnext_broker.cpp b/examples/consumer/getnext_broker/getnext_broker.cpp
index a792e1f2199cab85cfa32f01e1ae6cfca3e43b14..d1d5c95d0ee0484b94ff6cde63e5eb0df56020f5 100644
--- a/examples/consumer/getnext_broker/getnext_broker.cpp
+++ b/examples/consumer/getnext_broker/getnext_broker.cpp
@@ -55,7 +55,7 @@ std::vector<std::thread> StartThreads(const Args& params,
         asapo::FileInfo fi;
         Error err;
         auto broker = asapo::DataBrokerFactory::CreateServerBroker(params.server, params.file_path, true,
-                      asapo::SourceCredentials{params.beamtime_id, "", params.stream, params.token}, &err);
+                      asapo::SourceCredentials{asapo::SourceType::kProcessed,params.beamtime_id, "", params.stream, params.token}, &err);
 
         broker->SetTimeout((uint64_t) params.timeout_ms);
         asapo::FileData data;
diff --git a/examples/pipeline/in_to_out/check_linux.sh b/examples/pipeline/in_to_out/check_linux.sh
index 8d5d293f9be46fd6ddc7e79e92c850ff5155ca59..b26364222bfea03365fda80cc441bed4f4d10ec3 100644
--- a/examples/pipeline/in_to_out/check_linux.sh
+++ b/examples/pipeline/in_to_out/check_linux.sh
@@ -32,9 +32,10 @@ Cleanup() {
     nomad stop broker
     nomad stop receiver
     nomad stop authorizer
-	echo "db.dropDatabase()" | mongo ${indatabase_name}
-	echo "db.dropDatabase()" | mongo ${outdatabase_name}
-	rm -rf file1 file2 file3
+	  echo "db.dropDatabase()" | mongo ${indatabase_name}
+	  echo "db.dropDatabase()" | mongo ${outdatabase_name}
+    echo "db.dropDatabase()" | mongo ${outdatabase_name2}
+	  rm -rf processed
     rm -rf ${receiver_root_folder}
     rm -rf out out2
 
@@ -47,14 +48,14 @@ nomad run receiver.nmd
 nomad run authorizer.nmd
 
 mkdir -p $receiver_folder
-
-echo hello1 > file1
-echo hello2 > file2
-echo hello3 > file3
+mkdir processed
+echo hello1 > processed/file1
+echo hello2 > processed/file2
+echo hello3 > processed/file3
 
 for i in `seq 1 3`;
 do
-	echo 'db.data_default.insert({"_id":'$i',"size":6,"name":"'file$i'","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}})' | mongo ${indatabase_name}
+	echo 'db.data_default.insert({"_id":'$i',"size":6,"name":"'processed/file$i'","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}})' | mongo ${indatabase_name}
 done
 
 sleep 1
@@ -66,11 +67,11 @@ cat out | grep "Sent 3 file(s)"
 
 echo "db.data_default.find({"_id":1})" | mongo ${outdatabase_name} | tee /dev/stderr | grep file1_${stream_out}
 
-cat ${receiver_folder}/file1_${stream_out} | grep hello1
-cat ${receiver_folder}/file2_${stream_out} | grep hello2
-cat ${receiver_folder}/file3_${stream_out} | grep hello3
+cat ${receiver_folder}/processed/file1_${stream_out} | grep hello1
+cat ${receiver_folder}/processed/file2_${stream_out} | grep hello2
+cat ${receiver_folder}/processed/file3_${stream_out} | grep hello3
 
 $1 127.0.0.1:8400 $source_path $beamtime_id $stream_in $stream_out2 $token 2 1000 25000 0  > out2
 cat out2
-test ! -f ${receiver_folder}/file1_${stream_out2}
-echo "db.data_default.find({"_id":1})" | mongo ${outdatabase_name2} | tee /dev/stderr | grep ./file1
+test ! -f ${receiver_folder}/processed/file1_${stream_out2}
+echo "db.data_default.find({"_id":1})" | mongo ${outdatabase_name2} | tee /dev/stderr | grep processed/file1
diff --git a/examples/pipeline/in_to_out/check_windows.bat b/examples/pipeline/in_to_out/check_windows.bat
index 9b575777b8fb97ab7f155e0e94c414cafa7fe038..9d45718a5bcb01e83849f6d7dfc02596a6a4195b 100644
--- a/examples/pipeline/in_to_out/check_windows.bat
+++ b/examples/pipeline/in_to_out/check_windows.bat
@@ -20,13 +20,14 @@ SET mongo_exe="c:\Program Files\MongoDB\Server\4.2\bin\mongo.exe"
 
 call start_services.bat
 
-for /l %%x in (1, 1, 3) do echo db.data_default.insert({"_id":%%x,"size":6,"name":"file%%x","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}}) | %mongo_exe% %indatabase_name%  || goto :error
+for /l %%x in (1, 1, 3) do echo db.data_default.insert({"_id":%%x,"size":6,"name":"processed\\file%%x","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}}) | %mongo_exe% %indatabase_name%  || goto :error
 
 mkdir %receiver_folder%
 
-echo hello1 > file1
-echo hello2 > file2
-echo hello3 > file3
+mkdir processed
+echo hello1 > processed\file1
+echo hello2 > processed\file2
+echo hello3 > processed\file3
 
 
 "%1" 127.0.0.1:8400 %source_path% %beamtime_id%  %stream_in% %stream_out% %token% 2 1000 25000 1 > out
@@ -36,9 +37,9 @@ findstr /I /L /C:"Sent 3 file(s)" out || goto :error
 
 echo db.data_default.find({"_id":1}) | %mongo_exe% %outdatabase_name% | findstr  /c:"file1_%stream_out%"  || goto :error
 
-findstr /I /L /C:"hello1" %receiver_folder%\file1_%stream_out% || goto :error
-findstr /I /L /C:"hello2" %receiver_folder%\file2_%stream_out% || goto :error
-findstr /I /L /C:"hello3" %receiver_folder%\file3_%stream_out% || goto :error
+findstr /I /L /C:"hello1" %receiver_folder%\processed\file1_%stream_out% || goto :error
+findstr /I /L /C:"hello2" %receiver_folder%\processed\file2_%stream_out% || goto :error
+findstr /I /L /C:"hello3" %receiver_folder%\processed\file3_%stream_out% || goto :error
 
 
 "%1" 127.0.0.1:8400 %source_path% %beamtime_id%  %stream_in% %stream_out2% %token% 2 1000 25000 0 > out2
@@ -47,7 +48,7 @@ findstr /I /L /C:"Processed 3 file(s)" out2 || goto :error
 findstr /I /L /C:"Sent 3 file(s)" out2 || goto :error
 
 
-echo db.data_default.find({"_id":1}) | %mongo_exe% %outdatabase_name2% | findstr /c:".\\\\file1" || goto :error
+echo db.data_default.find({"_id":1}) | %mongo_exe% %outdatabase_name2% | findstr /c:"file1" || goto :error
 
 
 goto :clean
@@ -63,4 +64,4 @@ echo db.dropDatabase() | %mongo_exe% %indatabase_name%
 echo db.dropDatabase() | %mongo_exe% %outdatabase_name%
 echo db.dropDatabase() | %mongo_exe% %outdatabase_name2%
 rmdir /S /Q %receiver_root_folder%
-del file1 file2 file3 out out2
+rmdir /S /Q processed
diff --git a/examples/pipeline/in_to_out/in_to_out.cpp b/examples/pipeline/in_to_out/in_to_out.cpp
index e9e794cb660b8ed78b6ca5b6cc5f8b8bbe188ce8..9001d99834a99c7007d7b56a0faf482e06fa4d41 100644
--- a/examples/pipeline/in_to_out/in_to_out.cpp
+++ b/examples/pipeline/in_to_out/in_to_out.cpp
@@ -12,6 +12,7 @@
 
 #include "asapo_consumer.h"
 #include "asapo_producer.h"
+#include "preprocessor/definitions.h"
 
 using std::chrono::system_clock;
 using asapo::Error;
@@ -65,7 +66,7 @@ int ProcessError(const Error& err) {
 
 BrokerPtr CreateBrokerAndGroup(const Args& args, Error* err) {
     auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.server, args.file_path, true,
-                  asapo::SourceCredentials{args.beamtime_id, "", args.stream_in, args.token}, err);
+                  asapo::SourceCredentials{asapo::SourceType::kProcessed,args.beamtime_id, "", args.stream_in, args.token}, err);
     if (*err) {
         return nullptr;
     }
@@ -103,7 +104,7 @@ void SendDataDownstreamThePipeline(const Args& args, const asapo::FileInfo& fi,
         header.file_name += "_" + args.stream_out;
         err_send = producer->SendData(header, std::move(data), asapo::kDefaultIngestMode, ProcessAfterSend);
     } else {
-        header.file_name = args.file_path + "/" + header.file_name;
+        header.file_name = args.file_path + asapo::kPathSeparator + header.file_name;
         err_send = producer->SendData(header, nullptr, asapo::IngestModeFlags::kTransferMetaDataOnly, ProcessAfterSend);
         std::cout << err_send << std::endl;
     }
@@ -188,7 +189,7 @@ std::unique_ptr<asapo::Producer> CreateProducer(const Args& args) {
     asapo::Error err;
     auto producer = asapo::Producer::Create(args.server, args.nthreads,
                                             asapo::RequestHandlerType::kTcp,
-                                            asapo::SourceCredentials{args.beamtime_id, "", args.stream_out, args.token }, 60, &err);
+                                            asapo::SourceCredentials{asapo::SourceType::kProcessed,args.beamtime_id, "", args.stream_out, args.token }, 60, &err);
     if(err) {
         std::cerr << "Cannot start producer. ProducerError: " << err << std::endl;
         exit(EXIT_FAILURE);
diff --git a/examples/pipeline/in_to_out_python/check_linux.sh b/examples/pipeline/in_to_out_python/check_linux.sh
index b1780ca9beab0634fb69b5ad644bc4001258f5d2..c69e99e860b758442c5a6cfd45cf064bc2b43f94 100644
--- a/examples/pipeline/in_to_out_python/check_linux.sh
+++ b/examples/pipeline/in_to_out_python/check_linux.sh
@@ -35,8 +35,8 @@ Cleanup() {
     nomad stop receiver
     nomad stop authorizer
     echo "db.dropDatabase()" | mongo ${indatabase_name}
-	echo "db.dropDatabase()" | mongo ${outdatabase_name}
-	rm -rf file1 file2 file3
+  	echo "db.dropDatabase()" | mongo ${outdatabase_name}
+  	rm -rf processed
     rm -rf ${receiver_root_folder}
     rm -rf out
 
@@ -50,13 +50,14 @@ nomad run authorizer.nmd
 
 mkdir -p $receiver_folder
 
-echo hello1 > file1
-echo hello2 > file2
-echo hello3 > file3
+mkdir processed
+echo hello1 > processed/file1
+echo hello2 > processed/file2
+echo hello3 > processed/file3
 
 for i in `seq 1 3`;
 do
-	echo 'db.data_default.insert({"_id":'$i',"size":6,"name":"'file$i'","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}})' | mongo ${indatabase_name}
+	echo 'db.data_default.insert({"_id":'$i',"size":6,"name":"'processed/file$i'","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}})' | mongo ${indatabase_name}
 done
 
 sleep 1
@@ -71,6 +72,6 @@ cat out | grep "Sent 3 file(s)"
 
 echo "db.data_default.find({"_id":1})" | mongo ${outdatabase_name} | tee /dev/stderr | grep "file1_${stream_out}"
 
-cat ${receiver_folder}/file1_${stream_out} | grep hello1
-cat ${receiver_folder}/file2_${stream_out} | grep hello2
-cat ${receiver_folder}/file3_${stream_out} | grep hello3
+cat ${receiver_folder}/processed/file1_${stream_out} | grep hello1
+cat ${receiver_folder}/processed/file2_${stream_out} | grep hello2
+cat ${receiver_folder}/processed/file3_${stream_out} | grep hello3
diff --git a/examples/pipeline/in_to_out_python/check_windows.bat b/examples/pipeline/in_to_out_python/check_windows.bat
index b93a7f38c5ac641a5cb51e34b678d1f0debbaa2e..c22726a793ea8c90bef06d3b9a6a07bd2a764094 100644
--- a/examples/pipeline/in_to_out_python/check_windows.bat
+++ b/examples/pipeline/in_to_out_python/check_windows.bat
@@ -22,13 +22,14 @@ SET nthreads=4
 
 call start_services.bat
 
-for /l %%x in (1, 1, 3) do echo db.data_default.insert({"_id":%%x,"size":6,"name":"file%%x","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}}) | %mongo_exe% %indatabase_name%  || goto :error
+for /l %%x in (1, 1, 3) do echo db.data_default.insert({"_id":%%x,"size":6,"name":"processed\\file%%x","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}}) | %mongo_exe% %indatabase_name%  || goto :error
 
 mkdir %receiver_folder%
+mkdir processed
 
-echo hello1 > file1
-echo hello2 > file2
-echo hello3 > file3
+echo hello1 > processed\file1
+echo hello2 > processed\file2
+echo hello3 > processed\file3
 
 set PYTHONPATH=%2;%3
 
@@ -40,9 +41,9 @@ findstr /I /L /C:"Sent 3 file(s)" out || goto :error
 
 echo db.data_default.find({"_id":1}) | %mongo_exe% %outdatabase_name% | findstr  /c:"file1_%stream_out%"  || goto :error
 
-findstr /I /L /C:"hello1" %receiver_folder%\file1_%stream_out% || goto :error
-findstr /I /L /C:"hello2" %receiver_folder%\file2_%stream_out% || goto :error
-findstr /I /L /C:"hello3" %receiver_folder%\file3_%stream_out% || goto :error
+findstr /I /L /C:"hello1" %receiver_folder%\processed\file1_%stream_out% || goto :error
+findstr /I /L /C:"hello2" %receiver_folder%\processed\file2_%stream_out% || goto :error
+findstr /I /L /C:"hello3" %receiver_folder%\processed\file3_%stream_out% || goto :error
 
 
 goto :clean
@@ -56,4 +57,4 @@ call stop_services.bat
 echo db.dropDatabase() | %mongo_exe% %indatabase_name%
 echo db.dropDatabase() | %mongo_exe% %outdatabase_name%
 rmdir /S /Q %receiver_root_folder%
-del file1 file2 file3 out
+rmdir /S /Q processed
diff --git a/examples/pipeline/in_to_out_python/in_to_out.py b/examples/pipeline/in_to_out_python/in_to_out.py
index e3444b92a91b28ab1dcd5aa83ab6cfa9fdc8318e..df2b1eb07a112ee5c299a7168e288591d2a57117 100644
--- a/examples/pipeline/in_to_out_python/in_to_out.py
+++ b/examples/pipeline/in_to_out_python/in_to_out.py
@@ -30,7 +30,7 @@ transfer_data=int(transfer_data)>0
 
 broker = asapo_consumer.create_server_broker(source,path, True,beamtime,stream_in,token,timeout_s*1000)
 
-producer  = asapo_producer.create_producer(source,beamtime,'auto', stream_out, token, nthreads, 600)
+producer  = asapo_producer.create_producer(source,'processed',beamtime,'auto', stream_out, token, nthreads, 600)
 
 group_id  = broker.generate_group_id()
 
diff --git a/examples/producer/dummy-data-producer/check_linux.sh b/examples/producer/dummy-data-producer/check_linux.sh
index 2e6f36f0953ddc8bd2104051adb387ac1978531e..1366ebc886dc03cf1edca2ecba893b038c14c433 100644
--- a/examples/producer/dummy-data-producer/check_linux.sh
+++ b/examples/producer/dummy-data-producer/check_linux.sh
@@ -7,14 +7,14 @@ set -e
 trap Cleanup EXIT
 
 Cleanup() {
-rm -rf files
+ rm -rf files
 }
 
 mkdir files
 
 $@ files beamtime_id 11 4 4 1 10 2>&1 | grep Rate
 
-ls -ln files/1 | awk '{ print $5 }'| grep 11000
-ls -ln files/2 | awk '{ print $5 }'| grep 11000
-ls -ln files/3 | awk '{ print $5 }'| grep 11000
-ls -ln files/4 | awk '{ print $5 }'| grep 11000
+ls -ln files/processed/1 | awk '{ print $5 }'| grep 11000
+ls -ln files/processed/2 | awk '{ print $5 }'| grep 11000
+ls -ln files/processed/3 | awk '{ print $5 }'| grep 11000
+ls -ln files/processed/4 | awk '{ print $5 }'| grep 11000
diff --git a/examples/producer/dummy-data-producer/check_windows.bat b/examples/producer/dummy-data-producer/check_windows.bat
index b6a20167fd0b9bb4dcc2e2fcdd98fcf7254fcecf..7e706364aa864981af665e63b8dce8d33858ee1e 100644
--- a/examples/producer/dummy-data-producer/check_windows.bat
+++ b/examples/producer/dummy-data-producer/check_windows.bat
@@ -4,16 +4,16 @@ mkdir %folder%
 
 "%1" %folder% beamtime_id 11 4 4 1 10 2>&1 | findstr "Rate" || goto :error
 
-FOR /F "usebackq" %%A IN ('%folder%\1') DO set size=%%~zA
+FOR /F "usebackq" %%A IN ('%folder%\processed\1') DO set size=%%~zA
 if %size% NEQ 11000 goto :error
 
-FOR /F "usebackq" %%A IN ('%folder%\2') DO set size=%%~zA
+FOR /F "usebackq" %%A IN ('%folder%\processed\2') DO set size=%%~zA
 if %size% NEQ 11000 goto :error
 
-FOR /F "usebackq" %%A IN ('%folder%\3') DO set size=%%~zA
+FOR /F "usebackq" %%A IN ('%folder%\processed\3') DO set size=%%~zA
 if %size% NEQ 11000 goto :error
 
-FOR /F "usebackq" %%A IN ('%folder%\4') DO set size=%%~zA
+FOR /F "usebackq" %%A IN ('%folder%\processed\4') DO set size=%%~zA
 if %size% NEQ 11000 goto :error
 
 goto :clean
diff --git a/examples/producer/dummy-data-producer/dummy_data_producer.cpp b/examples/producer/dummy-data-producer/dummy_data_producer.cpp
index 25f6e18851512689427c7fe321e9ce3db3d93500..8b66ce5d1ac3852788571a2dbfcc4ef279ae65dc 100644
--- a/examples/producer/dummy-data-producer/dummy_data_producer.cpp
+++ b/examples/producer/dummy-data-producer/dummy_data_producer.cpp
@@ -8,7 +8,7 @@
 #include <sstream>
 
 #include "asapo_producer.h"
-
+#include "preprocessor/definitions.h"
 
 using std::chrono::system_clock;
 
@@ -71,7 +71,7 @@ void ProcessCommandArguments(int argc, char* argv[], Args* args) {
         std::cout <<
                   "Usage: " << argv[0] <<
                   " <destination> <beamtime_id[%<stream>%<token>]> <number_of_byte> <iterations> <nthreads>"
-                  " <mode x0 -t tcp, x1 - filesystem, 0x - write files, 1x - do not write files> <timeout (sec)> [n images in set (default 1)]"
+                  " <mode 0xx - processed source type, 1xx - raw source type, xx0 -t tcp, xx1 - filesystem, x0x - write files, x1x - do not write files> <timeout (sec)> [n images in set (default 1)]"
                   << std::endl;
         exit(EXIT_FAILURE);
     }
@@ -127,7 +127,7 @@ asapo::FileData CreateMemoryBuffer(size_t size) {
 
 
 bool SendDummyData(asapo::Producer* producer, size_t number_of_byte, uint64_t iterations, uint64_t images_in_set,
-                   const std::string& stream, bool write_files) {
+                   const std::string& stream, bool write_files, asapo::SourceType type) {
 
     asapo::Error err;
     if (iterations == 0) {
@@ -138,13 +138,17 @@ bool SendDummyData(asapo::Producer* producer, size_t number_of_byte, uint64_t it
         }
     }
 
-    for(uint64_t i = 0; i < iterations; i++) {
+    std::string image_folder = GetStringFromSourceType(type)+asapo::kPathSeparator;
+
+
+    for (uint64_t i = 0; i < iterations; i++) {
         auto buffer = CreateMemoryBuffer(number_of_byte);
         asapo::EventHeader event_header{i + 1, number_of_byte, std::to_string(i + 1)};
         std::string meta = "{\"user_meta\":\"test" + std::to_string(i + 1) + "\"}";
         if (!stream.empty()) {
             event_header.file_name = stream + "/" + event_header.file_name;
         }
+        event_header.file_name = image_folder+event_header.file_name;
         event_header.user_metadata = std::move(meta);
         if (images_in_set == 1) {
             auto err = producer->SendData(event_header, std::move(buffer), write_files ? asapo::kDefaultIngestMode :
@@ -163,6 +167,7 @@ bool SendDummyData(asapo::Producer* producer, size_t number_of_byte, uint64_t it
                 if (!stream.empty()) {
                     event_header.file_name = stream + "/" + event_header.file_name;
                 }
+                event_header.file_name = image_folder + event_header.file_name;
                 event_header.user_metadata = meta;
                 auto err = producer->SendData(event_header, std::move(buffer), write_files ? asapo::kDefaultIngestMode :
                                               asapo::kTransferData, &ProcessAfterSend);
@@ -180,7 +185,7 @@ std::unique_ptr<asapo::Producer> CreateProducer(const Args& args) {
     asapo::Error err;
     auto producer = asapo::Producer::Create(args.discovery_service_endpoint, args.nthreads,
                                             args.mode % 10 == 0 ? asapo::RequestHandlerType::kTcp : asapo::RequestHandlerType::kFilesystem,
-                                            asapo::SourceCredentials{args.beamtime_id, "", args.stream, args.token }, 3600, &err);
+                                            asapo::SourceCredentials{args.mode / 100 == 0 ?asapo::SourceType::kProcessed:asapo::SourceType::kRaw,args.beamtime_id, "", args.stream, args.token }, 3600, &err);
     if(err) {
         std::cerr << "Cannot start producer. ProducerError: " << err << std::endl;
         exit(EXIT_FAILURE);
@@ -216,7 +221,7 @@ int main (int argc, char* argv[]) {
     system_clock::time_point start_time = system_clock::now();
 
     if(!SendDummyData(producer.get(), args.number_of_bytes, args.iterations, args.images_in_set, args.stream,
-                      args.mode / 10 == 0)) {
+                      args.mode / 10 == 0,args.mode / 100 == 0 ?asapo::SourceType::kProcessed:asapo::SourceType::kRaw)) {
         return EXIT_FAILURE;
     }
 
diff --git a/examples/producer/simple-producer/produce.cpp b/examples/producer/simple-producer/produce.cpp
index c1f850bc0c428a66cac85dcdc92e8b8942b23bb1..33333eaa936821ec08308fd3cf66bb003d0199de 100644
--- a/examples/producer/simple-producer/produce.cpp
+++ b/examples/producer/simple-producer/produce.cpp
@@ -34,7 +34,7 @@ int main(int argc, char* argv[]) {
     auto buffer =  asapo::FileData(new uint8_t[send_size]);
     memcpy(buffer.get(),to_send.c_str(),send_size);
 
-    asapo::EventHeader event_header{1, send_size, "test_file"};
+    asapo::EventHeader event_header{1, send_size, std::string("processed")+asapo::kPathSeparator+"test_file"};
     err = producer->SendData(event_header, std::move(buffer), asapo::kDefaultIngestMode, &ProcessAfterSend);
     exit_if_error("Cannot send file", err);
 
diff --git a/producer/api/cpp/include/producer/producer.h b/producer/api/cpp/include/producer/producer.h
index 783c92c7e0ffe62187e888341b08cb7441d09e9c..3ba8f622810b4c1328e87f8f35b3aa3071fc4ff2 100644
--- a/producer/api/cpp/include/producer/producer.h
+++ b/producer/api/cpp/include/producer/producer.h
@@ -10,7 +10,6 @@
 
 namespace asapo {
 
-
 /** @ingroup producer */
 class Producer {
   public:
diff --git a/producer/api/cpp/unittests/test_producer.cpp b/producer/api/cpp/unittests/test_producer.cpp
index 9e82be03ed1e4dbc8ee3ea52da6303bebf53e904..2e1fae519e39b6362758b988f7f266e8bafe0b77 100644
--- a/producer/api/cpp/unittests/test_producer.cpp
+++ b/producer/api/cpp/unittests/test_producer.cpp
@@ -15,7 +15,7 @@ namespace {
 TEST(CreateProducer, TcpProducer) {
     asapo::Error err;
     std::unique_ptr<asapo::Producer> producer = asapo::Producer::Create("endpoint", 4, asapo::RequestHandlerType::kTcp,
-                                                SourceCredentials{"bt", "", "", ""}, 3600, &err);
+                                                SourceCredentials{asapo::SourceType::kRaw,"bt", "", "", ""}, 3600, &err);
     ASSERT_THAT(dynamic_cast<asapo::ProducerImpl*>(producer.get()), Ne(nullptr));
     ASSERT_THAT(err, Eq(nullptr));
 }
@@ -24,13 +24,13 @@ TEST(CreateProducer, ErrorBeamtime) {
     asapo::Error err;
     std::string expected_beamtimeid(asapo::kMaxMessageSize * 10, 'a');
     std::unique_ptr<asapo::Producer> producer = asapo::Producer::Create("endpoint", 4, asapo::RequestHandlerType::kTcp,
-                                                SourceCredentials{expected_beamtimeid, "", "", ""}, 3600, &err);
+                                                SourceCredentials{asapo::SourceType::kRaw,expected_beamtimeid, "", "", ""}, 3600, &err);
     ASSERT_THAT(producer, Eq(nullptr));
     ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kWrongInput));
 }
 
 TEST(CreateProducer, ErrorOnBothAutoBeamlineBeamtime) {
-    asapo::SourceCredentials creds{"auto", "auto", "subname", "token"};
+    asapo::SourceCredentials creds{asapo::SourceType::kRaw,"auto", "auto", "subname", "token"};
     asapo::Error err;
     std::unique_ptr<asapo::Producer> producer = asapo::Producer::Create("endpoint", 4, asapo::RequestHandlerType::kTcp,
                                                 creds, 3600, &err);
@@ -41,7 +41,7 @@ TEST(CreateProducer, ErrorOnBothAutoBeamlineBeamtime) {
 TEST(CreateProducer, TooManyThreads) {
     asapo::Error err;
     std::unique_ptr<asapo::Producer> producer = asapo::Producer::Create("", asapo::kMaxProcessingThreads + 1,
-                                                asapo::RequestHandlerType::kTcp, SourceCredentials{"bt", "", "", ""}, 3600, &err);
+                                                asapo::RequestHandlerType::kTcp, SourceCredentials{asapo::SourceType::kRaw,"bt", "", "", ""}, 3600, &err);
     ASSERT_THAT(producer, Eq(nullptr));
     ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kWrongInput));
 }
@@ -50,7 +50,7 @@ TEST(CreateProducer, TooManyThreads) {
 TEST(CreateProducer, ZeroThreads) {
     asapo::Error err;
     std::unique_ptr<asapo::Producer> producer = asapo::Producer::Create("", 0,
-                                                asapo::RequestHandlerType::kTcp, SourceCredentials{"bt", "", "", ""}, 3600, &err);
+                                                asapo::RequestHandlerType::kTcp, SourceCredentials{asapo::SourceType::kRaw,"bt", "", "", ""}, 3600, &err);
     ASSERT_THAT(producer, Eq(nullptr));
     ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kWrongInput));
 }
@@ -59,7 +59,7 @@ TEST(CreateProducer, ZeroThreads) {
 TEST(Producer, SimpleWorkflowWihoutConnection) {
     asapo::Error err;
     std::unique_ptr<asapo::Producer> producer = asapo::Producer::Create("hello", 5, asapo::RequestHandlerType::kTcp,
-                                                SourceCredentials{"bt", "", "", ""}, 3600,
+                                                SourceCredentials{asapo::SourceType::kRaw,"bt", "", "", ""}, 3600,
                                                 &err);
 
     asapo::EventHeader event_header{1, 1, "test"};
diff --git a/producer/api/cpp/unittests/test_producer_impl.cpp b/producer/api/cpp/unittests/test_producer_impl.cpp
index 798a49c17f435da1f12b7151ba9319f6771ce126..a72a0ad0fa0003a461e604d427a2eb6f9770ba7b 100644
--- a/producer/api/cpp/unittests/test_producer_impl.cpp
+++ b/producer/api/cpp/unittests/test_producer_impl.cpp
@@ -72,14 +72,14 @@ class ProducerImplTests : public testing::Test {
     char expected_substream[asapo::kMaxMessageSize] = "test_substream";
     std::string expected_next_substream = "next_substream";
 
-    asapo::SourceCredentials expected_credentials{"beamtime_id", "beamline", "subname", "token"
+    asapo::SourceCredentials expected_credentials{asapo::SourceType::kRaw,"beamtime_id", "beamline", "subname", "token"
     };
     asapo::SourceCredentials expected_default_credentials{
-        "beamtime_id", "", "", "token"
+        asapo::SourceType::kProcessed,"beamtime_id", "", "", "token"
     };
 
-    std::string expected_credentials_str = "beamtime_id%beamline%subname%token";
-    std::string expected_default_credentials_str = "beamtime_id%auto%detector%token";
+    std::string expected_credentials_str = "raw%beamtime_id%beamline%subname%token";
+    std::string expected_default_credentials_str = "processed%beamtime_id%auto%detector%token";
 
     std::string expected_metadata = "meta";
     std::string expected_fullpath = "filename";
@@ -391,7 +391,7 @@ TEST_F(ProducerImplTests, OKSendingSendFileRequestWithSubstream) {
 
 TEST_F(ProducerImplTests, ErrorSettingBeamtime) {
     std::string long_str(asapo::kMaxMessageSize * 10, 'a');
-    expected_credentials = asapo::SourceCredentials{long_str, "", "", ""};
+    expected_credentials = asapo::SourceCredentials{asapo::SourceType::kRaw,long_str, "", "", ""};
     EXPECT_CALL(mock_logger, Error(testing::HasSubstr("too long")));
 
     auto err = producer.SetCredentials(expected_credentials);
@@ -402,8 +402,8 @@ TEST_F(ProducerImplTests, ErrorSettingBeamtime) {
 TEST_F(ProducerImplTests, ErrorSettingSecondTime) {
     EXPECT_CALL(mock_logger, Error(testing::HasSubstr("already")));
 
-    producer.SetCredentials(asapo::SourceCredentials{"1", "", "2", "3"});
-    auto err = producer.SetCredentials(asapo::SourceCredentials{"4", "", "5", "6"});
+    producer.SetCredentials(asapo::SourceCredentials{asapo::SourceType::kRaw,"1", "", "2", "3"});
+    auto err = producer.SetCredentials(asapo::SourceCredentials{asapo::SourceType::kRaw,"4", "", "5", "6"});
 
     ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kWrongInput));
 }
diff --git a/producer/api/python/asapo_producer.pxd b/producer/api/python/asapo_producer.pxd
index 61e5f6beaec0893b7cca06a3581279825e4ab29f..d08450fc364c34be3c71508a16cb44076714575e 100644
--- a/producer/api/python/asapo_producer.pxd
+++ b/producer/api/python/asapo_producer.pxd
@@ -22,6 +22,8 @@ cdef extern from "asapo_producer.h" namespace "asapo":
   ErrorTemplateInterface kLocalIOError "asapo::ProducerErrorTemplates::kLocalIOError"
   ErrorTemplateInterface kServerWarning "asapo::ProducerErrorTemplates::kServerWarning"
 
+
+
 cdef extern from "asapo_producer.h" namespace "asapo":
   cppclass FileData:
     unique_ptr[uint8_t[]] release()
@@ -46,11 +48,15 @@ cdef extern from "asapo_producer.h" namespace "asapo":
 
 
 cdef extern from "asapo_producer.h" namespace "asapo":
+  cppclass SourceType:
+    pass
+  cdef Error GetSourceTypeFromString(string types,SourceType * type)
   struct  SourceCredentials:
     string beamtime_id
     string beamline
     string stream
     string user_token
+    SourceType type
 
 cdef extern from "asapo_producer.h" namespace "asapo":
   struct  EventHeader:
@@ -92,6 +98,7 @@ cdef extern from "asapo_wrappers.h" namespace "asapo":
     RequestCallback unwrap_callback_with_memory(RequestCallbackCythonMemory, void*,void*,void*)
 
 
+
 cdef extern from "asapo_producer.h" namespace "asapo" nogil:
     cppclass Producer:
         @staticmethod
diff --git a/producer/api/python/asapo_producer.pyx.in b/producer/api/python/asapo_producer.pyx.in
index db3f83dc3d77b3cbe4e84a8115a7d25a684a4e26..7a991b6b8120bada992c819f04e4464548b76bc2 100644
--- a/producer/api/python/asapo_producer.pyx.in
+++ b/producer/api/python/asapo_producer.pyx.in
@@ -301,23 +301,30 @@ cdef class PyProducer:
             if self.c_producer.get() is not NULL:
                 self.c_producer.get().StopThreads__()
     @staticmethod
-    def __create_producer(endpoint,beamtime_id,beamline,stream,token,nthreads,timeout_sec):
+    def __create_producer(endpoint,type,beamtime_id,beamline,stream,token,nthreads,timeout_sec):
         pyProd = PyProducer()
         cdef Error err
+        cdef SourceType source_type
+        err = GetSourceTypeFromString(type,&source_type)
+        if err:
+            throw_exception(err)
         cdef SourceCredentials source
         source.beamtime_id = beamtime_id
         source.beamline = beamline
         source.user_token = token
         source.stream = stream
+        source.type = source_type
         pyProd.c_producer = Producer.Create(endpoint,nthreads,RequestHandlerType_Tcp,source,timeout_sec,&err)
         if err:
             throw_exception(err)
         return pyProd
 
-def create_producer(endpoint,beamtime_id,beamline,stream,token,nthreads,timeout_sec):
+def create_producer(endpoint,type,beamtime_id,beamline,stream,token,nthreads,timeout_sec):
     """
          :param endpoint: server endpoint (url:port)
          :type endpoint: string
+         :param type: source type, "raw" to write to "raw" folder in beamline filesystem,"processed" to write to "processed" folder in core filesystem
+         :type type: string
          :param beamtime_id: beamtime id, can be "auto" if beamline is given, will automatically select the current beamtime id
          :type beamtime_id: string
          :param beamline: beamline name, can be "auto" if beamtime_id is given
@@ -334,7 +341,7 @@ def create_producer(endpoint,beamtime_id,beamline,stream,token,nthreads,timeout_
             AsapoWrongInputError: wrong input (number of threads, ,,,)
             AsapoProducerError: actually should not happen
     """
-    return PyProducer.__create_producer(_bytes(endpoint),_bytes(beamtime_id),_bytes(beamline),_bytes(stream),_bytes(token),nthreads,timeout_sec)
+    return PyProducer.__create_producer(_bytes(endpoint),_bytes(type),_bytes(beamtime_id),_bytes(beamline),_bytes(stream),_bytes(token),nthreads,timeout_sec)
 
 
 __version__ = "@PYTHON_ASAPO_VERSION@@ASAPO_VERSION_COMMIT@"
diff --git a/producer/event_monitor_producer/src/main_eventmon.cpp b/producer/event_monitor_producer/src/main_eventmon.cpp
index 0b599676e70f8c89034ba31d7f7020421cd35c81..d2ebf9f0aaa605b8e5b1adae3ca25a2bf2365f4e 100644
--- a/producer/event_monitor_producer/src/main_eventmon.cpp
+++ b/producer/event_monitor_producer/src/main_eventmon.cpp
@@ -39,7 +39,7 @@ std::unique_ptr<Producer> CreateProducer() {
 
     Error err;
     auto producer = Producer::Create(config->asapo_endpoint, (uint8_t) config->nthreads,
-                                     config->mode, asapo::SourceCredentials{config->beamtime_id, "", config->stream, ""}, 3600, &err);
+                                     config->mode, asapo::SourceCredentials{asapo::SourceType::kProcessed,config->beamtime_id, "", config->stream, ""}, 3600, &err);
     if(err) {
         std::cerr << "cannot create producer: " << err << std::endl;
         exit(EXIT_FAILURE);
diff --git a/receiver/CMakeLists.txt b/receiver/CMakeLists.txt
index bbf62d388e76b126bf2e8ded4247c7cfa3a7cba5..7068c6b0789dc30a3225ee646be2d7d24d0d7a34 100644
--- a/receiver/CMakeLists.txt
+++ b/receiver/CMakeLists.txt
@@ -96,6 +96,7 @@ set(TEST_SOURCE_FILES
         unittests/request_handler/test_requests_dispatcher.cpp
         unittests/test_datacache.cpp
         unittests/file_processors/test_write_file_processor.cpp
+        unittests/file_processors/test_file_processor.cpp
         unittests/file_processors/test_receive_file_processor.cpp
         )
 #
diff --git a/receiver/src/file_processors/file_processor.cpp b/receiver/src/file_processors/file_processor.cpp
index d940e31395914b397b6e749ddcc04052faf58862..51866022aba9a238e21a7a878197a017a09acba5 100644
--- a/receiver/src/file_processors/file_processor.cpp
+++ b/receiver/src/file_processors/file_processor.cpp
@@ -2,6 +2,8 @@
 
 #include "io/io_factory.h"
 #include "../receiver_logger.h"
+#include "../receiver_config.h"
+#include "../request.h"
 
 namespace asapo {
 
@@ -9,4 +11,40 @@ FileProcessor::FileProcessor(): io__{GenerateDefaultIO()}, log__{GetDefaultRecei
 
 }
 
+Error GetRootFolder(const Request* request, std::string* root_folder) {
+    std::string root;
+    auto fname = request->GetFileName();
+    auto pos = fname.find(asapo::kPathSeparator);
+    if (pos == std::string::npos) {
+        return ReceiverErrorTemplates::kBadRequest.Generate("cannot extract root folder from file path "+fname);
+    }
+
+    auto posr = fname.find("..");
+    if (posr != std::string::npos) {
+        return ReceiverErrorTemplates::kBadRequest.Generate("cannot use relative path in path name "+fname);
+    }
+
+    std::string file_folder = fname.substr(0, pos);
+    auto folder_by_type = GetStringFromSourceType(request->GetSourceType());
+    if (file_folder!=folder_by_type) {
+        return ReceiverErrorTemplates::kBadRequest.Generate("file "+fname+" is not in "+folder_by_type +" folder");
+    }
+
+    switch (request->GetSourceType()) {
+        case SourceType::kProcessed:
+            root = request->GetOfflinePath();
+            break;
+        case SourceType::kRaw:
+            root = request->GetOnlinePath();
+            if (root.empty()) {
+                return ReceiverErrorTemplates::kBadRequest.Generate("online path not available");
+            }
+            break;
+    }
+
+    *root_folder = root;
+    return nullptr;
+}
+
+
 }
diff --git a/receiver/src/file_processors/file_processor.h b/receiver/src/file_processors/file_processor.h
index b57ca733c3db3c4d42df5b36312c41702ccc868c..433a4c896fd13967b0de8120ba274047245a2c41 100644
--- a/receiver/src/file_processors/file_processor.h
+++ b/receiver/src/file_processors/file_processor.h
@@ -8,6 +8,8 @@ namespace asapo {
 
 class Request;
 
+Error GetRootFolder(const Request* request, std::string* root_folder);
+
 class FileProcessor {
   public:
     FileProcessor();
diff --git a/receiver/src/file_processors/receive_file_processor.cpp b/receiver/src/file_processors/receive_file_processor.cpp
index 1388e4f1c08613ed7ee07a19a46a2d4aadd7f198..7e291bd313efc8c53e4b6d72f68b2fa8777ec230 100644
--- a/receiver/src/file_processors/receive_file_processor.cpp
+++ b/receiver/src/file_processors/receive_file_processor.cpp
@@ -16,8 +16,12 @@ Error ReceiveFileProcessor::ProcessFile(const Request* request, bool overwrite)
     auto fsize = request->GetDataSize();
     auto socket = request->GetSocket();
     auto fname = request->GetFileName();
-    auto root_folder = request->GetOfflinePath();
-    auto err =  io__->ReceiveDataToFile(socket, root_folder, fname, (size_t) fsize, true, overwrite);
+    std::string root_folder;
+    auto err = GetRootFolder(request,&root_folder);
+    if (err) {
+        return err;
+    }
+    err =  io__->ReceiveDataToFile(socket, root_folder, fname, (size_t) fsize, true, overwrite);
     if (!err) {
         log__->Debug("received file of size " + std::to_string(fsize) + " to " + root_folder + kPathSeparator + fname);
     }
diff --git a/receiver/src/file_processors/write_file_processor.cpp b/receiver/src/file_processors/write_file_processor.cpp
index 58a956f10536020a51f2c810d73af0e6901aa494..8437160e2bc22cb89a44bc76f3bfc332369920ce 100644
--- a/receiver/src/file_processors/write_file_processor.cpp
+++ b/receiver/src/file_processors/write_file_processor.cpp
@@ -21,9 +21,13 @@ Error WriteFileProcessor::ProcessFile(const Request* request, bool overwrite) co
 
     auto data = request->GetData();
     auto fname = request->GetFileName();
-    auto root_folder = request->GetOfflinePath();
+    std::string root_folder;
+    auto err = GetRootFolder(request,&root_folder);
+    if (err) {
+        return err;
+    }
 
-    auto err =  io__->WriteDataToFile(root_folder, fname, (uint8_t*)data, (size_t) fsize, true, overwrite);
+    err =  io__->WriteDataToFile(root_folder, fname, (uint8_t*)data, (size_t) fsize, true, overwrite);
     if (!err) {
         log__->Debug("saved file of size " + std::to_string(fsize) + " to " + root_folder + kPathSeparator + fname);
     }
diff --git a/receiver/src/request.cpp b/receiver/src/request.cpp
index 8ca9d9b4c017c58e8063a427acc837af872ad1ad..21b11f5948bf7515327ea174e2c08c25dc23b383 100644
--- a/receiver/src/request.cpp
+++ b/receiver/src/request.cpp
@@ -193,5 +193,11 @@ const ResponseMessageType Request::GetResponseMessageType() const {
 Error Request::CheckForDuplicates()  {
     return check_duplicate_request_handler_->ProcessRequest(this);
 }
+void Request::SetSourceType(SourceType type) {
+    source_type_ = type;
+}
+SourceType Request::GetSourceType() const {
+    return source_type_;
+}
 
 }
diff --git a/receiver/src/request.h b/receiver/src/request.h
index 22e80ecfee3cc0a87af391a92cabbbb67c1e19f1..6f4c001c67a960dedfc13073f487c74c4bb06ecf 100644
--- a/receiver/src/request.h
+++ b/receiver/src/request.h
@@ -19,6 +19,8 @@
 #include "data_cache.h"
 
 #include "preprocessor/definitions.h"
+#include "file_processors/file_processor.h"
+
 namespace asapo {
 
 using RequestHandlerList = std::vector<const ReceiverRequestHandler*>;
@@ -52,6 +54,9 @@ class Request {
     VIRTUAL void SetBeamtimeId(std::string beamtime_id);
     VIRTUAL void SetBeamline(std::string beamline);
 
+    VIRTUAL void SetSourceType(SourceType);
+    VIRTUAL SourceType GetSourceType() const;
+
     VIRTUAL const std::string& GetStream() const;
     VIRTUAL void SetStream(std::string stream);
     VIRTUAL void SetMetadata(std::string metadata);
@@ -93,9 +98,9 @@ class Request {
     std::string response_message_;
     ResponseMessageType response_message_type_;
     const RequestHandlerDbCheckRequest* check_duplicate_request_handler_;
+    SourceType source_type_ = SourceType::kProcessed;
 };
 
-
 }
 
 #endif //ASAPO_REQUEST_H
diff --git a/receiver/src/request_handler/request_handler_authorize.cpp b/receiver/src/request_handler/request_handler_authorize.cpp
index 685eb59b577bd8e107367609f80f690db5b5a507..3b5f7fc59cde995b1fb342e3205dd194afdbf878 100644
--- a/receiver/src/request_handler/request_handler_authorize.cpp
+++ b/receiver/src/request_handler/request_handler_authorize.cpp
@@ -41,16 +41,20 @@ Error RequestHandlerAuthorize::Authorize(Request* request, const char* source_cr
         return auth_error;
     }
 
+    std::string stype;
+
     JsonStringParser parser{response};
     (err = parser.GetString("beamtimeId", &beamtime_id_)) ||
     (err = parser.GetString("stream", &stream_)) ||
     (err = parser.GetString("core-path", &offline_path_)) ||
     (err = parser.GetString("beamline-path", &online_path_)) ||
+    (err = parser.GetString("source-type", &stype)) ||
+    (err = GetSourceTypeFromString(stype, &source_type_)) ||
     (err = parser.GetString("beamline", &beamline_));
     if (err) {
         return ErrorFromAuthorizationServerResponse(err, code);
     } else {
-        log__->Debug(std::string("authorized connection from ") + request->GetOriginUri() + " beamline: " +
+        log__->Debug(std::string("authorized connection from ") + request->GetOriginUri() +"source type: "+stype+ " beamline: " +
                      beamline_ + ", beamtime id: " + beamtime_id_ + ", stream: " + stream_);
     }
 
@@ -105,6 +109,7 @@ Error RequestHandlerAuthorize::ProcessOtherRequest(Request* request) const {
     request->SetStream(stream_);
     request->SetOfflinePath(offline_path_);
     request->SetOnlinePath(online_path_);
+    request->SetSourceType(source_type_);
     return nullptr;
 }
 
diff --git a/receiver/src/request_handler/request_handler_authorize.h b/receiver/src/request_handler/request_handler_authorize.h
index c0bcd062b046094ab4cdb26422df9489cdda7c44..7d6af9aabccf1ee305a036a717ad9043a8fb823b 100644
--- a/receiver/src/request_handler/request_handler_authorize.h
+++ b/receiver/src/request_handler/request_handler_authorize.h
@@ -25,6 +25,7 @@ class RequestHandlerAuthorize final: public ReceiverRequestHandler {
     mutable std::string beamline_;
     mutable std::string offline_path_;
     mutable std::string online_path_;
+    mutable SourceType source_type_ = asapo::SourceType::kProcessed;
     mutable std::string cached_source_credentials_;
     mutable std::chrono::system_clock::time_point last_updated_;
     Error ProcessAuthorizationRequest(Request* request) const;
diff --git a/receiver/src/statistics/statistics_sender_influx_db.cpp b/receiver/src/statistics/statistics_sender_influx_db.cpp
index f1bea1305683d97604e1363658427f9d293f8010..e6b1ca8a706b0dd571e784c7182ab4245c501df9 100644
--- a/receiver/src/statistics/statistics_sender_influx_db.cpp
+++ b/receiver/src/statistics/statistics_sender_influx_db.cpp
@@ -35,8 +35,6 @@ void StatisticsSenderInfluxDb::SendStatistics(const StatisticsToSend& statistic)
         log__->Error(msg + " - " + response);
         return;
     }
-
-    log__->Debug(msg);
 }
 
 std::string StatisticsSenderInfluxDb::StatisticsToString(const StatisticsToSend& statistic) const noexcept {
diff --git a/receiver/unittests/file_processors/test_file_processor.cpp b/receiver/unittests/file_processors/test_file_processor.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..7b658f5985f75cea4dac81b1264f343559806ecf
--- /dev/null
+++ b/receiver/unittests/file_processors/test_file_processor.cpp
@@ -0,0 +1,122 @@
+#include <gtest/gtest.h>
+#include <gmock/gmock.h>
+
+#include "unittests/MockIO.h"
+#include "unittests/MockLogger.h"
+
+#include "../../src/file_processors/receive_file_processor.h"
+#include "common/networking.h"
+#include "preprocessor/definitions.h"
+#include "../mock_receiver_config.h"
+
+#include "../receiver_mocking.h"
+
+using ::testing::Test;
+using ::testing::Return;
+using ::testing::ReturnRef;
+using ::testing::_;
+using ::testing::DoAll;
+using ::testing::SetArgReferee;
+using ::testing::Gt;
+using ::testing::Eq;
+using ::testing::Ne;
+using ::testing::Mock;
+using ::testing::NiceMock;
+using ::testing::InSequence;
+using ::testing::SetArgPointee;
+using ::testing::AllOf;
+using ::testing::HasSubstr;
+
+
+using ::asapo::Error;
+using ::asapo::GetRootFolder;
+using ::asapo::ErrorInterface;
+using ::asapo::FileDescriptor;
+using ::asapo::SocketDescriptor;
+using ::asapo::MockIO;
+using asapo::Request;
+using asapo::ReceiveFileProcessor;
+using ::asapo::GenericRequestHeader;
+using asapo::MockRequest;
+
+namespace {
+
+class FileProcessorTests : public Test {
+  public:
+    NiceMock<MockIO> mock_io;
+    std::unique_ptr<MockRequest> mock_request;
+    NiceMock<asapo::MockLogger> mock_logger;
+    std::string expected_offline_path =  "offline";
+    std::string expected_online_path =  "online";
+    void MockRequestData(std::string fname,asapo::SourceType type);
+    void SetUp() override {
+        GenericRequestHeader request_header;
+        request_header.data_id = 2;
+        asapo::ReceiverConfig test_config;
+        asapo::SetReceiverConfig(test_config, "none");
+        mock_request.reset(new MockRequest{request_header, 1, "", nullptr});
+    }
+    void TearDown() override {
+    }
+
+};
+
+void FileProcessorTests::MockRequestData(std::string fname,asapo::SourceType type) {
+
+    if (type == asapo::SourceType::kProcessed) {
+            EXPECT_CALL(*mock_request, GetOfflinePath())
+             .WillRepeatedly(ReturnRef(expected_offline_path));
+    } else {
+        EXPECT_CALL(*mock_request, GetOnlinePath())
+            .WillRepeatedly(ReturnRef(expected_online_path));
+    }
+
+    EXPECT_CALL(*mock_request, GetSourceType()).WillRepeatedly(Return(type));
+
+    EXPECT_CALL(*mock_request, GetFileName()).Times(1)
+    .WillRepeatedly(Return(fname));
+}
+
+
+std::string repl_sep(const std::string& orig) {
+    std::string str = orig;
+    std::replace(str.begin(), str.end(), '/', asapo::kPathSeparator); // needed for Windows tests
+    return str;
+}
+
+TEST_F(FileProcessorTests, RawWriteToRaw) {
+
+    struct Test {
+      asapo::SourceType type;
+      std::string filename;
+      bool error;
+      std::string res;
+    };
+    std::vector<Test> tests = {
+        Test{asapo::SourceType::kProcessed,repl_sep("processed/bla.text"),false,expected_offline_path},
+        Test{asapo::SourceType::kProcessed,repl_sep("raw/bla.text"),true,""},
+        Test{asapo::SourceType::kProcessed,repl_sep("processed/../bla.text"),true,""},
+        Test{asapo::SourceType::kProcessed,repl_sep("bla/bla.text"),true,""},
+        Test{asapo::SourceType::kProcessed,repl_sep("bla.text"),true,""},
+        Test{asapo::SourceType::kProcessed,repl_sep("./bla.text"),true,""},
+        Test{asapo::SourceType::kRaw,repl_sep("raw/bla.text"),false,expected_online_path},
+    };
+
+    for (auto& test: tests) {
+        MockRequestData(test.filename,test.type);
+        std::string res;
+        auto err = GetRootFolder(mock_request.get(),&res);
+        if (test.error) {
+            ASSERT_THAT(err, Eq(asapo::ReceiverErrorTemplates::kBadRequest));
+        } else {
+            ASSERT_THAT(err, Eq(nullptr));
+            ASSERT_THAT(res, Eq(test.res));
+        }
+        Mock::VerifyAndClearExpectations(mock_request.get());
+    }
+
+}
+
+
+
+}
diff --git a/receiver/unittests/file_processors/test_receive_file_processor.cpp b/receiver/unittests/file_processors/test_receive_file_processor.cpp
index 65648c87122ce5d56899796f7b4ac44a058c2490..612726b465a079215bf46abb014bb1c9cefdc153 100644
--- a/receiver/unittests/file_processors/test_receive_file_processor.cpp
+++ b/receiver/unittests/file_processors/test_receive_file_processor.cpp
@@ -54,11 +54,12 @@ class ReceiveFileProcessorTests : public Test {
     std::unique_ptr<MockRequest> mock_request;
     NiceMock<asapo::MockLogger> mock_logger;
     SocketDescriptor expected_socket_id = SocketDescriptor{1};
-    std::string expected_file_name = "2";
+    std::string expected_file_name = std::string("processed")+asapo::kPathSeparator+std::string("2");
     std::string expected_beamtime_id = "beamtime_id";
     std::string expected_beamline = "beamline";
     std::string expected_facility = "facility";
     std::string expected_year = "2020";
+    asapo::SourceType expected_source_type = asapo::SourceType::kProcessed;
     uint64_t expected_file_size = 10;
     bool expected_overwrite = false;
     std::string expected_root_folder = "root_folder";
@@ -97,7 +98,11 @@ void ReceiveFileProcessorTests::MockRequestData() {
     EXPECT_CALL(*mock_request, GetOfflinePath()).Times(1)
     .WillRepeatedly(ReturnRef(expected_full_path));
 
-    EXPECT_CALL(*mock_request, GetFileName()).Times(1)
+    EXPECT_CALL(*mock_request, GetSourceType()).Times(2)
+        .WillRepeatedly(Return(expected_source_type));
+
+
+    EXPECT_CALL(*mock_request, GetFileName()).Times(2)
     .WillRepeatedly(Return(expected_file_name));
 }
 
diff --git a/receiver/unittests/file_processors/test_write_file_processor.cpp b/receiver/unittests/file_processors/test_write_file_processor.cpp
index b438322cd23ddb8d5ae545d68705fbcbf22af702..6b6f050f5f893a7504a83d0f27950cc50a053b26 100644
--- a/receiver/unittests/file_processors/test_write_file_processor.cpp
+++ b/receiver/unittests/file_processors/test_write_file_processor.cpp
@@ -53,7 +53,8 @@ class WriteFileProcessorTests : public Test {
     NiceMock<MockIO> mock_io;
     std::unique_ptr<MockRequest> mock_request;
     NiceMock<asapo::MockLogger> mock_logger;
-    std::string expected_file_name = "2";
+    std::string expected_file_name = std::string("raw")+asapo::kPathSeparator+std::string("2");
+    asapo::SourceType expected_source_type = asapo::SourceType::kRaw;
     std::string expected_beamtime_id = "beamtime_id";
     std::string expected_beamline = "beamline";
     std::string expected_facility = "facility";
@@ -100,10 +101,14 @@ void WriteFileProcessorTests::MockRequestData(int times) {
     EXPECT_CALL(*mock_request, GetData()).Times(times)
     .WillRepeatedly(Return(nullptr));
 
-    EXPECT_CALL(*mock_request, GetOfflinePath()).Times(times)
+    EXPECT_CALL(*mock_request, GetOnlinePath()).Times(times)
     .WillRepeatedly(ReturnRef(expected_full_path));
 
-    EXPECT_CALL(*mock_request, GetFileName()).Times(times)
+    EXPECT_CALL(*mock_request, GetSourceType()).Times(times*2)
+        .WillRepeatedly(Return(expected_source_type));
+
+
+    EXPECT_CALL(*mock_request, GetFileName()).Times(times*2)
     .WillRepeatedly(Return(expected_file_name));
 }
 
diff --git a/receiver/unittests/receiver_mocking.h b/receiver/unittests/receiver_mocking.h
index 8aebb8a3e3a2cbcee95023881582dd5864cd8a9c..be51ef72ff48484c4705910e007bab8c94f576ca 100644
--- a/receiver/unittests/receiver_mocking.h
+++ b/receiver/unittests/receiver_mocking.h
@@ -92,6 +92,9 @@ class MockRequest: public Request {
     MOCK_METHOD1(SetOnlinePath, void (std::string));
     MOCK_METHOD1(SetOfflinePath, void (std::string));
 
+    MOCK_METHOD1(SetSourceType, void (SourceType));
+    MOCK_CONST_METHOD0(GetSourceType, SourceType ());
+
     MOCK_CONST_METHOD0(WasAlreadyProcessed, bool());
     MOCK_METHOD0(SetAlreadyProcessedFlag, void());
     MOCK_METHOD2(SetResponseMessage, void(std::string,ResponseMessageType));
diff --git a/receiver/unittests/request_handler/test_request_handler_authorizer.cpp b/receiver/unittests/request_handler/test_request_handler_authorizer.cpp
index 92b50cdb65fc1d36ad0bdc95911965431a7392ee..dfc3228cb42203b29371356ed21705aff6db6a90 100644
--- a/receiver/unittests/request_handler/test_request_handler_authorizer.cpp
+++ b/receiver/unittests/request_handler/test_request_handler_authorizer.cpp
@@ -72,10 +72,12 @@ class AuthorizerHandlerTests : public Test {
     std::string expected_authorization_server = "authorizer_host";
     std::string expect_request_string;
     std::string expected_source_credentials;
+    asapo::SourceType expected_source_type = asapo::SourceType::kProcessed;
+    std::string expected_source_type_str = "processed";
     void MockRequestData();
     void SetUp() override {
         GenericRequestHeader request_header;
-        expected_source_credentials = expected_beamtime_id + "%stream%token";
+        expected_source_credentials = "processed%"+expected_beamtime_id + "%stream%token";
         expect_request_string = std::string("{\"SourceCredentials\":\"") + expected_source_credentials +
                                 "\",\"OriginHost\":\"" +
                                 expected_producer_uri + "\"}";
@@ -112,12 +114,14 @@ class AuthorizerHandlerTests : public Test {
                              "\",\"stream\":" + "\"" + expected_stream +
                              "\",\"beamline-path\":" + "\"" + expected_beamline_path +
                              "\",\"core-path\":" + "\"" + expected_core_path +
+                             "\",\"source-type\":" + "\"" + expected_source_type_str +
                              "\",\"beamline\":" + "\"" + expected_beamline + "\"}")
                      ));
             if (code != HttpCode::OK) {
                 EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("failure authorizing"),
                                                      HasSubstr("return code"),
                                                      HasSubstr(std::to_string(int(code))),
+                                                     HasSubstr(expected_source_type_str),
                                                      HasSubstr(expected_beamtime_id),
                                                      HasSubstr(expected_stream),
                                                      HasSubstr(expected_producer_uri),
@@ -126,6 +130,7 @@ class AuthorizerHandlerTests : public Test {
                 EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("authorized"),
                                                      HasSubstr(expected_beamtime_id),
                                                      HasSubstr(expected_beamline),
+                                                     HasSubstr(expected_source_type_str),
                                                      HasSubstr(expected_stream),
                                                      HasSubstr(expected_producer_uri))));
             }
@@ -155,6 +160,7 @@ class AuthorizerHandlerTests : public Test {
             EXPECT_CALL(*mock_request, SetOfflinePath(expected_core_path));
             EXPECT_CALL(*mock_request, SetOnlinePath(expected_beamline_path));
             EXPECT_CALL(*mock_request, SetBeamline(expected_beamline));
+            EXPECT_CALL(*mock_request, SetSourceType(expected_source_type));
         }
 
         MockAuthRequest(error, code);
@@ -262,7 +268,7 @@ TEST_F(AuthorizerHandlerTests, DataTransferRequestAuthorizeUsesCachedValue) {
     EXPECT_CALL(*mock_request, SetStream(expected_stream));
     EXPECT_CALL(*mock_request, SetOnlinePath(expected_beamline_path));
     EXPECT_CALL(*mock_request, SetOfflinePath(expected_core_path));
-
+    EXPECT_CALL(*mock_request, SetSourceType(expected_source_type));
     auto err =  handler.ProcessRequest(mock_request.get());
 
     ASSERT_THAT(err, Eq(nullptr));
diff --git a/receiver/unittests/statistics/test_receiver_statistics.cpp b/receiver/unittests/statistics/test_receiver_statistics.cpp
index de05a2a3f12f4df3946c8422819c035b892cd27f..413aeca86a8267a88b3a52da18e4217b95d73a68 100644
--- a/receiver/unittests/statistics/test_receiver_statistics.cpp
+++ b/receiver/unittests/statistics/test_receiver_statistics.cpp
@@ -127,7 +127,7 @@ TEST_F(ReceiverStatisticTests, TimerForAll) {
     ASSERT_THAT(stat.extra_entities[StatisticEntity::kNetwork].second, Ge(0.25));
     ASSERT_THAT(stat.extra_entities[StatisticEntity::kNetwork].second, Le(0.35));
 
-    ASSERT_THAT(stat.extra_entities[StatisticEntity::kDisk].second, Ge(0.35));
+    ASSERT_THAT(stat.extra_entities[StatisticEntity::kDisk].second, Ge(0.3));
     ASSERT_THAT(stat.extra_entities[StatisticEntity::kDisk].second, Le(0.45));
 }
 
diff --git a/receiver/unittests/statistics/test_statistics_sender_influx_db.cpp b/receiver/unittests/statistics/test_statistics_sender_influx_db.cpp
index bdb33de72a7c0357a4bb2e9a54b271fcce5feb83..c9d6bf2d9342d737e806b8f5c937c4199dfef792 100644
--- a/receiver/unittests/statistics/test_statistics_sender_influx_db.cpp
+++ b/receiver/unittests/statistics/test_statistics_sender_influx_db.cpp
@@ -106,22 +106,5 @@ TEST_F(SenderInfluxDbTests, LogErrorWithWrongResponceSendStatistics) {
     sender.SendStatistics(statistics);
 }
 
-TEST_F(SenderInfluxDbTests, LogDebugSendStatistics) {
-    EXPECT_CALL(mock_http_client, Post_t(_,_, _, _, _)).
-    WillOnce(
-        DoAll(SetArgPointee<4>(nullptr), SetArgPointee<3>(asapo::HttpCode::OK), Return("ok response")
-             ));
-
-    EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("sending statistics"),
-                                         HasSubstr(config.performance_db_uri),
-                                         HasSubstr(config.performance_db_name)
-                                        )
-                                  )
-               );
-
-
-    sender.SendStatistics(statistics);
-}
-
 
 }
diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt
index 11b4622c6528a19a79daa5807874d7f8093862ea..4fbf5ab06f9058a9f31ff4a938826d91a31f9f06 100644
--- a/tests/CMakeLists.txt
+++ b/tests/CMakeLists.txt
@@ -6,3 +6,4 @@ configure_files(${CMAKE_CURRENT_SOURCE_DIR}/manual/tests_via_nomad ${CMAKE_CURRE
 add_subdirectory(manual/performance_broker_receiver)
 
 add_subdirectory(manual/asapo_fabric)
+add_subdirectory(manual/producer_cpp)
diff --git a/tests/automatic/authorizer/check_authorize/check_linux.sh b/tests/automatic/authorizer/check_authorize/check_linux.sh
index ceecdffca5e6530fe0b2241452fe8398da5b410d..1e3d99bd6559030cccb0e4e43debf3c8701a0f8d 100644
--- a/tests/automatic/authorizer/check_authorize/check_linux.sh
+++ b/tests/automatic/authorizer/check_authorize/check_linux.sh
@@ -19,27 +19,27 @@ mkdir -p asap3/petra3/gpfs/p01/2019/data/11000015
 mkdir -p beamline/p07/current
 cp beamtime-metadata* beamline/p07/current/
 
-curl -v --silent --data '{"SourceCredentials":"c20180508-000-COM20181%%stream%","OriginHost":"127.0.0.1:5555"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep c20180508-000-COM20181
-curl -v --silent --data '{"SourceCredentials":"c20180508-000-COM20181%%stream%","OriginHost":"127.0.0.1:5555"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p01
-curl -v --silent --data '{"SourceCredentials":"c20180508-000-COM20181%%stream%","OriginHost":"127.0.0.1:5555"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep stream
+curl -v --silent --data '{"SourceCredentials":"raw%c20180508-000-COM20181%%stream%","OriginHost":"127.0.0.1:5555"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep c20180508-000-COM20181
+curl -v --silent --data '{"SourceCredentials":"raw%c20180508-000-COM20181%%stream%","OriginHost":"127.0.0.1:5555"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p01
+curl -v --silent --data '{"SourceCredentials":"raw%c20180508-000-COM20181%%stream%","OriginHost":"127.0.0.1:5555"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep stream
 
 token=onm80KQF8s6d2p_laW0S5IYanUUsLcnB3QO-6QQ1M90= #token for c20180508-000-COM20181
 
-curl -v --silent --data '{"SourceCredentials":"c20180508-000-COM20181%%stream%onm80KQF8s6d2p_laW0S5IYanUUsLcnB3QO-6QQ1M90=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep stream
-curl -v --silent --data '{"SourceCredentials":"c20180508-000-COM20181%auto%stream%onm80KQF8s6d2p_laW0S5IYanUUsLcnB3QO-6QQ1M90=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p01
-curl -v --silent --data '{"SourceCredentials":"c20180508-000-COM20181%%stream%bla","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 401
+curl -v --silent --data '{"SourceCredentials":"raw%c20180508-000-COM20181%%stream%onm80KQF8s6d2p_laW0S5IYanUUsLcnB3QO-6QQ1M90=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep stream
+curl -v --silent --data '{"SourceCredentials":"raw%c20180508-000-COM20181%auto%stream%onm80KQF8s6d2p_laW0S5IYanUUsLcnB3QO-6QQ1M90=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p01
+curl -v --silent --data '{"SourceCredentials":"raw%c20180508-000-COM20181%%stream%bla","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 401
 
 token=dccMd3NT89i32Whz7yD4VQhmEJy6Kxc35wsBbWJLXp0= #token for 11000015
-curl -v --silent --data '{"SourceCredentials":"11000015%%stream%dccMd3NT89i32Whz7yD4VQhmEJy6Kxc35wsBbWJLXp0=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p01
+curl -v --silent --data '{"SourceCredentials":"raw%11000015%%stream%dccMd3NT89i32Whz7yD4VQhmEJy6Kxc35wsBbWJLXp0=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p01
 
 token=Jaas_xTpkB0Zy5dFwjs4kCrY7yXMfbnW8Ca1aYhyKBs= #token for 11000016
-curl -v --silent --data '{"SourceCredentials":"11000016%%stream%Jaas_xTpkB0Zy5dFwjs4kCrY7yXMfbnW8Ca1aYhyKBs=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 401
+curl -v --silent --data '{"SourceCredentials":"raw%11000016%%stream%Jaas_xTpkB0Zy5dFwjs4kCrY7yXMfbnW8Ca1aYhyKBs=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 401
 
 
 token=-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4= # for bl_p07
-curl -v --silent --data '{"SourceCredentials":"auto%p07%stream%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 11111111
-curl -v --silent --data '{"SourceCredentials":"auto%p07%stream%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p07
-curl -v --silent --data '{"SourceCredentials":"auto%p07%stream%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep /asap3/petra3/gpfs/p07/2020/data/11111111
+curl -v --silent --data '{"SourceCredentials":"raw%auto%p07%stream%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 11111111
+curl -v --silent --data '{"SourceCredentials":"raw%auto%p07%stream%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p07
+curl -v --silent --data '{"SourceCredentials":"raw%auto%p07%stream%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep /asap3/petra3/gpfs/p07/2020/data/11111111
 
 
 rm -rf asap3 beamline
\ No newline at end of file
diff --git a/tests/automatic/authorizer/check_authorize/check_windows.bat b/tests/automatic/authorizer/check_authorize/check_windows.bat
index 3936894d10fa6c4b9e6359cf83edee0ee539e335..3061463d1bd35b68fe432135693e62af0f778adf 100644
--- a/tests/automatic/authorizer/check_authorize/check_windows.bat
+++ b/tests/automatic/authorizer/check_authorize/check_windows.bat
@@ -9,14 +9,14 @@ mkdir asap3\petra3\gpfs\p01\2019\comissioning\c20180508-000-COM20181
 mkdir beamline\p07\current
 copy beamtime-metadata* beamline\p07\current\ /y
 
-C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"c20180508-000-COM20181%%%%stream%%\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr c20180508-000-COM20181  || goto :error
-C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"c20180508-000-COM20181%%auto%%stream%%\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr p01  || goto :error
-C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"c20180508-000-COM20181%%%%stream%%\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr stream  || goto :error
+C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"raw%%c20180508-000-COM20181%%%%stream%%\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr c20180508-000-COM20181  || goto :error
+C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"raw%%c20180508-000-COM20181%%auto%%stream%%\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr p01  || goto :error
+C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"raw%%c20180508-000-COM20181%%%%stream%%\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr stream  || goto :error
 
-C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"c20180508-000-COM20181%%%%stream%%onm80KQF8s6d2p_laW0S5IYanUUsLcnB3QO-6QQ1M90=\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr stream  || goto :error
-C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"c20180508-000-COM20181%%%%stream%%wrong\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr 401  || goto :error
+C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"raw%%c20180508-000-COM20181%%%%stream%%onm80KQF8s6d2p_laW0S5IYanUUsLcnB3QO-6QQ1M90=\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr stream  || goto :error
+C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"raw%%c20180508-000-COM20181%%%%stream%%wrong\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr 401  || goto :error
 
-C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"auto%%p07%%stream%%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr 11111111  || goto :error
+C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"raw%%auto%%p07%%stream%%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr 11111111  || goto :error
 
 goto :clean
 
diff --git a/tests/automatic/bug_fixes/error-sending-data-using-callback-method/bugfix_callback.py b/tests/automatic/bug_fixes/error-sending-data-using-callback-method/bugfix_callback.py
index 0f048aeed2d6f2236d10dbb2197490b5cb038cfc..f52bcde652a18ad1864bd6805c8fcd6315959355 100644
--- a/tests/automatic/bug_fixes/error-sending-data-using-callback-method/bugfix_callback.py
+++ b/tests/automatic/bug_fixes/error-sending-data-using-callback-method/bugfix_callback.py
@@ -25,7 +25,7 @@ class AsapoSender:
     def _callback(self, header, err):
     	print ("hello self callback")
 
-producer  = asapo_producer.create_producer(endpoint,beamtime,'auto', stream, token, nthreads, 600)
+producer  = asapo_producer.create_producer(endpoint,'processed',beamtime,'auto', stream, token, nthreads, 600)
 producer.set_log_level("debug")
 
 sender = AsapoSender(producer)
diff --git a/tests/automatic/bug_fixes/producer_send_after_restart/check_linux.sh b/tests/automatic/bug_fixes/producer_send_after_restart/check_linux.sh
index f9ef5a944ede561b54c50b891dbff9cf73571735..b151f761fd6e17397effa78ec711d5bcec462391 100644
--- a/tests/automatic/bug_fixes/producer_send_after_restart/check_linux.sh
+++ b/tests/automatic/bug_fixes/producer_send_after_restart/check_linux.sh
@@ -16,11 +16,12 @@ year=2019
 receiver_folder=${receiver_root_folder}/${facility}/gpfs/${beamline}/${year}/data/${beamtime_id}
 
 
-mkdir -p /tmp/asapo/test_in/test1/
+mkdir -p /tmp/asapo/test_in/test1
 
 Cleanup() {
     echo cleanup
     rm -rf ${receiver_root_folder}
+    rm -rf /tmp/asapo/test_in
     nomad stop nginx
     nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
     nomad stop receiver
@@ -35,7 +36,7 @@ nomad run receiver.nmd
 nomad run discovery.nmd
 
 sleep 1
-
+mkdir  /tmp/asapo/test_in/processed
 #producer
 mkdir -p ${receiver_folder}
 $1 test.json &> output &
@@ -43,13 +44,13 @@ producerid=`echo $!`
 
 sleep 1
 
-echo hello > /tmp/asapo/test_in/test1/file1
+echo hello > /tmp/asapo/test_in/processed/file1
 sleep 1
 nomad stop receiver
 sleep 1
 nomad run receiver.nmd
 
-echo hello > /tmp/asapo/test_in/test1/file1
+echo hello > /tmp/asapo/test_in/processed/file1
 sleep 1
 
 kill -s INT $producerid
diff --git a/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat b/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat
index 2421a18add8a7711b43c85fb2319db3b8b07d725..af9970923562b7db84897ad81875080cc06a0bd2 100644
--- a/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat
+++ b/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat
@@ -20,14 +20,13 @@ call start_services.bat
 
 REM producer
 mkdir %receiver_folder%
-mkdir  c:\tmp\asapo\test_in\test1
-mkdir  c:\tmp\asapo\test_in\test2
+mkdir  c:\tmp\asapo\test_in\processed
 start /B "" "%1" test.json
 
 ping 1.0.0.0 -n 3 -w 100 > nul
 
-echo hello > c:\tmp\asapo\test_in\test1\file1
-echo hello > c:\tmp\asapo\test_in\test1\file2
+echo hello > c:\tmp\asapo\test_in\processed\file1
+echo hello > c:\tmp\asapo\test_in\processed\file2
 
 ping 1.0.0.0 -n 3 -w 100 > nul
 
@@ -39,7 +38,7 @@ ping 1.0.0.0 -n 3 -w 100 > nul
 ping 1.0.0.0 -n 10 -w 100 > nul
 
 
-echo hello > c:\tmp\asapo\test_in\test1\file3
+echo hello > c:\tmp\asapo\test_in\processed\file3
 
 ping 1.0.0.0 -n 10 -w 100 > nul
 
@@ -58,8 +57,7 @@ exit /b 1
 call stop_services.bat
 
 rmdir /S /Q %receiver_root_folder%
-rmdir /S /Q c:\tmp\asapo\test_in\test1
-rmdir /S /Q c:\tmp\asapo\test_in\test2
+rmdir /S /Q c:\tmp\asapo\test_in
 Taskkill /IM "%producer_short_name%" /F
 
 del /f token
diff --git a/tests/automatic/bug_fixes/producer_send_after_restart/test.json.in b/tests/automatic/bug_fixes/producer_send_after_restart/test.json.in
index d74bd52ebcf85d75b4e1533dd0288b71337dc4b2..ed41c425ce44f356fecb72e6c17820cae9ef7b69 100644
--- a/tests/automatic/bug_fixes/producer_send_after_restart/test.json.in
+++ b/tests/automatic/bug_fixes/producer_send_after_restart/test.json.in
@@ -6,7 +6,7 @@
  "NThreads":1,
  "LogLevel":"debug",
  "RootMonitoredFolder":"@ROOT_PATH@test_in",
- "MonitoredSubFolders":["test1"],
+ "MonitoredSubFolders":["processed"],
  "IgnoreExtensions":["tmp"],
  "WhitelistExtensions":[],
  "RemoveAfterSend":true,
diff --git a/tests/automatic/consumer/consumer_api/consumer_api.cpp b/tests/automatic/consumer/consumer_api/consumer_api.cpp
index 0ba8d1b644717f3e8587ad5c152025ddd8057031..10935a3acb0f1064737c19e4b8bb2e21e81e56df 100644
--- a/tests/automatic/consumer/consumer_api/consumer_api.cpp
+++ b/tests/automatic/consumer/consumer_api/consumer_api.cpp
@@ -237,7 +237,7 @@ void TestDataset(const std::unique_ptr<asapo::DataBroker>& broker, const std::st
 void TestAll(const Args& args) {
     asapo::Error err;
     auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.server, ".", true,
-                  asapo::SourceCredentials{args.run_name, "", "", args.token}, &err);
+                  asapo::SourceCredentials{asapo::SourceType::kProcessed,args.run_name, "", "", args.token}, &err);
     broker->SetTimeout(100);
     auto group_id = broker->GenerateNewGroupId(&err);
 
diff --git a/tests/automatic/consumer/next_multithread_broker/next_multithread_broker.cpp b/tests/automatic/consumer/next_multithread_broker/next_multithread_broker.cpp
index 93917960d16747306a8189b0e4be975018d12e59..6bdecfd4b2b23f89c1c9193eeeaa6861864ee225 100644
--- a/tests/automatic/consumer/next_multithread_broker/next_multithread_broker.cpp
+++ b/tests/automatic/consumer/next_multithread_broker/next_multithread_broker.cpp
@@ -53,7 +53,7 @@ Args GetArgs(int argc, char* argv[]) {
 
 void TestAll(const Args& args) {
     asapo::Error err;
-    auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.server, "dummy", true, asapo::SourceCredentials{args.run_name, "", "", args.token}, &err);
+    auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.server, "dummy", true, asapo::SourceCredentials{asapo::SourceType::kProcessed,args.run_name, "", "", args.token}, &err);
     auto group_id = broker->GenerateNewGroupId(&err);
     broker->SetTimeout(10000);
     std::vector<asapo::FileInfos>file_infos(args.nthreads);
diff --git a/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp b/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp
index 75bc3d65bcd9ec7b5cc6b194c888cc669669da35..2b4de25d8b5f5009adc6e0540d8ba5aef4e8e647 100644
--- a/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp
+++ b/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp
@@ -32,7 +32,7 @@ int main(int argc, char* argv[]) {
     std::string authorize_request = "{\"Folder\":\"" + args.folder + "\",\"BeamtimeId\":\"aaa\",\"Token\":\"" + token +
                                     "\"}";
     asapo::Error err;
-    auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.uri_authorizer, "", true, asapo::SourceCredentials{"", "", "", ""}, &err);
+    auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.uri_authorizer, "", true, asapo::SourceCredentials{asapo::SourceType::kProcessed,"", "", "", ""}, &err);
     auto server_broker = static_cast<asapo::ServerDataBroker*>(broker.get());
 
     asapo::HttpCode code;
diff --git a/tests/automatic/full_chain/send_recv_substreams/send_recv_substreams.cpp b/tests/automatic/full_chain/send_recv_substreams/send_recv_substreams.cpp
index 2862a87bf0f148a140b1197e0877cd1008298df1..2df0d99662f5a28f8ecfbf0618c2ac80a38fb4e1 100644
--- a/tests/automatic/full_chain/send_recv_substreams/send_recv_substreams.cpp
+++ b/tests/automatic/full_chain/send_recv_substreams/send_recv_substreams.cpp
@@ -36,7 +36,7 @@ void ProcessAfterSend(asapo::RequestCallbackPayload payload, asapo::Error err) {
 
 BrokerPtr CreateBrokerAndGroup(const Args& args, Error* err) {
     auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.server, ".", true,
-                  asapo::SourceCredentials{args.beamtime_id, "", "", args.token}, err);
+                  asapo::SourceCredentials{asapo::SourceType::kProcessed, args.beamtime_id, "", "", args.token}, err);
     if (*err) {
         return nullptr;
     }
@@ -56,7 +56,8 @@ ProducerPtr CreateProducer(const Args& args) {
     asapo::Error err;
     auto producer = asapo::Producer::Create(args.server, 1,
                                             asapo::RequestHandlerType::kTcp,
-                                            asapo::SourceCredentials{args.beamtime_id, "", "", args.token }, 60, &err);
+                                            asapo::SourceCredentials{asapo::SourceType::kProcessed,
+                                                                     args.beamtime_id, "", "", args.token }, 60, &err);
     if(err) {
         std::cerr << "Cannot start producer. ProducerError: " << err << std::endl;
         exit(EXIT_FAILURE);
diff --git a/tests/automatic/full_chain/send_recv_substreams_python/send_recv_substreams.py b/tests/automatic/full_chain/send_recv_substreams_python/send_recv_substreams.py
index 60ad65264b71f728a5f16c2a8babfb3d03d9c2f4..9829ea3c40284c60f0cc73e031a48f59f917c4b8 100644
--- a/tests/automatic/full_chain/send_recv_substreams_python/send_recv_substreams.py
+++ b/tests/automatic/full_chain/send_recv_substreams_python/send_recv_substreams.py
@@ -28,7 +28,7 @@ def callback(header,err):
 source, beamtime, token = sys.argv[1:]
 
 broker = asapo_consumer.create_server_broker(source,".",True, beamtime,"",token,timeout)
-producer  = asapo_producer.create_producer(source,beamtime,'auto', "", token, 1, 600)
+producer  = asapo_producer.create_producer(source,'processed',beamtime,'auto', "", token, 1, 600)
 producer.set_log_level("debug")
 
 group_id  = broker.generate_group_id()
diff --git a/tests/automatic/full_chain/simple_chain_filegen/check_linux.sh b/tests/automatic/full_chain/simple_chain_filegen/check_linux.sh
index 581c064b6b146ff95e52b35690c7047af8723385..84454700eb41024d5b3f8b1dcfa673b138bcd877 100644
--- a/tests/automatic/full_chain/simple_chain_filegen/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_filegen/check_linux.sh
@@ -16,15 +16,13 @@ facility=test_facility
 year=2019
 receiver_folder=${receiver_root_folder}/${facility}/gpfs/${beamline}/${year}/data/${beamtime_id}
 
-
-mkdir -p /tmp/asapo/test_in/test1/
-mkdir -p /tmp/asapo/test_in/test2/
+mkdir -p /tmp/asapo/test_in/processed
 
 Cleanup() {
     echo cleanup
-    kill $producerid
-    rm -rf /tmp/asapo/test_in/test1
-    rm -rf /tmp/asapo/test_in/test2
+    kill -9 $producerid
+    rm -rf /tmp/asapo/test_in
+    rm -rf ${receiver_folder}
     nomad stop nginx
     nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
     nomad stop receiver
@@ -51,12 +49,16 @@ producerid=`echo $!`
 
 sleep 1
 
-echo hello > /tmp/asapo/test_in/test1/file1
-echo hello > /tmp/asapo/test_in/test1/file2
-echo hello > /tmp/asapo/test_in/test2/file2
+mkdir  /tmp/asapo/test_in/processed/test1
+mkdir  /tmp/asapo/test_in/processed/test2
+
+
+echo hello > /tmp/asapo/test_in/processed/test1/file1
+echo hello > /tmp/asapo/test_in/processed/test1/file2
+echo hello > /tmp/asapo/test_in/processed/test2/file1
 
 $2 ${proxy_address} ${receiver_folder} ${beamtime_id} 2 $token 1000 1 | tee /dev/stderr | grep "Processed 3 file(s)"
 
-test ! -f /tmp/asapo/test_in/test1/file1
-test ! -f /tmp/asapo/test_in/test1/file2
-test ! -f /tmp/asapo/test_in/test2/file2
+test ! -f /tmp/asapo/test_in/processed/test1/file1
+test ! -f /tmp/asapo/test_in/processed/test1/file2
+test ! -f /tmp/asapo/test_in/processed/test2/file1
diff --git a/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat
index 500e19ccdfdafaaa065ad5b4f4898523fcd0642c..235f69460f7eef699ac61a751ff825b4b9a04cac 100644
--- a/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat
@@ -20,15 +20,18 @@ call start_services.bat
 
 REM producer
 mkdir %receiver_folder%
-mkdir  c:\tmp\asapo\test_in\test1
-mkdir  c:\tmp\asapo\test_in\test2
+mkdir  c:\tmp\asapo\test_in\processed
 start /B "" "%1" test.json
 
 ping 1.0.0.0 -n 3 -w 100 > nul
 
-echo hello > c:\tmp\asapo\test_in\test1\file1
-echo hello > c:\tmp\asapo\test_in\test1\file2
-echo hello > c:\tmp\asapo\test_in\test2\file2
+mkdir  c:\tmp\asapo\test_in\processed\test1
+mkdir  c:\tmp\asapo\test_in\processed\test2
+
+
+echo hello > c:\tmp\asapo\test_in\processed\test1\file1
+echo hello > c:\tmp\asapo\test_in\processed\test1\file2
+echo hello > c:\tmp\asapo\test_in\processed\test2\file2
 
 ping 1.0.0.0 -n 10 -w 100 > nul
 
@@ -47,8 +50,7 @@ exit /b 1
 :clean
 call stop_services.bat
 rmdir /S /Q %receiver_root_folder%
-rmdir /S /Q c:\tmp\asapo\test_in\test1
-rmdir /S /Q c:\tmp\asapo\test_in\test2
+rmdir /S /Q c:\tmp\asapo\test_in
 Taskkill /IM "%producer_short_name%" /F
 
 del /f token
diff --git a/tests/automatic/full_chain/simple_chain_filegen/test.json.in b/tests/automatic/full_chain/simple_chain_filegen/test.json.in
index 3c77ba335ac934c2b4ce32e9f531e21b56058709..eddefac2b2a700bbdd4703bd7221b641a99216a1 100644
--- a/tests/automatic/full_chain/simple_chain_filegen/test.json.in
+++ b/tests/automatic/full_chain/simple_chain_filegen/test.json.in
@@ -6,7 +6,7 @@
  "NThreads":1,
  "LogLevel":"debug",
  "RootMonitoredFolder":"@ROOT_PATH@test_in",
- "MonitoredSubFolders":["test1","test2"],
+ "MonitoredSubFolders":["processed"],
  "IgnoreExtensions":["tmp"],
  "WhitelistExtensions":[],
  "RemoveAfterSend":true,
diff --git a/tests/automatic/full_chain/simple_chain_filegen_batches/check_linux.sh b/tests/automatic/full_chain/simple_chain_filegen_batches/check_linux.sh
index 830a312ea727633cb32798529408febbde77495a..fd7b996b10848bd4ed57ce124fde3df9ba0f5174 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_batches/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_filegen_batches/check_linux.sh
@@ -17,14 +17,14 @@ year=2019
 receiver_folder=${receiver_root_folder}/${facility}/gpfs/${beamline}/${year}/data/${beamtime_id}
 
 
-mkdir -p /tmp/asapo/test_in/test1/
-mkdir -p /tmp/asapo/test_in/test2/
+mkdir -p /tmp/asapo/test_in/processed
 
 Cleanup() {
     echo cleanup
-    kill $producerid
-    rm -rf /tmp/asapo/test_in/test1
-    rm -rf /tmp/asapo/test_in/test2
+    kill -9 $producerid
+    rm -rf /tmp/asapo/test_in
+    rm -rf ${receiver_folder}
+
     nomad stop nginx
     nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
     nomad stop receiver
@@ -45,6 +45,10 @@ nomad run broker.nmd
 
 sleep 1
 
+
+mkdir  /tmp/asapo/test_in/processed/test1
+mkdir  /tmp/asapo/test_in/processed/test2
+
 #producer
 mkdir -p ${receiver_folder}
 $1 test.json &
@@ -52,15 +56,15 @@ producerid=`echo $!`
 
 sleep 1
 
-echo hello > /tmp/asapo/test_in/test1/file1
-echo hello > /tmp/asapo/test_in/test1/file2
-echo hello > /tmp/asapo/test_in/test2/file2
+echo hello > /tmp/asapo/test_in/processed/test1/file1
+echo hello > /tmp/asapo/test_in/processed/test1/file2
+echo hello > /tmp/asapo/test_in/processed/test2/file1
 
 $2 ${proxy_address} ${receiver_folder} ${beamtime_id} 2 $token 2000 1 1 > out
 cat out
 cat out   | grep "Processed 1 dataset(s)"
 cat out   | grep "with 3 file(s)"
 
-test -f /tmp/asapo/test_in/test1/file1
-test -f /tmp/asapo/test_in/test1/file2
-test -f /tmp/asapo/test_in/test2/file2
+test -f /tmp/asapo/test_in/processed/test1/file1
+test -f /tmp/asapo/test_in/processed/test1/file2
+test -f /tmp/asapo/test_in/processed/test2/file1
diff --git a/tests/automatic/full_chain/simple_chain_filegen_batches/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen_batches/check_windows.bat
index 5c5e7017ff72cda4c99cce9c5f30753bbdb60bfc..4ed07ca89add90ce6d244a04f6d0eca5af8f30c4 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_batches/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen_batches/check_windows.bat
@@ -20,15 +20,16 @@ call start_services.bat
 
 REM producer
 mkdir %receiver_folder%
-mkdir  c:\tmp\asapo\test_in\test1
-mkdir  c:\tmp\asapo\test_in\test2
+mkdir  c:\tmp\asapo\test_in\processed
 start /B "" "%1" test.json
 
 ping 1.0.0.0 -n 3 -w 100 > nul
+mkdir  c:\tmp\asapo\test_in\processed\test1
+mkdir  c:\tmp\asapo\test_in\processed\test2
 
-echo hello > c:\tmp\asapo\test_in\test1\file1
-echo hello > c:\tmp\asapo\test_in\test1\file2
-echo hello > c:\tmp\asapo\test_in\test2\file2
+echo hello > c:\tmp\asapo\test_in\processed\test1\file1
+echo hello > c:\tmp\asapo\test_in\processed\test1\file2
+echo hello > c:\tmp\asapo\test_in\processed\test2\file2
 
 ping 1.0.0.0 -n 10 -w 100 > nul
 
@@ -48,8 +49,7 @@ exit /b 1
 :clean
 call stop_services.bat
 rmdir /S /Q %receiver_root_folder%
-rmdir /S /Q c:\tmp\asapo\test_in\test1
-rmdir /S /Q c:\tmp\asapo\test_in\test2
+rmdir /S /Q c:\tmp\asapo\test_in
 Taskkill /IM "%producer_short_name%" /F
 
 del /f token
diff --git a/tests/automatic/full_chain/simple_chain_filegen_batches/test.json.in b/tests/automatic/full_chain/simple_chain_filegen_batches/test.json.in
index f1323b8ee05f6d835a632779eb362354d9595b74..0b760c2ea8d92034668462fb60320c45c9789b2b 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_batches/test.json.in
+++ b/tests/automatic/full_chain/simple_chain_filegen_batches/test.json.in
@@ -6,7 +6,7 @@
  "NThreads":1,
  "LogLevel":"debug",
  "RootMonitoredFolder":"@ROOT_PATH@test_in",
- "MonitoredSubFolders":["test1","test2"],
+ "MonitoredSubFolders":["processed"],
  "IgnoreExtensions":["tmp"],
  "WhitelistExtensions":[],
  "RemoveAfterSend":false,
diff --git a/tests/automatic/full_chain/simple_chain_filegen_multisource/check_linux.sh b/tests/automatic/full_chain/simple_chain_filegen_multisource/check_linux.sh
index 33709677881f49282e0c128a2dd7ec81c4d74ec9..fbf69e7804739cbfd1a8596c2d8a188a9ed44c4b 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_multisource/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_filegen_multisource/check_linux.sh
@@ -17,15 +17,13 @@ year=2019
 receiver_folder=${receiver_root_folder}/${facility}/gpfs/${beamline}/${year}/data/${beamtime_id}
 
 
-mkdir -p /tmp/asapo/test_in/test1/
-mkdir -p /tmp/asapo/test_in/test2/
-
 Cleanup() {
     echo cleanup
-    kill $producerid1
-    kill $producerid2
-    rm -rf /tmp/asapo/test_in/test1
-    rm -rf /tmp/asapo/test_in/test2
+    kill -9 $producerid1
+    kill -9 $producerid2
+    rm -rf /tmp/asapo/test_in1
+    rm -rf /tmp/asapo/test_in2
+    rm -rf ${receiver_folder}
     nomad stop nginx
     nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
     nomad stop receiver
@@ -46,6 +44,10 @@ nomad run broker.nmd
 
 sleep 1
 
+
+mkdir -p /tmp/asapo/test_in1/processed
+mkdir -p /tmp/asapo/test_in2/processed
+
 mkdir -p ${receiver_folder}
 #producer1
 $1 test1.json &
@@ -56,11 +58,13 @@ producerid2=`echo $!`
 
 
 sleep 1
+mkdir -p /tmp/asapo/test_in1/processed/test1
+mkdir -p /tmp/asapo/test_in2/processed/test2
 
-echo hello > /tmp/asapo/test_in/test1/file1
-echo hello > /tmp/asapo/test_in/test1/file2
-echo hello > /tmp/asapo/test_in/test2/file1
-echo hello > /tmp/asapo/test_in/test2/file2
+echo hello > /tmp/asapo/test_in1/processed/test1/file1
+echo hello > /tmp/asapo/test_in1/processed/test1/file2
+echo hello > /tmp/asapo/test_in2/processed/test2/file1
+echo hello > /tmp/asapo/test_in2/processed/test2/file2
 
 $2 ${proxy_address} ${receiver_folder} ${beamtime_id} 2 $token 2000 1 1 > out
 cat out
diff --git a/tests/automatic/full_chain/simple_chain_filegen_multisource/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen_multisource/check_windows.bat
index a8cf1670079f3c700aa5e395cc4dd8d8fbb1f1eb..69c604136717372e7a293db2a0473c3bfb0dc2f9 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_multisource/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen_multisource/check_windows.bat
@@ -19,8 +19,8 @@ echo db.%beamtime_id%_detector.insert({dummy:1}) | %mongo_exe% %beamtime_id%_det
 call start_services.bat
 
 mkdir %receiver_folder%
-mkdir  c:\tmp\asapo\test_in\test1
-mkdir  c:\tmp\asapo\test_in\test2
+mkdir  c:\tmp\asapo\test_in1\processed
+mkdir  c:\tmp\asapo\test_in2\processed
 
 REM producer1
 start /B "" "%1" test1.json
@@ -30,11 +30,13 @@ start /B "" "%1" test2.json
 
 
 ping 1.0.0.0 -n 3 -w 100 > nul
+mkdir  c:\tmp\asapo\test_in1\processed\test1
+mkdir  c:\tmp\asapo\test_in2\processed\test2
 
-echo hello > c:\tmp\asapo\test_in\test1\file1
-echo hello > c:\tmp\asapo\test_in\test1\file2
-echo hello > c:\tmp\asapo\test_in\test2\file1
-echo hello > c:\tmp\asapo\test_in\test2\file2
+echo hello > c:\tmp\asapo\test_in1\processed\test1\file1
+echo hello > c:\tmp\asapo\test_in1\processed\test1\file2
+echo hello > c:\tmp\asapo\test_in2\processed\test2\file1
+echo hello > c:\tmp\asapo\test_in2\processed\test2\file2
 
 ping 1.0.0.0 -n 10 -w 100 > nul
 
@@ -54,8 +56,8 @@ exit /b 1
 :clean
 call stop_services.bat
 rmdir /S /Q %receiver_root_folder%
-rmdir /S /Q c:\tmp\asapo\test_in\test1
-rmdir /S /Q c:\tmp\asapo\test_in\test2
+rmdir /S /Q c:\tmp\asapo\test_in1
+rmdir /S /Q c:\tmp\asapo\test_in2
 Taskkill /IM "%producer_short_name%" /F
 
 del /f token
diff --git a/tests/automatic/full_chain/simple_chain_filegen_multisource/test.json.in b/tests/automatic/full_chain/simple_chain_filegen_multisource/test.json.in
index eadb0bb3eeb9603bf30bf3e0d0de13376095de82..09aa803aa41948346be1f951e85383364f6827d2 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_multisource/test.json.in
+++ b/tests/automatic/full_chain/simple_chain_filegen_multisource/test.json.in
@@ -5,8 +5,8 @@
  "Mode":"tcp",
  "NThreads":1,
  "LogLevel":"debug",
- "RootMonitoredFolder":"@ROOT_PATH@test_in",
- "MonitoredSubFolders":["test@ID@"],
+ "RootMonitoredFolder":"@ROOT_PATH@test_in@ID@",
+ "MonitoredSubFolders":["processed"],
  "IgnoreExtensions":["tmp"],
  "WhitelistExtensions":[],
  "RemoveAfterSend":true,
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_linux.sh b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_linux.sh
index 3a60b5c862ac86f91d3b7c14095babfd749f2ee0..63be72aab66a948b90a96a8cefa4ef6df2122f56 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_linux.sh
@@ -17,15 +17,14 @@ year=2019
 receiver_folder=${receiver_root_folder}/${facility}/gpfs/${beamline}/${year}/data/${beamtime_id}
 
 
-mkdir -p /tmp/asapo/test_in/test1/
-mkdir -p /tmp/asapo/test_in/test2/
+mkdir -p /tmp/asapo/test_in/processed
 
 Cleanup() {
     echo cleanup
-    kill $producerid
+    kill -9 $producerid
+    rm -rf /tmp/asapo/test_in
+    rm -rf ${receiver_folder}
     influx -execute "drop database ${monitor_database_name}"
-    rm -rf /tmp/asapo/test_in/test1
-    rm -rf /tmp/asapo/test_in/test2
     nomad stop nginx
     nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
     nomad stop receiver
@@ -52,10 +51,12 @@ $1 test.json &
 producerid=`echo $!`
 
 sleep 1
+mkdir  /tmp/asapo/test_in/processed/test1
+mkdir  /tmp/asapo/test_in/processed/test2
 
-echo -n hello1 > /tmp/asapo/test_in/test1/file1
-echo -n hello2 > /tmp/asapo/test_in/test1/file2
-echo -n hello3 > /tmp/asapo/test_in/test2/file2
+echo -n hello1 > /tmp/asapo/test_in/processed/test1/file1
+echo -n hello2 > /tmp/asapo/test_in/processed/test1/file2
+echo -n hello3 > /tmp/asapo/test_in/processed/test2/file1
 
 $2 ${proxy_address} ${receiver_folder} ${beamtime_id} 2 $token 1000 0 > out.txt
 cat out.txt
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_windows.bat
index 1e3fcd21031106b6e5fd830611bdfe60ffcd8ea6..62603c713361eafefd2ae5ad9c364ee68e68619f 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_windows.bat
@@ -20,15 +20,16 @@ call start_services.bat
 
 REM producer
 mkdir %receiver_folder%
-mkdir  c:\tmp\asapo\test_in\test1
-mkdir  c:\tmp\asapo\test_in\test2
+mkdir  c:\tmp\asapo\test_in\processed
 start /B "" "%1" test.json
 
 ping 1.0.0.0 -n 3 -w 100 > nul
 
-echo hello1 > c:\tmp\asapo\test_in\test1\file1
-echo hello2 > c:\tmp\asapo\test_in\test1\file2
-echo hello3 > c:\tmp\asapo\test_in\test2\file2
+mkdir  c:\tmp\asapo\test_in\processed\test1
+mkdir  c:\tmp\asapo\test_in\processed\test2
+echo hello1 > c:\tmp\asapo\test_in\processed\test1\file1
+echo hello2 > c:\tmp\asapo\test_in\processed\test1\file2
+echo hello3 > c:\tmp\asapo\test_in\processed\test2\file2
 
 ping 1.0.0.0 -n 10 -w 100 > nul
 
@@ -51,8 +52,7 @@ exit /b 1
 :clean
 call stop_services.bat
 rmdir /S /Q %receiver_root_folder%
-rmdir /S /Q c:\tmp\asapo\test_in\test1
-rmdir /S /Q c:\tmp\asapo\test_in\test2
+rmdir /S /Q c:\tmp\asapo\test_in
 Taskkill /IM "%producer_short_name%" /F
 del /f out.txt
 
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/test.json.in b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/test.json.in
index 9addfcceb52b30c4449268cb140fa04700306a59..ed41c425ce44f356fecb72e6c17820cae9ef7b69 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/test.json.in
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/test.json.in
@@ -6,7 +6,7 @@
  "NThreads":1,
  "LogLevel":"debug",
  "RootMonitoredFolder":"@ROOT_PATH@test_in",
- "MonitoredSubFolders":["test1","test2"],
+ "MonitoredSubFolders":["processed"],
  "IgnoreExtensions":["tmp"],
  "WhitelistExtensions":[],
  "RemoveAfterSend":true,
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_linux.sh b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_linux.sh
index 4acd7a74d0bd915bcce1538f9348e43358900cbe..798a011770f8573cfd85d2fe79ad6be16b40ca20 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_linux.sh
@@ -17,14 +17,14 @@ year=2019
 receiver_folder=${receiver_root_folder}/${facility}/gpfs/${beamline}/${year}/data/${beamtime_id}
 
 
-mkdir -p /tmp/asapo/test_in/test1/
-mkdir -p /tmp/asapo/test_in/test2/
+mkdir -p /tmp/asapo/test_in/processed
 
 Cleanup() {
     echo cleanup
-    kill $producerid
-    rm -rf /tmp/asapo/test_in/test1
-    rm -rf /tmp/asapo/test_in/test2
+    kill -9 $producerid
+    rm -rf /tmp/asapo/test_in
+    rm -rf ${receiver_folder}
+    influx -execute "drop database ${monitor_database_name}"
     nomad stop nginx
     nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
     nomad stop receiver
@@ -51,10 +51,12 @@ $1 test.json &
 producerid=`echo $!`
 
 sleep 1
+mkdir  /tmp/asapo/test_in/processed/test1
+mkdir  /tmp/asapo/test_in/processed/test2
 
-echo -n hello1 > /tmp/asapo/test_in/test1/file1
-echo -n hello2 > /tmp/asapo/test_in/test1/file2
-echo -n hello3 > /tmp/asapo/test_in/test2/file2
+echo -n hello1 > /tmp/asapo/test_in/processed/test1/file1
+echo -n hello2 > /tmp/asapo/test_in/processed/test1/file2
+echo -n hello3 > /tmp/asapo/test_in/processed/test2/file1
 
 $2 ${proxy_address} ${receiver_folder} ${beamtime_id} 2 $token 1000 0 > out.txt
 cat out.txt
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_windows.bat
index dc674898e8a6de37125f77147c700529e6628394..379e69a1d5d65f0f24ce8e77414915eceeab4bfb 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_windows.bat
@@ -20,15 +20,17 @@ call start_services.bat
 
 REM producer
 mkdir %receiver_folder%
-mkdir  c:\tmp\asapo\test_in\test1
-mkdir  c:\tmp\asapo\test_in\test2
+mkdir  c:\tmp\asapo\test_in\processed
 start /B "" "%1" test.json
 
 ping 1.0.0.0 -n 3 -w 100 > nul
 
-echo hello1 > c:\tmp\asapo\test_in\test1\file1
-echo hello2 > c:\tmp\asapo\test_in\test1\file2
-echo hello3 > c:\tmp\asapo\test_in\test2\file2
+mkdir  c:\tmp\asapo\test_in\processed\test1
+mkdir  c:\tmp\asapo\test_in\processed\test2
+echo hello1 > c:\tmp\asapo\test_in\processed\test1\file1
+echo hello2 > c:\tmp\asapo\test_in\processed\test1\file2
+echo hello3 > c:\tmp\asapo\test_in\processed\test2\file2
+
 
 ping 1.0.0.0 -n 10 -w 100 > nul
 
@@ -51,8 +53,7 @@ exit /b 1
 :clean
 call stop_services.bat
 rmdir /S /Q %receiver_root_folder%
-rmdir /S /Q c:\tmp\asapo\test_in\test1
-rmdir /S /Q c:\tmp\asapo\test_in\test2
+rmdir /S /Q c:\tmp\asapo\test_in
 Taskkill /IM "%producer_short_name%" /F
 del /f out.txt
 
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/test.json.in b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/test.json.in
index 9addfcceb52b30c4449268cb140fa04700306a59..ed41c425ce44f356fecb72e6c17820cae9ef7b69 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/test.json.in
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/test.json.in
@@ -6,7 +6,7 @@
  "NThreads":1,
  "LogLevel":"debug",
  "RootMonitoredFolder":"@ROOT_PATH@test_in",
- "MonitoredSubFolders":["test1","test2"],
+ "MonitoredSubFolders":["processed"],
  "IgnoreExtensions":["tmp"],
  "WhitelistExtensions":[],
  "RemoveAfterSend":true,
diff --git a/tests/automatic/full_chain/two_streams/check_windows.bat b/tests/automatic/full_chain/two_streams/check_windows.bat
index fd2b9268da99a3c4ab0f715ae04b05fc0d095895..43ef2ab6c529c154d9fe755caec622be5cdf445a 100644
--- a/tests/automatic/full_chain/two_streams/check_windows.bat
+++ b/tests/automatic/full_chain/two_streams/check_windows.bat
@@ -32,8 +32,6 @@ findstr /i /l /c:"Processed 1000 file(s)"  out1.txt || goto :error
 type out2.txt
 findstr /i /l /c:"Processed 900 file(s)"  out2.txt || goto :error
 
-
-
 goto :clean
 
 :error
diff --git a/tests/automatic/producer/aai/producer_aai.py b/tests/automatic/producer/aai/producer_aai.py
index 54bc6f7758aa18a9e72de3fa91005df18037581a..1734f757f158fddc4692063cd9a23c4dae8c39fd 100644
--- a/tests/automatic/producer/aai/producer_aai.py
+++ b/tests/automatic/producer/aai/producer_aai.py
@@ -26,19 +26,19 @@ def callback(header,err):
     lock.release()
 
 
-producer  = asapo_producer.create_producer(endpoint,'auto',beamline, stream, token, nthreads, 60)
+producer  = asapo_producer.create_producer(endpoint,'processed','auto',beamline, stream, token, nthreads, 60)
 
 producer.set_log_level("debug")
 
 #send single file
-producer.send_file(1, local_path = "./file1", exposed_path = stream+"/"+"file1", user_meta = '{"test_key":"test_val"}', callback = callback)
+producer.send_file(1, local_path = "./file1", exposed_path = "processed/"+stream+"/"+"file1", user_meta = '{"test_key":"test_val"}', callback = callback)
 
 producer.wait_requests_finished(10000)
 
 time.sleep(2)
 
 #send single file to other beamtime - should be warning on duplicated request (same beamtime, no reauthorization)
-producer.send_file(1, local_path = "./file1", exposed_path = stream+"/"+"file1", user_meta = '{"test_key":"test_val"}', callback = callback)
+producer.send_file(1, local_path = "./file1", exposed_path = "processed/"+stream+"/"+"file1", user_meta = '{"test_key":"test_val"}', callback = callback)
 producer.wait_requests_finished(10000)
 
 
@@ -54,7 +54,7 @@ with open(fname, 'w') as outfile:
 time.sleep(2)
 
 #send single file to other beamtime - now ok since receiver authorization timed out
-producer.send_file(1, local_path = "./file1", exposed_path = stream+"/"+"file1", user_meta = '{"test_key":"test_val"}', callback = callback)
+producer.send_file(1, local_path = "./file1", exposed_path = "processed/"+stream+"/"+"file1", user_meta = '{"test_key":"test_val"}', callback = callback)
 
 producer.wait_requests_finished(10000)
 
diff --git a/tests/automatic/producer/beamtime_metadata/beamtime_metadata.cpp b/tests/automatic/producer/beamtime_metadata/beamtime_metadata.cpp
index 5e00ea491ea6a11a9e999003a37fae3d42e9381e..d8951bf79f91d7ae1fcbfb809a420a19f71a27ea 100644
--- a/tests/automatic/producer/beamtime_metadata/beamtime_metadata.cpp
+++ b/tests/automatic/producer/beamtime_metadata/beamtime_metadata.cpp
@@ -69,7 +69,8 @@ std::unique_ptr<asapo::Producer> CreateProducer(const Args& args) {
     auto producer = asapo::Producer::Create(args.discovery_service_endpoint, 1,
                                             args.mode == 0 ? asapo::RequestHandlerType::kTcp
                                             : asapo::RequestHandlerType::kFilesystem,
-                                            asapo::SourceCredentials{args.beamtime_id, "", "", ""}, 60, &err);
+                                            asapo::SourceCredentials{asapo::SourceType::kProcessed,
+                                                                     args.beamtime_id, "", "", ""}, 60, &err);
     if (err) {
         std::cerr << "Cannot start producer. ProducerError: " << err << std::endl;
         exit(EXIT_FAILURE);
diff --git a/tests/automatic/producer/python_api/producer_api.py b/tests/automatic/producer/python_api/producer_api.py
index 4d063364180f3438f35397fd6dea25a65b862ea1..389c2350a74845b0f72b3f059b81d3fa8d6fd9c8 100644
--- a/tests/automatic/producer/python_api/producer_api.py
+++ b/tests/automatic/producer/python_api/producer_api.py
@@ -5,6 +5,7 @@ import sys
 import time
 import numpy as np
 import threading
+
 lock = threading.Lock()
 
 stream = sys.argv[1]
@@ -14,66 +15,73 @@ endpoint = sys.argv[3]
 token = ""
 nthreads = 8
 
-def assert_eq(val,expected,name):
-    print ("asserting eq for "+name)
+
+def assert_eq(val, expected, name):
+    print("asserting eq for " + name)
     if val != expected:
-        print ("error at "+name)
-        print ('val: ', val,' expected: ',expected)
+        print("error at " + name)
+        print('val: ', val, ' expected: ', expected)
         sys.exit(1)
 
-def callback(header,err):
-    lock.acquire() # to print
-    if isinstance(err,asapo_producer.AsapoServerWarning):
-        print("successfuly sent, but with warning from server: ",header,err)
+
+def callback(header, err):
+    lock.acquire()  # to print
+    if isinstance(err, asapo_producer.AsapoServerWarning):
+        print("successfuly sent, but with warning from server: ", header, err)
     elif err is not None:
-        print("could not sent: ",header,err)
+        print("could not sent: ", header, err)
     else:
-        print ("successfuly sent: ",header)
+        print("successfuly sent: ", header)
     lock.release()
 
-producer  = asapo_producer.create_producer(endpoint,beamtime,'auto', stream, token, nthreads,60)
+
+producer = asapo_producer.create_producer(endpoint, 'processed', beamtime, 'auto', stream, token, nthreads, 60)
 
 producer.set_log_level("debug")
 
-#send single file
-producer.send_file(1, local_path = "./file1", exposed_path = stream+"/"+"file1", user_meta = '{"test_key":"test_val"}', callback = callback)
+# send single file
+producer.send_file(1, local_path="./file1", exposed_path="processed/" + stream + "/" + "file1",
+                   user_meta='{"test_key":"test_val"}', callback=callback)
 
-#send single file without callback
-producer.send_file(10, local_path = "./file1", exposed_path = stream+"/"+"file10", user_meta = '{"test_key":"test_val"}',callback=None)
+# send single file without callback
+producer.send_file(10, local_path="./file1", exposed_path="processed/" + stream + "/" + "file10",
+                   user_meta='{"test_key":"test_val"}', callback=None)
 
-#send subsets
-producer.send_file(2, local_path = "./file1", exposed_path = stream+"/"+"file2",subset=(2,2),user_meta = '{"test_key":"test_val"}', callback = callback)
-producer.send_file(3, local_path = "./file1", exposed_path = stream+"/"+"file3",subset=(2,2),user_meta = '{"test_key":"test_val"}', callback = callback)
+# send subsets
+producer.send_file(2, local_path="./file1", exposed_path="processed/" + stream + "/" + "file2", subset=(2, 2),
+                   user_meta='{"test_key":"test_val"}', callback=callback)
+producer.send_file(3, local_path="./file1", exposed_path="processed/" + stream + "/" + "file3", subset=(2, 2),
+                   user_meta='{"test_key":"test_val"}', callback=callback)
 
-#send meta only
-producer.send_file(3, local_path = "./not_exist",exposed_path = "./whatever",
-                         ingest_mode = asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback = callback)
+# send meta only
+producer.send_file(3, local_path="./not_exist", exposed_path="./whatever",
+                   ingest_mode=asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback=callback)
 
-data = np.arange(10,dtype=np.float64)
+data = np.arange(10, dtype=np.float64)
 
-#send data from array
-producer.send_data(4, stream+"/"+"file5",data,
-                         ingest_mode = asapo_producer.DEFAULT_INGEST_MODE, callback = callback)
+# send data from array
+producer.send_data(4, "processed/" + stream + "/" + "file5", data,
+                   ingest_mode=asapo_producer.DEFAULT_INGEST_MODE, callback=callback)
 
-#send data from string
-producer.send_data(5, stream+"/"+"file6",b"hello",
-                         ingest_mode = asapo_producer.DEFAULT_INGEST_MODE, callback = callback)
+# send data from string
+producer.send_data(5, "processed/" + stream + "/" + "file6", b"hello",
+                   ingest_mode=asapo_producer.DEFAULT_INGEST_MODE, callback=callback)
 
-#send metadata only
-producer.send_data(6, stream+"/"+"file7",None,
-                         ingest_mode = asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback = callback)
+# send metadata only
+producer.send_data(6, "processed/" + stream + "/" + "file7", None,
+                   ingest_mode=asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback=callback)
 
-#send single file/wrong filename
-producer.send_file(1, local_path = "./file2", exposed_path = stream+"/"+"file1", callback = callback)
+# send single file/wrong filename
+producer.send_file(1, local_path="./file2", exposed_path="processed/" + stream + "/" + "file1", callback=callback)
 
 x = np.array([[1, 2, 3], [4, 5, 6]], np.float32)
-producer.send_data(8, stream+"/"+"file8",x,
-                         ingest_mode = asapo_producer.DEFAULT_INGEST_MODE, callback = callback)
+producer.send_data(8, "processed/" + stream + "/" + "file8", x,
+                   ingest_mode=asapo_producer.DEFAULT_INGEST_MODE, callback=callback)
 
 try:
     x = x.T
-    producer.send_data(8, stream+"/"+"file8",x,
-                         ingest_mode = asapo_producer.DEFAULT_INGEST_MODE, callback = callback)
+    producer.send_data(8, "processed/" + stream + "/" + "file8", x,
+                       ingest_mode=asapo_producer.DEFAULT_INGEST_MODE, callback=callback)
 except asapo_producer.AsapoWrongInputError as e:
     print(e)
 else:
@@ -81,55 +89,52 @@ else:
     sys.exit(1)
 
 try:
-    producer.send_file(0, local_path = "./not_exist",exposed_path = "./whatever",
-                       ingest_mode = asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback = callback)
+    producer.send_file(0, local_path="./not_exist", exposed_path="./whatever",
+                       ingest_mode=asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback=callback)
 except asapo_producer.AsapoWrongInputError as e:
     print(e)
 else:
     print("should be error sending id 0 ")
     sys.exit(1)
 
-#send to another substream
-producer.send_data(1, stream+"/"+"file9",None,
-                   ingest_mode = asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, substream="stream", callback = callback)
+# send to another substream
+producer.send_data(1, "processed/" + stream + "/" + "file9", None,
+                   ingest_mode=asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, substream="stream", callback=callback)
 
 # wait normal requests finished before sending duplicates
 
 producer.wait_requests_finished(50000)
 
-#send single file once again
-producer.send_file(1, local_path = "./file1", exposed_path = stream+"/"+"file1", user_meta = '{"test_key":"test_val"}', callback = callback)
-#send metadata only once again
-producer.send_data(6, stream+"/"+"file7",None,
-                         ingest_mode = asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback = callback)
+# send single file once again
+producer.send_file(1, local_path="./file1", exposed_path="processed/" + stream + "/" + "file1",
+                   user_meta='{"test_key":"test_val"}', callback=callback)
+# send metadata only once again
+producer.send_data(6, "processed/" + stream + "/" + "file7", None,
+                   ingest_mode=asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback=callback)
 
-#send same id different data
-producer.send_file(1, local_path = "./file1", exposed_path = stream+"/"+"file1", user_meta = '{"test_key1":"test_val"}', callback = callback)#send same id different data
-producer.send_data(6, stream+"/"+"file8",None,
-                         ingest_mode = asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback = callback)
+# send same id different data
+producer.send_file(1, local_path="./file1", exposed_path="processed/" + stream + "/" + "file1",
+                   user_meta='{"test_key1":"test_val"}', callback=callback)  # send same id different data
+producer.send_data(6, "processed/" + stream + "/" + "file8", None,
+                   ingest_mode=asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback=callback)
 
 producer.wait_requests_finished(50000)
 n = producer.get_requests_queue_size()
-assert_eq(n,0,"requests in queue")
+assert_eq(n, 0, "requests in queue")
 
 info = producer.stream_info()
-assert_eq(info['lastId'],10,"last id")
+assert_eq(info['lastId'], 10, "last id")
 
 info = producer.stream_info('stream')
-assert_eq(info['lastId'],1,"last id from different substream")
-
+assert_eq(info['lastId'], 1, "last id from different substream")
 
 # create with error
 try:
-    producer  = asapo_producer.create_producer(endpoint,beamtime,'auto', stream, token, 0,0)
+    producer = asapo_producer.create_producer(endpoint,'processed', beamtime, 'auto', stream, token, 0, 0)
 except asapo_producer.AsapoWrongInputError as e:
     print(e)
 else:
     print("should be error")
     sys.exit(1)
 
-
-print ('Finished successfully')
-
-
-
+print('Finished successfully')
diff --git a/tests/automatic/producer_receiver/transfer_datasets/check_linux.sh b/tests/automatic/producer_receiver/transfer_datasets/check_linux.sh
index 27dc92b8656704a96ddd5699bbba98d0a73043d2..760641d804228972045bae8bc8860a32ce4a4d91 100644
--- a/tests/automatic/producer_receiver/transfer_datasets/check_linux.sh
+++ b/tests/automatic/producer_receiver/transfer_datasets/check_linux.sh
@@ -40,8 +40,8 @@ mkdir -p ${receiver_folder}
 
 $1 localhost:8400 ${beamtime_id} 100 1 1  0 30 3
 
-ls -ln ${receiver_folder}/1_1 | awk '{ print $5 }'| grep 100000
-ls -ln ${receiver_folder}/1_2 | awk '{ print $5 }'| grep 100000
-ls -ln ${receiver_folder}/1_3 | awk '{ print $5 }'| grep 100000
+ls -ln ${receiver_folder}/processed/1_1 | awk '{ print $5 }'| grep 100000
+ls -ln ${receiver_folder}/processed/1_2 | awk '{ print $5 }'| grep 100000
+ls -ln ${receiver_folder}/processed/1_3 | awk '{ print $5 }'| grep 100000
 
 echo 'db.data_default.find({"images._id":{$gt:0}},{"images.name":1})' | mongo asapo_test_detector | grep 1_1 | grep 1_2 | grep 1_3
\ No newline at end of file
diff --git a/tests/automatic/producer_receiver/transfer_datasets/check_windows.bat b/tests/automatic/producer_receiver/transfer_datasets/check_windows.bat
index c675100ba2f845d2329cb97c658f9aa4e38a34de..7ae8f9792ea89892dcb89ae446dcf94f71d21614 100644
--- a/tests/automatic/producer_receiver/transfer_datasets/check_windows.bat
+++ b/tests/automatic/producer_receiver/transfer_datasets/check_windows.bat
@@ -15,13 +15,13 @@ mkdir %receiver_folder%
 
 ping 1.0.0.0 -n 1 -w 100 > nul
 
-FOR /F "usebackq" %%A IN ('%receiver_folder%\1_1') DO set size=%%~zA
+FOR /F "usebackq" %%A IN ('%receiver_folder%\processed\1_1') DO set size=%%~zA
 if %size% NEQ 100000 goto :error
 
-FOR /F "usebackq" %%A IN ('%receiver_folder%\1_2') DO set size=%%~zA
+FOR /F "usebackq" %%A IN ('%receiver_folder%\processed\1_2') DO set size=%%~zA
 if %size% NEQ 100000 goto :error
 
-FOR /F "usebackq" %%A IN ('%receiver_folder%\1_3') DO set size=%%~zA
+FOR /F "usebackq" %%A IN ('%receiver_folder%\processed\1_3') DO set size=%%~zA
 if %size% NEQ 100000 goto :error
 
 
diff --git a/tests/automatic/producer_receiver/transfer_single_file/check_linux.sh b/tests/automatic/producer_receiver/transfer_single_file/check_linux.sh
index 0b764bdd5711e8ff2246baa09bc001155c277080..f90b44fbde5c15a138e23e4acf6adf2bc91e9a63 100644
--- a/tests/automatic/producer_receiver/transfer_single_file/check_linux.sh
+++ b/tests/automatic/producer_receiver/transfer_single_file/check_linux.sh
@@ -39,6 +39,6 @@ sleep 1
 
 $1 localhost:8400 ${beamtime_id} 100 1 1  0 30
 
-ls -ln ${receiver_folder}/1 | awk '{ print $5 }'| grep 100000
+ls -ln ${receiver_folder}/processed/1 | awk '{ print $5 }'| grep 100000
 
 $1 localhost:8400 wrong_beamtime_id 100 1 1 0 1 2>&1 | tee /dev/stderr | grep "authorization"
diff --git a/tests/automatic/producer_receiver/transfer_single_file/check_windows.bat b/tests/automatic/producer_receiver/transfer_single_file/check_windows.bat
index 75c4b4c2a45e4c6b5c3b1421bd6dd0e33f4dc5b9..96226b7b346196ae8da728587b8db1982c058454 100644
--- a/tests/automatic/producer_receiver/transfer_single_file/check_windows.bat
+++ b/tests/automatic/producer_receiver/transfer_single_file/check_windows.bat
@@ -15,7 +15,7 @@ mkdir %receiver_folder%
 
 ping 1.0.0.0 -n 1 -w 100 > nul
 
-FOR /F "usebackq" %%A IN ('%receiver_folder%\1') DO set size=%%~zA
+FOR /F "usebackq" %%A IN ('%receiver_folder%\processed\1') DO set size=%%~zA
 if %size% NEQ 100000 goto :error
 
 "%1" localhost:8400 wrong_id 100 1 1 0 2 2>1 | findstr /c:"authorization"  || goto :error
diff --git a/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_linux.sh b/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_linux.sh
index 59e9600aa36a97a8500811f1e74c628b303d2c69..3eb89fba8de7fe5fd7b7f7774f933dbdf367d7e8 100644
--- a/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_linux.sh
+++ b/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_linux.sh
@@ -45,4 +45,4 @@ cat out
 cat out | grep '"buf_id" : 0'
 cat out | grep user_meta
 
-ls -ln ${receiver_folder}/1 | awk '{ print $5 }'| grep 60000000
+ls -ln ${receiver_folder}/processed/1 | awk '{ print $5 }'| grep 60000000
diff --git a/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_windows.bat b/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_windows.bat
index bc501b842e9d4a4e61aab6630ae8632202cbcdae..309236645ef298447f405c80657e0efb53fc7586 100644
--- a/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_windows.bat
+++ b/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_windows.bat
@@ -15,7 +15,7 @@ mkdir %receiver_folder%
 
 ping 1.0.0.0 -n 1 -w 100 > nul
 
-FOR /F "usebackq" %%A IN ('%receiver_folder%\1') DO set size=%%~zA
+FOR /F "usebackq" %%A IN ('%receiver_folder%\processed\1') DO set size=%%~zA
 if %size% NEQ 60000000 goto :error
 
 echo db.data_default.find({"_id":1}) |  %mongo_exe% %beamtime_id%_detector  > out
diff --git a/tests/manual/performance_broker_receiver/getlast_broker.cpp b/tests/manual/performance_broker_receiver/getlast_broker.cpp
index 1adcda25b2a1edee2db8379dfabc6229fa565987..1626dc51d32ba5a1a08514bcef7e8d46747329bb 100644
--- a/tests/manual/performance_broker_receiver/getlast_broker.cpp
+++ b/tests/manual/performance_broker_receiver/getlast_broker.cpp
@@ -48,7 +48,7 @@ std::vector<std::thread> StartThreads(const Args& params,
         asapo::FileInfo fi;
         Error err;
         auto broker = asapo::DataBrokerFactory::CreateServerBroker(params.server, params.file_path, true,
-                      asapo::SourceCredentials{params.beamtime_id, "", "", params.token}, &err);
+                      asapo::SourceCredentials{asapo::SourceType::kProcessed,params.beamtime_id, "", "", params.token}, &err);
         broker->SetTimeout((uint64_t) params.timeout_ms);
         asapo::FileData data;
 
diff --git a/tests/manual/producer_cpp/CMakeLists.txt b/tests/manual/producer_cpp/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..07d230c4b413da52d97373a31ed4e31d6a517dfd
--- /dev/null
+++ b/tests/manual/producer_cpp/CMakeLists.txt
@@ -0,0 +1,11 @@
+set(TARGET_NAME producer)
+set(SOURCE_FILES
+        producer.cpp
+        )
+
+add_executable(${TARGET_NAME} ${SOURCE_FILES})
+target_include_directories(${TARGET_NAME} PUBLIC include ${CMAKE_SOURCE_DIR}/common/cpp/include)
+
+#Add all necessary common libraries
+GET_PROPERTY(ASAPO_COMMON_IO_LIBRARIES GLOBAL PROPERTY ASAPO_COMMON_IO_LIBRARIES)
+target_link_libraries(${TARGET_NAME} ${ASAPO_COMMON_IO_LIBRARIES} asapo-producer)
diff --git a/tests/manual/producer_cpp/producer.cpp b/tests/manual/producer_cpp/producer.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..357b857f6d419c344c9463d6f22687f08ff4cdd6
--- /dev/null
+++ b/tests/manual/producer_cpp/producer.cpp
@@ -0,0 +1,111 @@
+#include <thread>
+#include <chrono>
+#include "asapo_producer.h"
+
+
+void ProcessAfterSend(asapo::RequestCallbackPayload payload, asapo::Error err) {
+    if (err) {
+        std::cerr << "error/warning during send: " << err << std::endl;
+        return;
+    } else {
+        std::cout << "successfuly send " << payload.original_header.Json() << std::endl;
+        return;
+    }
+}
+
+void exit_if_error(std::string error_string, const asapo::Error& err) {
+    if (err) {
+        std::cerr << error_string << err << std::endl;
+        //exit(EXIT_FAILURE);
+    }
+}
+
+std::string format_string(uint32_t in, std::string format="%05d")
+{
+    if(in > 99999)
+        in = 0;
+
+    char buf[6];
+    snprintf(buf,sizeof(buf),format.c_str(),in);
+    return std::string(buf);
+
+}
+
+
+int main(int argc, char* argv[]) {
+
+    uint32_t submodule = 1;
+    uint32_t sleeptime = 1;
+
+
+    if(argc >= 2)
+        submodule = atoi(argv[1]);
+
+    if(argc >=3)
+        sleeptime = atoi(argv[2]);
+
+
+    asapo::Error err;
+
+    auto endpoint = "localhost:8400"; // or your endpoint
+    auto beamtime = "asapo_test";
+
+    auto producer = asapo::Producer::Create(endpoint, 1,asapo::RequestHandlerType::kTcp,
+                                            asapo::SourceCredentials{asapo::SourceType::kProcessed,beamtime, "", "", ""}, 60, &err);
+    exit_if_error("Cannot start producer", err);
+
+    uint32_t eventid = 1;
+    uint32_t start_number = 1;
+
+    // number of files per acquisition per module
+    const uint32_t number_of_splitted_files = 5;
+
+    // number of modules
+    const uint32_t modules = 3;
+
+    while(true)
+    {
+        for(uint32_t part=1; part<=number_of_splitted_files; ++part)
+        {
+            std::string to_send = "processed/lambdatest_"
+                + format_string(start_number) // file start number (acquisition id)
+                + "_part" + format_string(part) // file part id (chunk id)
+                + "_m" + format_string(submodule, std::string("%02d"));
+            auto send_size = to_send.size() + 1;
+            auto buffer =  asapo::FileData(new uint8_t[send_size]);
+            memcpy(buffer.get(), to_send.c_str(), send_size);
+            std::string substream = std::to_string(start_number);
+            // std::cout<<"submodule:"<<submodule
+            //          <<"- substream:"<<substream
+            //          <<"- filename:"<<to_send<<std::endl;
+
+            asapo::EventHeader event_header{submodule, send_size, to_send,"", part,modules};
+            // err = producer->SendData(event_header,substream, std::move(buffer),
+            //                          asapo::kTransferMetaDataOnly, &ProcessAfterSend);
+
+            err = producer->SendData(event_header,substream, std::move(buffer),
+                                     asapo::kDefaultIngestMode, &ProcessAfterSend);
+            exit_if_error("Cannot send file", err);
+
+            err = producer->WaitRequestsFinished(1000);
+            exit_if_error("Producer exit on timeout", err);
+            std::this_thread::sleep_for (std::chrono::seconds(sleeptime));
+
+            // if(part == number_of_splitted_files)
+            // {
+
+            //     err = producer->SendSubstreamFinishedFlag(substream,
+            //                                               part,
+            //                                               std::to_string(start_number+1),
+            //                                               &ProcessAfterSend);
+            //     exit_if_error("Cannot send file", err);
+            // }
+
+        }
+        start_number++;
+
+    }
+
+
+    return EXIT_SUCCESS;
+}
diff --git a/tests/manual/python_tests/producer/short_test.py b/tests/manual/python_tests/producer/short_test.py
index 849b22c359c3a2039c262a0974839a7a0330237b..14d50d3927e7af70727e1867551facde99b60bea 100644
--- a/tests/manual/python_tests/producer/short_test.py
+++ b/tests/manual/python_tests/producer/short_test.py
@@ -27,7 +27,7 @@ def assert_err(err):
         print(err)
         sys.exit(1)
 
-producer = asapo_producer.create_producer(endpoint,beamtime,'auto', stream, token, nthreads ,0)
+producer = asapo_producer.create_producer(endpoint,'processed',beamtime,'auto', stream, token, nthreads ,0)
 
 producer.set_log_level("debug")
 
diff --git a/tests/manual/python_tests/producer/test.py b/tests/manual/python_tests/producer/test.py
index 2d364a7d7e4827f655e79b38229d88b7db457214..da68de94514b4c4a95c14f061db61b995cf263c0 100644
--- a/tests/manual/python_tests/producer/test.py
+++ b/tests/manual/python_tests/producer/test.py
@@ -27,7 +27,7 @@ def assert_err(err):
         print(err)
         sys.exit(1)
 
-producer = asapo_producer.create_producer(endpoint,beamtime,'auto', stream, token, nthreads ,0)
+producer = asapo_producer.create_producer(endpoint,'processed',beamtime,'auto', stream, token, nthreads ,0)
 
 producer.set_log_level("info")
 
diff --git a/tests/manual/python_tests/producer_wait_bug_mongo/test.py b/tests/manual/python_tests/producer_wait_bug_mongo/test.py
index 9e420f33ccef2879ae05e46fa3a616edf241d88d..06d658cbc95aa68921d16f2d42a984ee62f92191 100644
--- a/tests/manual/python_tests/producer_wait_bug_mongo/test.py
+++ b/tests/manual/python_tests/producer_wait_bug_mongo/test.py
@@ -27,7 +27,7 @@ def assert_err(err):
         print(err)
         sys.exit(1)
 
-producer = asapo_producer.create_producer(endpoint,beamtime,'auto', stream, token, nthreads, 600)
+producer = asapo_producer.create_producer(endpoint,'processed',beamtime,'auto', stream, token, nthreads, 600)
 
 producer.set_log_level("debug")
 
diff --git a/tests/manual/python_tests/producer_wait_threads/producer_api.py b/tests/manual/python_tests/producer_wait_threads/producer_api.py
index 85ccd36c07dfe93c0018d1bb017fedbb8e18f11b..22fc727437f2f18fffa8c31017c1031a1b59c7dc 100644
--- a/tests/manual/python_tests/producer_wait_threads/producer_api.py
+++ b/tests/manual/python_tests/producer_wait_threads/producer_api.py
@@ -22,7 +22,7 @@ def callback(header,err):
         print ("successfuly sent: ",header)
     lock.release()
 
-producer  = asapo_producer.create_producer(endpoint,beamtime, 'auto', stream, token, nthreads, 600)
+producer  = asapo_producer.create_producer(endpoint,'processed',beamtime, 'auto', stream, token, nthreads, 600)
 
 producer.set_log_level("info")
 
@@ -63,7 +63,7 @@ if n!=0:
 
 # create with error
 try:
-    producer  = asapo_producer.create_producer(endpoint,beamtime,'auto', stream, token, 0, 600)
+    producer  = asapo_producer.create_producer(endpoint,'processed',beamtime,'auto', stream, token, 0, 600)
 except Exception as Asapo:
     print(e)
 else:
diff --git a/tests/manual/python_tests/producer_wait_threads/test.py b/tests/manual/python_tests/producer_wait_threads/test.py
index 5ebe7b95caec871caed2240f232a8494b05857bf..d1fbaf05b81c169b0f7295b867fe9b091fc788a8 100644
--- a/tests/manual/python_tests/producer_wait_threads/test.py
+++ b/tests/manual/python_tests/producer_wait_threads/test.py
@@ -22,7 +22,7 @@ def callback(header,err):
         print ("successfuly sent: ",header)
     lock.release()
 
-producer  = asapo_producer.create_producer(endpoint,beamtime,'auto', stream, token, nthreads, 600)
+producer  = asapo_producer.create_producer(endpoint,'processed',beamtime,'auto', stream, token, nthreads, 600)
 
 producer.set_log_level("info")