diff --git a/3d_party/ldap/record.ldif b/3d_party/ldap/record.ldif
new file mode 100644
index 0000000000000000000000000000000000000000..c694bba97b8c8588b5b5f3edb54bfbeb5b1115ab
--- /dev/null
+++ b/3d_party/ldap/record.ldif
@@ -0,0 +1,21 @@
+dn: ou=rgy,o=desy,c=de
+objectclass: organizationalUnit
+ou: rgy
+
+dn: ou=netgroup,ou=rgy,o=desy,c=de
+objectclass: organizationalUnit
+ou: netgroup
+
+dn: cn=a3p00-hosts,ou=netgroup,ou=rgy,o=desy,c=de
+objectClass: top
+objectClass: nisNetgroup
+cn: a3p00-hosts
+description: Netgroup for nodes on PETRA III Beamline P00
+nisNetgroupTriple: (localhost,-,)
+
+dn: cn=a3p07-hosts,ou=netgroup,ou=rgy,o=desy,c=de
+objectClass: top
+objectClass: nisNetgroup
+cn: a3p07-hosts
+description: Netgroup for nodes on PETRA III Beamline P07
+nisNetgroupTriple: (localhost,-,)
diff --git a/3d_party/ldap/scripts.sh b/3d_party/ldap/scripts.sh
new file mode 100644
index 0000000000000000000000000000000000000000..a0dd2db53d25c9da879faa26fd75dd095fe245d9
--- /dev/null
+++ b/3d_party/ldap/scripts.sh
@@ -0,0 +1,5 @@
+/opt/asapo/ldap/slapd -f /opt/asapo/ldap/ldap.conf
+
+ldapadd -x -D "ou=rgy,o=desy,c=de" -f record.ldif -h localhost
+
+ldapsearch -x -b ou=rgy,o=DESY,c=DE cn=a3p00-hosts -h localhost
\ No newline at end of file
diff --git a/3d_party/ldap/scripts_mac.sh b/3d_party/ldap/scripts_mac.sh
new file mode 100644
index 0000000000000000000000000000000000000000..2bfb03afb6faef6c12b746d642b87a19cdf5e36d
--- /dev/null
+++ b/3d_party/ldap/scripts_mac.sh
@@ -0,0 +1,5 @@
+/usr/libexec/slapd -d3 -f /Users/yakubov/Projects/asapo/3d_party/ldap/slapd.conf
+
+ldapadd -x -D "ou=rgy,o=desy,c=de" -f record.ldif
+
+ldapsearch -x -b ou=rgy,o=DESY,c=DE cn=a3p00-hosts
\ No newline at end of file
diff --git a/3d_party/ldap/slapd.conf b/3d_party/ldap/slapd.conf
new file mode 100644
index 0000000000000000000000000000000000000000..97608116b7f16fa4049a77414fe4c83129b34776
--- /dev/null
+++ b/3d_party/ldap/slapd.conf
@@ -0,0 +1,16 @@
+include /etc/ldap/schema/core.schema
+include /etc/ldap/schema/cosine.schema
+include /etc/ldap/schema/nis.schema
+
+modulepath /usr/lib/ldap
+moduleload back_bdb.la
+
+access to * by * write
+access to * by * manage
+access to * by * read
+allow bind_anon_cred
+allow bind_anon_dn
+allow update_anon
+
+database    bdb
+suffix      "ou=rgy,o=desy,c=de"
diff --git a/3d_party/ldap/slapd_mac.conf b/3d_party/ldap/slapd_mac.conf
new file mode 100644
index 0000000000000000000000000000000000000000..aa35b7164efbca5d6763545999bf950c3ea0a443
--- /dev/null
+++ b/3d_party/ldap/slapd_mac.conf
@@ -0,0 +1,13 @@
+include /etc/openldap/schema/core.schema
+include /etc/openldap/schema/cosine.schema
+include /etc/openldap/schema/nis.schema
+
+access to * by * write
+access to * by * manage
+access to * by * read
+allow bind_anon_cred
+allow bind_anon_dn
+allow update_anon
+
+database    bdb
+suffix      "ou=rgy,o=desy,c=de"
diff --git a/3d_party/ldap/slapd_win.conf b/3d_party/ldap/slapd_win.conf
new file mode 100644
index 0000000000000000000000000000000000000000..b07a2f7775c85d6fbfd1a03ce2bb2c5204555501
--- /dev/null
+++ b/3d_party/ldap/slapd_win.conf
@@ -0,0 +1,18 @@
+# read https://github.com/cristal-ise/kernel/wiki/Install-OpenLDAP
+
+include /etc/openldap/schema/core.schema
+include /etc/openldap/schema/cosine.schema
+include /etc/openldap/schema/nis.schema
+
+modulepath /usr/lib/ldap
+moduleload back_bdb.la
+
+access to * by * write
+access to * by * manage
+access to * by * read
+allow bind_anon_cred
+allow bind_anon_dn
+allow update_anon
+
+database    bdb
+suffix      "ou=rgy,o=desy,c=de"
diff --git a/CHANGELOG.md b/CHANGELOG.md
index aa2f1d2369846a9fc13884785b7ebdb703a222d1..05f33531dcd747549c82de798351c02c44eb8498 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,7 +1,9 @@
 ##20.09.0
 
 FEATURES
-* implemented data resend - data will be redelivered if it is not acknowledged during a given period or a consumer sent a negative acknowledge
+* implemented negative acknowledgements and data redelivery - data will be redelivered automatically for get_next calls if it is not acknowledged during a given period or if a consumer sent a negative acknowledgement
+* introduced data source types - "raw" data is written to the beamline filesystem and this can only be done from certain IPs (detector PC, ...),
+"processed" data is written to the core filesystem. File paths must now start with `raw/` or `processed/`
 * Added RDMA support for the communication between consumer and receiver. (Improves transfer speeds while using less CPU resources)
   Requires LibFabric v1.11.0
   Receiver must have network mode 'Fabric' enabled and RDMAable AdvertiseURI. See config `DataServer.{AdvertiseURI, NetworkMode}`
@@ -12,6 +14,10 @@ FEATURES
 BUG FIXES
 * fix data query images when beamtime_id starts with number 
 
+BREAKING CHANGES
+* an extra parameter in producer constructor for data source type
+* path of the files that are sent from producer to asapo must start with `raw/` for raw source type or `processed/` for processed source type, otherwise the files will not be written and an error will be sent back
+
 ## 20.06.3
 
 BUG FIXES
diff --git a/CMakeModules/CodeCoverage.cmake b/CMakeModules/CodeCoverage.cmake
index c170a8a583bbf2ea89338fdfc664ddddfe8afef0..63f4a9815646a623b4af2942a5ee6c8146b592cf 100644
--- a/CMakeModules/CodeCoverage.cmake
+++ b/CMakeModules/CodeCoverage.cmake
@@ -12,7 +12,7 @@
 #    and/or other materials provided with the distribution.
 #
 # 3. Neither the name of the copyright holder nor the names of its contributors
-#    may be used to endorse or promote products derived from this software without
+#    may be used to endorse or promote products derived from this software without
 #    specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
diff --git a/authorizer/src/asapo_authorizer/common/error.go b/authorizer/src/asapo_authorizer/common/error.go
new file mode 100644
index 0000000000000000000000000000000000000000..ff551abc78fa443cd86b5b018697c28cfa70a550
--- /dev/null
+++ b/authorizer/src/asapo_authorizer/common/error.go
@@ -0,0 +1,10 @@
+package common
+
+type ServerError struct {
+	Code int
+	Message    string
+}
+
+func (e *ServerError) Error() string {
+	return e.Message
+}
diff --git a/authorizer/src/asapo_authorizer/ldap_client/ldap_client.go b/authorizer/src/asapo_authorizer/ldap_client/ldap_client.go
new file mode 100644
index 0000000000000000000000000000000000000000..2c8490814c082a95e33fd17f0d81211ddfab0e17
--- /dev/null
+++ b/authorizer/src/asapo_authorizer/ldap_client/ldap_client.go
@@ -0,0 +1,5 @@
+package ldap_client
+
+type LdapClient interface {
+	GetAllowedIpsForBeamline(url string,base string, filter string) ([]string, error)
+}
diff --git a/authorizer/src/asapo_authorizer/ldap_client/mock_client.go b/authorizer/src/asapo_authorizer/ldap_client/mock_client.go
new file mode 100644
index 0000000000000000000000000000000000000000..ad476e71dae2720303c67a46bf8f7dc9c300394a
--- /dev/null
+++ b/authorizer/src/asapo_authorizer/ldap_client/mock_client.go
@@ -0,0 +1,12 @@
+package ldap_client
+
+import "github.com/stretchr/testify/mock"
+
+type MockedLdapClient struct {
+	mock.Mock
+}
+
+func (c *MockedLdapClient) GetAllowedIpsForBeamline(url string,base string,filter string) ([]string, error) {
+	args := c.Called(url,base,filter)
+	return args.Get(0).([]string), args.Error(1)
+}
diff --git a/authorizer/src/asapo_authorizer/ldap_client/openldap_client.go b/authorizer/src/asapo_authorizer/ldap_client/openldap_client.go
new file mode 100644
index 0000000000000000000000000000000000000000..f817a63fe008aa0eeeafffbd8165f49ffb378376
--- /dev/null
+++ b/authorizer/src/asapo_authorizer/ldap_client/openldap_client.go
@@ -0,0 +1,55 @@
+package ldap_client
+
+import (
+	"asapo_authorizer/common"
+	"asapo_common/utils"
+	"net"
+	"strings"
+)
+import "github.com/go-ldap/ldap"
+
+type OpenLdapClient struct {
+}
+
+func (c *OpenLdapClient) GetAllowedIpsForBeamline(url string,base string,filter string) ([]string, error) {
+	l, err := ldap.DialURL(url)
+	if err != nil {
+		return []string{},&common.ServerError{utils.StatusServiceUnavailable, err.Error()}
+	}
+	defer l.Close()
+
+	searchRequest := ldap.NewSearchRequest(
+		base,
+		ldap.ScopeWholeSubtree, ldap.NeverDerefAliases, 0, 0, false,
+		filter,
+		[]string{"nisNetgroupTriple"},
+		nil,
+	)
+
+	sr, err := l.Search(searchRequest)
+	if err != nil {
+		if ldap.IsErrorWithCode(err,ldap.LDAPResultNoSuchObject) {
+			return []string{},nil
+		} else {
+			return []string{},err
+		}
+	}
+
+	res := make([]string,0)
+	for _, entry := range sr.Entries {
+		host := entry.GetAttributeValue("nisNetgroupTriple")
+		host = strings.TrimPrefix(host,"(")
+		host = strings.Split(host, ",")[0]
+		addrs,err := net.LookupIP(host)
+
+		if err != nil {
+			return []string{},err
+		}
+		for _, addr := range addrs {
+			if ipv4 := addr.To4(); ipv4 != nil {
+				res = append(res,ipv4.String())
+			}
+		}
+	}
+	return res,nil
+}
diff --git a/authorizer/src/asapo_authorizer/ldap_client/openldap_client_test.go b/authorizer/src/asapo_authorizer/ldap_client/openldap_client_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..fc13cd444da6a2d492f268999483c35a8c83d3d5
--- /dev/null
+++ b/authorizer/src/asapo_authorizer/ldap_client/openldap_client_test.go
@@ -0,0 +1,32 @@
+package ldap_client
+
+import (
+	"asapo_authorizer/common"
+	"asapo_common/utils"
+	"github.com/stretchr/testify/assert"
+	"testing"
+)
+
+func TestOpenLDAP(t *testing.T) {
+	lc := new(OpenLdapClient)
+	uri := "ldap://localhost:389"
+	base := "ou=rgy,o=desy,c=de"
+	filter:= "(cn=a3p00-hosts)"
+	expected_ips := []string{"127.0.0.1"}
+	res,err := lc.GetAllowedIpsForBeamline(uri,base,filter)
+	assert.Nil(t,err)
+	assert.Equal(t,expected_ips,res)
+}
+
+func TestOpenLDAPCannotDeal(t *testing.T) {
+	lc := new(OpenLdapClient)
+	uri := "ldap://localhost1:3891"
+	base := "ou=rgy,o=desy,c=de"
+	filter:= "(cn=a3p00-hosts)"
+	_,err := lc.GetAllowedIpsForBeamline(uri,base,filter)
+	se,ok:= err.(*common.ServerError)
+	assert.Equal(t,ok,true)
+	if  ok {
+		assert.Equal(t,utils.StatusServiceUnavailable,se.Code)
+	}
+}
diff --git a/authorizer/src/asapo_authorizer/server/authorize.go b/authorizer/src/asapo_authorizer/server/authorize.go
index 42ee3cd020546075e3a30e72a20fddd68eefe74c..394b652a7c3c45b7cef75545c3ebb235e2839122 100644
--- a/authorizer/src/asapo_authorizer/server/authorize.go
+++ b/authorizer/src/asapo_authorizer/server/authorize.go
@@ -1,6 +1,7 @@
 package server
 
 import (
+	"asapo_authorizer/common"
 	log "asapo_common/logger"
 	"asapo_common/utils"
 	"errors"
@@ -14,6 +15,7 @@ type SourceCredentials struct {
 	Beamline   string
 	Stream     string
 	Token      string
+	Type 	   string
 }
 
 type authorizationRequest struct {
@@ -24,10 +26,10 @@ type authorizationRequest struct {
 func getSourceCredentials(request authorizationRequest) (SourceCredentials, error) {
 	vals := strings.Split(request.SourceCredentials, "%")
 
-	if len(vals) != 4 {
+	if len(vals) != 5 {
 		return SourceCredentials{}, errors.New("cannot get source credentials from " + request.SourceCredentials)
 	}
-	creds := SourceCredentials{vals[0], vals[1], vals[2], vals[3]}
+	creds := SourceCredentials{vals[1], vals[2], vals[3], vals[4],vals[0]}
 	if creds.Stream == "" {
 		creds.Stream = "detector"
 	}
@@ -53,20 +55,6 @@ func splitHost(hostPort string) string {
 	return s[0]
 }
 
-func getBeamlineFromIP(ip string) (string, error) {
-	host := splitHost(ip)
-	lines, err := utils.ReadStringsFromFile(settings.IpBeamlineMappingFolder + string(filepath.Separator) + host)
-	if err != nil {
-		return "", err
-	}
-
-	if len(lines) < 1 || len(lines[0]) == 0 {
-		return "", errors.New("file is empty")
-	}
-
-	return lines[0], nil
-}
-
 func beamtimeMetaFromJson(fname string) (beamtimeMeta, error) {
 	var meta beamtimeMeta
 	err := utils.ReadJsonFromFile(fname, &meta)
@@ -125,7 +113,6 @@ func findBeamtimeMetaFromBeamline(beamline string) (beamtimeMeta, error) {
 	if err != nil || len(matches) != 1 {
 		return beamtimeMeta{}, err
 	}
-
 	meta, err := beamtimeMetaFromJson(matches[0])
 	if (err != nil) {
 		return beamtimeMeta{}, err
@@ -138,21 +125,23 @@ func alwaysAllowed(creds SourceCredentials) (beamtimeMeta, bool) {
 	for _, pair := range settings.AlwaysAllowedBeamtimes {
 		if pair.BeamtimeId == creds.BeamtimeId {
 			pair.Stream = creds.Stream
+			pair.Type = creds.Type
 			return pair, true
 		}
 	}
 	return beamtimeMeta{}, false
 }
 
-func authorizeByHost(host, beamline string) (error) {
-	active_beamline, err := getBeamlineFromIP(host)
+func authorizeByHost(host_ip, beamline string) (error) {
+	filter := strings.Replace(settings.Ldap.FilterTemplate,"__BEAMLINE__",beamline,1)
+	allowed_ips, err := ldapClient.GetAllowedIpsForBeamline(settings.Ldap.Uri,settings.Ldap.BaseDn, filter)
 	if err != nil {
-		log.Error("cannot find active beamline for " + host + " - " + err.Error())
+		log.Error("cannot get list of allowed hosts from LDAP: " + err.Error())
 		return err
 	}
 
-	if (active_beamline != beamline) {
-		err_string := "beamine for host " + host + " - " + active_beamline + " does not match " + beamline
+	if (!utils.StringInSlice(splitHost(host_ip),allowed_ips)) {
+		err_string := "beamline " + beamline + " not allowed for host " + host_ip
 		log.Error(err_string)
 		return errors.New(err_string)
 	}
@@ -160,7 +149,7 @@ func authorizeByHost(host, beamline string) (error) {
 }
 
 func needHostAuthorization(creds SourceCredentials) bool {
-	return strings.HasPrefix(creds.Stream, "detector") || len(creds.Token) == 0
+	return creds.Type == "raw" || len(creds.Token) == 0
 }
 
 func authorizeByToken(creds SourceCredentials) error {
@@ -200,16 +189,35 @@ func findMeta(creds SourceCredentials) (beamtimeMeta, error) {
 		meta, err = findBeamtimeMetaFromBeamline(creds.Beamline)
 	}
 
+	if creds.Type == "processed" {
+		meta.OnlinePath = ""
+	}
+
 	if (err != nil) {
 		log.Error(err.Error())
 		return beamtimeMeta{}, err
 	}
 
+	meta.Stream = creds.Stream
+	meta.Type = creds.Type
+
 	return meta, nil
 }
 
 func authorizeMeta(meta beamtimeMeta, request authorizationRequest, creds SourceCredentials) error {
 
+	if creds.Type=="raw" && meta.OnlinePath=="" {
+		err_string := "beamtime "+meta.BeamtimeId+" is not online"
+		log.Error(err_string)
+		return errors.New(err_string)
+	}
+
+	if creds.Beamline != "auto" && meta.Beamline != creds.Beamline {
+		err_string := "given beamline (" + creds.Beamline + ") does not match the found one (" + meta.Beamline + ")"
+		log.Debug(err_string)
+		return errors.New(err_string)
+	}
+
 	if needHostAuthorization(creds) {
 		if err := authorizeByHost(request.OriginHost, meta.Beamline); err != nil {
 			return err
@@ -220,11 +228,6 @@ func authorizeMeta(meta beamtimeMeta, request authorizationRequest, creds Source
 		}
 	}
 
-	if creds.Beamline != "auto" && meta.Beamline != creds.Beamline {
-		err_string := "given beamline (" + creds.Beamline + ") does not match the found one (" + meta.Beamline + ")"
-		log.Debug(err_string)
-		return errors.New(err_string)
-	}
 	return nil
 }
 
@@ -242,9 +245,7 @@ func authorize(request authorizationRequest, creds SourceCredentials) (beamtimeM
 		return beamtimeMeta{}, err
 	}
 
-	meta.Stream = creds.Stream
-
-	log.Debug("authorized beamtime " + meta.BeamtimeId + " for " + request.OriginHost + " in " + meta.Beamline)
+	log.Debug("authorized beamtime " + meta.BeamtimeId + " for " + request.OriginHost + " in " + meta.Beamline+", type "+meta.Type)
 	return meta, nil
 }
 
@@ -264,6 +265,11 @@ func routeAuthorize(w http.ResponseWriter, r *http.Request) {
 
 	beamtimeInfo, err := authorize(request, creds)
 	if (err != nil) {
+		serr,ok:=err.(*common.ServerError)
+		if ok {
+			utils.WriteServerError(w,err,serr.Code)
+			return
+		}
 		utils.WriteServerError(w,err,http.StatusUnauthorized)
 		return
 	}
diff --git a/authorizer/src/asapo_authorizer/server/authorize_test.go b/authorizer/src/asapo_authorizer/server/authorize_test.go
index ca2c21f7689272ce12281f597e91fcdda7bb4293..d3b8e36294e79e8c99a39c16a128135ba64b4671 100644
--- a/authorizer/src/asapo_authorizer/server/authorize_test.go
+++ b/authorizer/src/asapo_authorizer/server/authorize_test.go
@@ -1,6 +1,8 @@
 package server
 
 import (
+	"asapo_authorizer/common"
+	"asapo_authorizer/ldap_client"
 	"asapo_common/utils"
 	"github.com/stretchr/testify/assert"
 	"io/ioutil"
@@ -18,6 +20,8 @@ func prepareToken(beamtime_or_beamline string) string{
 	return token
 }
 
+var mockClient = new(ldap_client.MockedLdapClient)
+
 
 type request struct {
 	path    string
@@ -48,24 +52,22 @@ func doPostRequest(path string,buf string) *httptest.ResponseRecorder {
 	return w
 }
 
-
-
 var credTests = [] struct {
 	request string
 	cred SourceCredentials
 	ok bool
 	message string
 } {
-	{"asapo_test%auto%%", SourceCredentials{"asapo_test","auto","detector",""},true,"auto beamline, stream and no token"},
-	{"asapo_test%auto%%token", SourceCredentials{"asapo_test","auto","detector","token"},true,"auto beamline, stream"},
-	{"asapo_test%auto%stream%", SourceCredentials{"asapo_test","auto","stream",""},true,"auto beamline, no token"},
-	{"asapo_test%auto%stream%token", SourceCredentials{"asapo_test","auto","stream","token"},true,"auto beamline,stream, token"},
-	{"asapo_test%beamline%stream%token", SourceCredentials{"asapo_test","beamline","stream","token"},true,"all set"},
-	{"auto%beamline%stream%token", SourceCredentials{"auto","beamline","stream","token"},true,"auto beamtime"},
-	{"auto%auto%stream%token", SourceCredentials{},false,"auto beamtime and beamline"},
-	{"%beamline%stream%token", SourceCredentials{"auto","beamline","stream","token"},true,"empty beamtime"},
-	{"asapo_test%%stream%token", SourceCredentials{"asapo_test","auto","stream","token"},true,"empty bealine"},
-	{"%%stream%token", SourceCredentials{},false,"both empty"},
+	{"processed%asapo_test%auto%%", SourceCredentials{"asapo_test","auto","detector","","processed"},true,"auto beamline, stream and no token"},
+	{"processed%asapo_test%auto%%token", SourceCredentials{"asapo_test","auto","detector","token","processed"},true,"auto beamline, stream"},
+	{"processed%asapo_test%auto%stream%", SourceCredentials{"asapo_test","auto","stream","","processed"},true,"auto beamline, no token"},
+	{"processed%asapo_test%auto%stream%token", SourceCredentials{"asapo_test","auto","stream","token","processed"},true,"auto beamline,stream, token"},
+	{"processed%asapo_test%beamline%stream%token", SourceCredentials{"asapo_test","beamline","stream","token","processed"},true,"all set"},
+	{"processed%auto%beamline%stream%token", SourceCredentials{"auto","beamline","stream","token","processed"},true,"auto beamtime"},
+	{"raw%auto%auto%stream%token", SourceCredentials{},false,"auto beamtime and beamline"},
+	{"raw%%beamline%stream%token", SourceCredentials{"auto","beamline","stream","token","raw"},true,"empty beamtime"},
+	{"raw%asapo_test%%stream%token", SourceCredentials{"asapo_test","auto","stream","token","raw"},true,"empty beamline"},
+	{"raw%%%stream%token", SourceCredentials{},false,"both empty"},
 }
 
 func TestSplitCreds(t *testing.T) {
@@ -84,8 +86,8 @@ func TestSplitCreds(t *testing.T) {
 }
 
 func TestAuthorizeDefaultOK(t *testing.T) {
-	allowBeamlines([]beamtimeMeta{{"asapo_test","beamline","","2019","tf"}})
-	request :=  makeRequest(authorizationRequest{"asapo_test%%%","host"})
+	allowBeamlines([]beamtimeMeta{{"asapo_test","beamline","","2019","tf",""}})
+	request :=  makeRequest(authorizationRequest{"processed%asapo_test%%%","host"})
 	w := doPostRequest("/authorize",request)
 
 	body, _ := ioutil.ReadAll(w.Body)
@@ -93,6 +95,7 @@ func TestAuthorizeDefaultOK(t *testing.T) {
 	assert.Contains(t, string(body), "asapo_test", "")
 	assert.Contains(t, string(body), "beamline", "")
 	assert.Contains(t, string(body), "detector", "")
+	assert.Contains(t, string(body), "processed", "")
 
 	assert.Equal(t, http.StatusOK, w.Code, "")
 }
@@ -104,64 +107,6 @@ var beamtime_meta_online =`
 }
 `
 
-var authTests = [] struct {
-	beamtime_id string
-	beamline string
-	stream string
-	token string
-	status int
-	message string
-}{
-	{"test","auto","stream", prepareToken("test"),http.StatusOK,"user stream with correct token"},
-	{"test_online","auto","stream", prepareToken("test_online"),http.StatusOK,"with online path"},
-	{"test1","auto","stream", prepareToken("test1"),http.StatusUnauthorized,"correct token, beamtime not found"},
-	{"test","auto","stream", prepareToken("wrong"),http.StatusUnauthorized,"user stream with wrong token"},
-	{"test","auto","detector_aaa", prepareToken("test"),http.StatusUnauthorized,"detector stream with correct token and wroung source"},
-	{"test","bl1","stream", prepareToken("test"),http.StatusOK,"correct beamline given"},
-	{"test","bl2","stream", prepareToken("test"),http.StatusUnauthorized,"incorrect beamline given"},
-}
-func TestAuthorizeWithToken(t *testing.T) {
-	allowBeamlines([]beamtimeMeta{})
-	settings.RootBeamtimesFolder ="."
-	settings.CurrentBeamlinesFolder="."
-	os.MkdirAll(filepath.Clean("tf/gpfs/bl1/2019/data/test"), os.ModePerm)
-	os.MkdirAll(filepath.Clean("tf/gpfs/bl1/2019/data/test_online"), os.ModePerm)
-
-	os.MkdirAll(filepath.Clean("bl1/current"), os.ModePerm)
-	ioutil.WriteFile(filepath.Clean("bl1/current/beamtime-metadata-test_online.json"), []byte(beamtime_meta_online), 0644)
-
-	defer 	os.RemoveAll("tf")
-	defer 	os.RemoveAll("bl1")
-
-	for _, test := range authTests {
-		request :=  makeRequest(authorizationRequest{test.beamtime_id+"%"+test.beamline+"%"+test.stream+"%"+test.token,"host"})
-		w := doPostRequest("/authorize",request)
-
-		body, _ := ioutil.ReadAll(w.Body)
-		if test.status==http.StatusOK {
-			body_str:=string(body)
-			body_str = strings.Replace(body_str,string(os.PathSeparator),"/",-1)
-			body_str = strings.Replace(body_str,"//","/",-1)
-			assert.Contains(t, body_str, test.beamtime_id, "")
-			assert.Contains(t, body_str, "bl1", "")
-			assert.Contains(t, body_str, "stream", "")
-			assert.Contains(t, body_str, "tf/gpfs/bl1/2019/data/test", "")
-			if (test.beamtime_id == "test_online") {
-				assert.Contains(t, body_str, "tf/gpfs/bl1/2019/data/test_online", "")
-				assert.Contains(t, body_str, "bl1/current", "")
-			} else {
-				assert.NotContains(t, body_str, "current", "")
-			}
-			assert.Contains(t, body_str, test.stream, "")
-		}
-
-		assert.Equal(t, test.status, w.Code, test.message)
-	}
-
-
-}
-
-
 var beamtime_meta =`
 {
 "applicant": {
@@ -201,48 +146,108 @@ var beamtime_meta =`
 }
 `
 
-var authBeamlineTests = [] struct {
+var authTests = [] struct {
+	source_type string
 	beamtime_id string
 	beamline string
+	stream string
 	token string
+	originHost string
 	status int
 	message string
+	answer string
 }{
-	{"11111111","p07", prepareToken("bl_p07"),http.StatusOK,"beamtime found"},
-	{"11111111","p07", prepareToken("bl_p06"),http.StatusUnauthorized,"wrong token"},
-	{"11111111","p08", prepareToken("bl_p08"),http.StatusUnauthorized,"beamtime not found"},
+	{"processed","test","auto","stream", prepareToken("test"),"127.0.0.2",http.StatusOK,"user stream with correct token",
+		`{"beamtimeId":"test","beamline":"bl1","stream":"stream","core-path":"./tf/gpfs/bl1/2019/data/test","beamline-path":"","source-type":"processed"}`},
+	{"processed","test_online","auto","stream", prepareToken("test_online"),"127.0.0.1",http.StatusOK,"with online path, processed type",
+		`{"beamtimeId":"test_online","beamline":"bl1","stream":"stream","core-path":"./tf/gpfs/bl1/2019/data/test_online","beamline-path":"","source-type":"processed"}`},
+	{"processed","test1","auto","stream", prepareToken("test1"),"127.0.0.1",http.StatusUnauthorized,"correct token, beamtime not found",
+		""},
+	{"processed","test","auto","stream", prepareToken("wrong"),"127.0.0.1",http.StatusUnauthorized,"user stream with wrong token",
+		""},
+	{"processed","test","bl1","stream", prepareToken("test"),"127.0.0.1",http.StatusOK,"correct beamline given",
+		`{"beamtimeId":"test","beamline":"bl1","stream":"stream","core-path":"./tf/gpfs/bl1/2019/data/test","beamline-path":"","source-type":"processed"}`},
+		{"processed","test","bl2","stream", prepareToken("test"),"127.0.0.1",http.StatusUnauthorized,"incorrect beamline given",
+		""},
+	{"processed","auto","p07", "stream",prepareToken("bl_p07"),"127.0.0.1",http.StatusOK,"beamtime found",
+		`{"beamtimeId":"11111111","beamline":"p07","stream":"stream","core-path":"asap3/petra3/gpfs/p07/2020/data/11111111","beamline-path":"","source-type":"processed"}`},
+	{"processed","auto","p07", "stream",prepareToken("bl_p06"),"127.0.0.1",http.StatusUnauthorized,"wrong token",
+		""},
+	{"processed","auto","p08", "stream",prepareToken("bl_p08"),"127.0.0.1",http.StatusUnauthorized,"beamtime not found",
+		""},
+	{"raw","test_online","auto","stream", prepareToken("test_online"),"127.0.0.1",http.StatusOK,"raw type",
+		`{"beamtimeId":"test_online","beamline":"bl1","stream":"stream","core-path":"./tf/gpfs/bl1/2019/data/test_online","beamline-path":"./bl1/current","source-type":"raw"}`},
+	{"raw","test_online","auto","stream", "","127.0.0.1",http.StatusOK,"raw type",
+		`{"beamtimeId":"test_online","beamline":"bl1","stream":"stream","core-path":"./tf/gpfs/bl1/2019/data/test_online","beamline-path":"./bl1/current","source-type":"raw"}`},
+ 	{"raw","auto","p07","stream", "","127.0.0.1",http.StatusOK,"raw type, auto beamtime",
+		`{"beamtimeId":"11111111","beamline":"p07","stream":"stream","core-path":"asap3/petra3/gpfs/p07/2020/data/11111111","beamline-path":"./p07/current","source-type":"raw"}`},
+	{"raw","auto","p07","noldap", "","127.0.0.1",http.StatusNotFound,"no connection to ldap",
+		""},
+	{"raw","test_online","auto","stream", "","127.0.0.2",http.StatusUnauthorized,"raw type, wrong origin host",
+		""},
+	{"raw","test","auto","stream", prepareToken("test"),"127.0.0.1",http.StatusUnauthorized,"raw when not online",
+		""},
+	{"processed","test","auto","stream", "","127.0.0.1:1001",http.StatusOK,"processed without token",
+		`{"beamtimeId":"test","beamline":"bl1","stream":"stream","core-path":"./tf/gpfs/bl1/2019/data/test","beamline-path":"","source-type":"processed"}`},
+	{"processed","test","auto","stream", "","127.0.0.2",http.StatusUnauthorized,"processed without token, wrong host",
+		""},
 }
 
-func TestAuthorizeBeamline(t *testing.T) {
+func TestAuthorize(t *testing.T) {
+	ldapClient = mockClient
 	allowBeamlines([]beamtimeMeta{})
+
+	expected_uri := "expected_uri"
+	expected_base := "expected_base"
+	allowed_ips := []string{"127.0.0.1"}
+	settings.RootBeamtimesFolder ="."
 	settings.CurrentBeamlinesFolder="."
+	settings.Ldap.FilterTemplate="a3__BEAMLINE__-hosts"
+	settings.Ldap.Uri = expected_uri
+	settings.Ldap.BaseDn = expected_base
+
+	os.MkdirAll(filepath.Clean("tf/gpfs/bl1/2019/data/test"), os.ModePerm)
+	os.MkdirAll(filepath.Clean("tf/gpfs/bl1/2019/data/test_online"), os.ModePerm)
 	os.MkdirAll(filepath.Clean("p07/current"), os.ModePerm)
+	os.MkdirAll(filepath.Clean("bl1/current"), os.ModePerm)
 	ioutil.WriteFile(filepath.Clean("p07/current/beamtime-metadata-11111111.json"), []byte(beamtime_meta), 0644)
+	ioutil.WriteFile(filepath.Clean("bl1/current/beamtime-metadata-test_online.json"), []byte(beamtime_meta_online), 0644)
 	defer 	os.RemoveAll("p07")
+	defer 	os.RemoveAll("tf")
+	defer 	os.RemoveAll("bl1")
+
+	for _, test := range authTests {
+		if test.source_type == "raw" || test.token == "" {bl := test.beamline
+			if test.beamline == "auto" {
+				bl = "bl1"
+			}
+			expected_filter:="a3"+bl+"-hosts"
+			if test.stream == "noldap" {
+				err := &common.ServerError{utils.StatusServiceUnavailable,""}
+				mockClient.On("GetAllowedIpsForBeamline", expected_uri, expected_base,expected_filter).Return([]string{}, err)
+			} else {
+				mockClient.On("GetAllowedIpsForBeamline", expected_uri, expected_base,expected_filter).Return(allowed_ips, nil)
+			}
+		}
 
-	for _, test := range authBeamlineTests {
-		request :=  makeRequest(authorizationRequest{"auto%"+test.beamline+"%stream%"+test.token,"host"})
+		request :=  makeRequest(authorizationRequest{test.source_type+"%"+test.beamtime_id+"%"+test.beamline+"%"+test.stream+"%"+test.token,test.originHost})
 		w := doPostRequest("/authorize",request)
 
 		body, _ := ioutil.ReadAll(w.Body)
-		body_str:=string(body)
-		body_str = strings.Replace(body_str,string(os.PathSeparator),"/",-1)
-		body_str = strings.Replace(body_str,"//","/",-1)
 		if test.status==http.StatusOK {
-			assert.Contains(t, body_str, test.beamtime_id, "")
-			assert.Contains(t, body_str, test.beamline, "")
-			assert.Contains(t, body_str, "asap3/petra3/gpfs/p07/2020/data/11111111", "")
-			assert.Contains(t, body_str, "p07/current", "")
-			assert.Contains(t, body_str, "stream", "")
+			body_str:=string(body)
+			body_str = strings.Replace(body_str,string(os.PathSeparator),"/",-1)
+			body_str = strings.Replace(body_str,"//","/",-1)
+			assert.Equal(t, test.answer,body_str,test.message)
 		}
-
-		assert.Equal(t, test.status, w.Code, test.message)
+		assert.Equal(t, test.status,w.Code, test.message)
+		mockClient.AssertExpectations(t)
+		mockClient.ExpectedCalls=nil
 	}
 }
 
-
 func TestNotAuthorized(t *testing.T) {
-	request :=  makeRequest(authorizationRequest{"any_id%%%","host"})
+	request :=  makeRequest(authorizationRequest{"raw%any_id%%%","host"})
 	w := doPostRequest("/authorize",request)
 	assert.Equal(t, http.StatusUnauthorized, w.Code, "")
 }
@@ -260,7 +265,7 @@ func TestAuthorizeWrongPath(t *testing.T) {
 }
 
 func TestDoNotAuthorizeIfNotInAllowed(t *testing.T) {
-	allowBeamlines([]beamtimeMeta{{"test","beamline","","2019","tf"}})
+	allowBeamlines([]beamtimeMeta{{"test","beamline","","2019","tf",""}})
 
 	request :=  authorizationRequest{"asapo_test%%","host"}
 	creds,_ := getSourceCredentials(request)
@@ -279,45 +284,6 @@ func TestSplitHostNoPort(t *testing.T) {
 	assert.Equal(t,"127.0.0.1", host, "")
 }
 
-func TestGetBeamlineFromIP(t *testing.T) {
-	beamline, err := getBeamlineFromIP("127.0.0.1:112")
-	assert.NotNil(t,err, "")
-	assert.Empty(t,beamline, "")
-
-}
-
-func TestAuthorizeWithFile(t *testing.T) {
-	settings.IpBeamlineMappingFolder="."
-	settings.RootBeamtimesFolder ="."
-	os.MkdirAll(filepath.Clean("tf/gpfs/bl1/2019/data/11003924"), os.ModePerm)
-
-
-	ioutil.WriteFile("127.0.0.1", []byte("bl1"), 0644)
-
-
-	request := authorizationRequest{"11003924%%%","127.0.0.1"}
-	w := doPostRequest("/authorize",makeRequest(request))
-
-	body, _ := ioutil.ReadAll(w.Body)
-	body_str:=string(body)
-	body_str = strings.Replace(body_str,string(os.PathSeparator),"/",-1)
-	body_str = strings.Replace(body_str,"//","/",-1)
-	assert.Contains(t,body_str,"tf/gpfs/bl1/2019/data/11003924")
-	assert.Contains(t, body_str, "11003924", "")
-	assert.Contains(t, body_str, "bl1", "")
-	assert.Contains(t, body_str, "detector", "")
-	assert.Equal(t, http.StatusOK, w.Code, "")
-
-	request = authorizationRequest{"wrong%%%","127.0.0.1"}
-	w = doPostRequest("/authorize",makeRequest(request))
-	assert.Equal(t, http.StatusUnauthorized, w.Code, "")
-
-	os.Remove("127.0.0.1")
-	os.RemoveAll("tf")
-
-}
-
-
 var extractBtinfoTests = [] struct {
 	root string
 	fname string
diff --git a/authorizer/src/asapo_authorizer/server/folder_token.go b/authorizer/src/asapo_authorizer/server/folder_token.go
index 8c50ae06224924888ea1ce450a73e16e37731769..bb69d5b9b34a56169a596c8c2cbec13db7fc6fd7 100644
--- a/authorizer/src/asapo_authorizer/server/folder_token.go
+++ b/authorizer/src/asapo_authorizer/server/folder_token.go
@@ -66,7 +66,7 @@ func extractFolderTokenrequest(r *http.Request) (folderTokenRequest,error) {
 }
 
 func checkBeamtimeFolder(request folderTokenRequest) error {
-	beamtimeMeta, err := findMeta(SourceCredentials{request.BeamtimeId,"auto","",""})
+	beamtimeMeta, err := findMeta(SourceCredentials{request.BeamtimeId,"auto","","",""})
 	if err != nil {
 		log.Error("cannot get beamtime meta"+err.Error())
 		return err
diff --git a/authorizer/src/asapo_authorizer/server/server.go b/authorizer/src/asapo_authorizer/server/server.go
index c2fbe406905883e4e02a818d56880e8b0da71e43..e5f7518738ecce6cc70722de09161b39369f698e 100644
--- a/authorizer/src/asapo_authorizer/server/server.go
+++ b/authorizer/src/asapo_authorizer/server/server.go
@@ -1,7 +1,8 @@
 package server
 
 import (
-"asapo_common/utils"
+	"asapo_authorizer/ldap_client"
+	"asapo_common/utils"
 )
 
 type  beamtimeMeta struct {
@@ -10,20 +11,26 @@ type  beamtimeMeta struct {
 	Stream string       `json:"stream"`
 	OfflinePath string `json:"core-path"`
 	OnlinePath string `json:"beamline-path"`
+	Type string `json:"source-type"`
 }
 
 type serverSettings struct {
 	Port                    int
 	LogLevel                string
-	IpBeamlineMappingFolder string
 	RootBeamtimesFolder     string
 	CurrentBeamlinesFolder string
 	AlwaysAllowedBeamtimes  []beamtimeMeta
 	SecretFile              string
 	TokenDurationMin    	int
+	Ldap struct {
+		Uri string
+		BaseDn string
+		FilterTemplate string
+	}
 }
 
 var settings serverSettings
+var ldapClient ldap_client.LdapClient
 var authHMAC utils.Auth
 var authJWT utils.Auth
 
diff --git a/authorizer/src/asapo_authorizer/server/server_nottested.go b/authorizer/src/asapo_authorizer/server/server_nottested.go
index 4f693e3812b700507906c02936c915b00e3379de..b141dcd960de570e86e3c57e2f062bb4983c17b1 100644
--- a/authorizer/src/asapo_authorizer/server/server_nottested.go
+++ b/authorizer/src/asapo_authorizer/server/server_nottested.go
@@ -3,6 +3,7 @@
 package server
 
 import (
+	"asapo_authorizer/ldap_client"
 	log "asapo_common/logger"
 	"asapo_common/utils"
 	"asapo_common/version"
@@ -13,6 +14,7 @@ import (
 
 func Start() {
 	mux := utils.NewRouter(listRoutes)
+	ldapClient = new (ldap_client.OpenLdapClient)
 	log.Info("Starting ASAPO Authorizer, version " + version.GetVersion())
 	log.Info("Listening on port: " + strconv.Itoa(settings.Port))
 	log.Fatal(http.ListenAndServe(":"+strconv.Itoa(settings.Port), http.HandlerFunc(mux.ServeHTTP)))
diff --git a/broker/src/asapo_broker/server/statistics.go b/broker/src/asapo_broker/server/statistics.go
index 1dff498f09d58576b1bf175acc60ddc130fcffbf..13b0103306968508b6c3141de9733d9d3cea7066 100644
--- a/broker/src/asapo_broker/server/statistics.go
+++ b/broker/src/asapo_broker/server/statistics.go
@@ -58,11 +58,9 @@ func (st *serverStatistics) WriteStatistic() (err error) {
 func (st *serverStatistics) Monitor() {
 	for {
 		time.Sleep(1000 * time.Millisecond)
-		logstr := "sending statistics to " + settings.PerformanceDbServer + ", dbname: " + settings.PerformanceDbName
 		if err := st.WriteStatistic(); err != nil {
+		    logstr := "sending statistics to " + settings.PerformanceDbServer + ", dbname: " + settings.PerformanceDbName
 			log.Error(logstr + " - " + err.Error())
-		} else {
-			log.Debug(logstr)
 		}
 		st.Reset()
 	}
diff --git a/common/cpp/include/common/data_structs.h b/common/cpp/include/common/data_structs.h
index 0195cd61b28acf38d3428f506576510767fe8690..5c3408dfb8a10c6b63ffd45f68a67d8c86d08fb8 100644
--- a/common/cpp/include/common/data_structs.h
+++ b/common/cpp/include/common/data_structs.h
@@ -58,14 +58,21 @@ struct DataSet {
 
 using SubDirList = std::vector<std::string>;
 
+enum class SourceType {
+  kProcessed,
+  kRaw
+};
+
+Error GetSourceTypeFromString(std::string stype,SourceType *type);
+std::string GetStringFromSourceType(SourceType type);
 
 struct SourceCredentials {
-    SourceCredentials(std::string beamtime, std::string beamline, std::string stream, std::string token):
+    SourceCredentials(SourceType type, std::string beamtime, std::string beamline, std::string stream, std::string token):
         beamtime_id{std::move(beamtime)},
         beamline{std::move(beamline)},
         stream{std::move(stream)},
-        user_token{std::move(token)} {
-    };
+        user_token{std::move(token)},
+        type{type}{};
     SourceCredentials() {};
     static const std::string kDefaultStream;
     static const std::string kDefaultBeamline;
@@ -74,8 +81,9 @@ struct SourceCredentials {
     std::string beamline;
     std::string stream;
     std::string user_token;
+    SourceType type = SourceType::kProcessed;
     std::string GetString() {
-        return beamtime_id + "%" + beamline + "%" + stream + "%" + user_token;
+        return (type==SourceType::kRaw?std::string("raw"):std::string("processed")) + "%"+ beamtime_id + "%" + beamline + "%" + stream + "%" + user_token;
     };
 };
 
diff --git a/common/cpp/src/data_structs/data_structs.cpp b/common/cpp/src/data_structs/data_structs.cpp
index 9d43c3e66609dcd245bf789bad6be0a99b5f2362..b0bc5dc3cdb0c4df3c27ff7d09469289cca0489d 100644
--- a/common/cpp/src/data_structs/data_structs.cpp
+++ b/common/cpp/src/data_structs/data_structs.cpp
@@ -23,6 +23,25 @@ const std::string SourceCredentials::kDefaultStream = "detector";
 const std::string SourceCredentials::kDefaultBeamline = "auto";
 const std::string SourceCredentials::kDefaultBeamtimeId = "auto";
 
+std::string GetStringFromSourceType(SourceType type) {
+    switch (type) {
+        case SourceType::kRaw:return "raw";
+        default:return "processed"; // covers kProcessed and guarantees a return value
+    }
+}
+
+Error GetSourceTypeFromString(std::string stype,SourceType *type) {
+    Error err;
+    if (stype=="raw") {
+        *type = SourceType::kRaw;
+        return nullptr;
+    } else if (stype=="processed") {
+        *type = SourceType::kProcessed;
+        return nullptr;
+    } else {
+        return TextError("cannot parse source type: "+stype);
+    }
+}
 
 std::string FileInfo::Json() const {
     auto nanoseconds_from_epoch = std::chrono::time_point_cast<std::chrono::nanoseconds>(modify_date).
diff --git a/common/cpp/src/system_io/system_io_windows.cpp b/common/cpp/src/system_io/system_io_windows.cpp
index 1fb5c03250d0a2e4a56e99bb29158e3d5a78f9d1..52cb799e5423e774e3286556b6880401090bd45f 100644
--- a/common/cpp/src/system_io/system_io_windows.cpp
+++ b/common/cpp/src/system_io/system_io_windows.cpp
@@ -61,6 +61,7 @@ Error IOErrorFromGetLastError() {
     case WSAECONNREFUSED:
         return IOErrorTemplates::kConnectionRefused.Generate();
     case ERROR_FILE_EXISTS:
+    case ERROR_ALREADY_EXISTS:
         return IOErrorTemplates::kFileAlreadyExists.Generate();
     default:
         std::cout << "[IOErrorFromGetLastError] Unknown error code: " << last_error << std::endl;
diff --git a/common/cpp/unittests/data_structs/test_data_structs.cpp b/common/cpp/unittests/data_structs/test_data_structs.cpp
index 6efe76354b5893ee97e0154dfb1034cdce35187c..ffd8d7f86aa725129775c91848a1f03ee599b0c8 100644
--- a/common/cpp/unittests/data_structs/test_data_structs.cpp
+++ b/common/cpp/unittests/data_structs/test_data_structs.cpp
@@ -7,6 +7,8 @@
 
 using asapo::FileInfo;
 using asapo::StreamInfo;
+using asapo::SourceType;
+using asapo::SourceCredentials;
 
 using ::testing::AtLeast;
 using ::testing::Eq;
@@ -198,5 +200,38 @@ TEST(StreamInfo, ConvertToJson) {
     ASSERT_THAT(expected_json, Eq(json));
 }
 
+TEST(SourceCredentials, ConvertToString) {
+    auto sc = SourceCredentials{SourceType::kRaw,"beamtime","beamline","stream","token"};
+    std::string expected1= "raw%beamtime%beamline%stream%token";
+    std::string expected2= "processed%beamtime%beamline%stream%token";
+
+    auto res1 = sc.GetString();
+    sc.type = asapo::SourceType::kProcessed;
+    auto res2 = sc.GetString();
+
+    ASSERT_THAT(res1, Eq(expected1));
+    ASSERT_THAT(res2, Eq(expected2));
+}
+
+TEST(SourceCredentials, SourceTypeFromString) {
+    SourceType type1,type2,type3;
+
+    auto err1=GetSourceTypeFromString("raw",&type1);
+    auto err2=GetSourceTypeFromString("processed",&type2);
+    auto err3=GetSourceTypeFromString("bla",&type3);
+
+    ASSERT_THAT(err1, Eq(nullptr));
+    ASSERT_THAT(type1, Eq(SourceType::kRaw));
+    ASSERT_THAT(err2, Eq(nullptr));
+    ASSERT_THAT(type2, Eq(SourceType::kProcessed));
+    ASSERT_THAT(err3, Ne(nullptr));
+}
+
+TEST(SourceCredentials, DefaultSourceTypeInSourceCreds) {
+    SourceCredentials sc;
+
+    ASSERT_THAT(sc.type, Eq(SourceType::kProcessed));
+}
+
 
 }
diff --git a/consumer/api/cpp/unittests/test_consumer_api.cpp b/consumer/api/cpp/unittests/test_consumer_api.cpp
index 90e5b0b0182c91f63a0a1b03d056a809b76c0d63..c09f0b2f470795e3b1c8361206233cdc56aa7691 100644
--- a/consumer/api/cpp/unittests/test_consumer_api.cpp
+++ b/consumer/api/cpp/unittests/test_consumer_api.cpp
@@ -27,7 +27,7 @@ class DataBrokerFactoryTests : public Test {
 
 TEST_F(DataBrokerFactoryTests, CreateServerDataSource) {
 
-    auto data_broker = DataBrokerFactory::CreateServerBroker("server", "path", false, asapo::SourceCredentials{"beamtime_id", "", "", "token"}, &error);
+    auto data_broker = DataBrokerFactory::CreateServerBroker("server", "path", false, asapo::SourceCredentials{asapo::SourceType::kProcessed,"beamtime_id", "", "", "token"}, &error);
 
     ASSERT_THAT(error, Eq(nullptr));
     ASSERT_THAT(dynamic_cast<ServerDataBroker*>(data_broker.get()), Ne(nullptr));
diff --git a/consumer/api/cpp/unittests/test_server_broker.cpp b/consumer/api/cpp/unittests/test_server_broker.cpp
index c1125ccae8330ff5c9c2e515a6f54a561a916a25..17c6a624a2fedfcb563594670ff09769e412b729 100644
--- a/consumer/api/cpp/unittests/test_server_broker.cpp
+++ b/consumer/api/cpp/unittests/test_server_broker.cpp
@@ -46,7 +46,7 @@ namespace {
 TEST(FolderDataBroker, Constructor) {
     auto data_broker =
     std::unique_ptr<ServerDataBroker> {new ServerDataBroker("test", "path", false,
-                asapo::SourceCredentials{"beamtime_id", "", "", "token"})
+                asapo::SourceCredentials{asapo::SourceType::kProcessed,"beamtime_id", "", "", "token"})
     };
     ASSERT_THAT(dynamic_cast<asapo::SystemIO*>(data_broker->io__.get()), Ne(nullptr));
     ASSERT_THAT(dynamic_cast<asapo::CurlHttpClient*>(data_broker->httpclient__.get()), Ne(nullptr));
@@ -87,10 +87,10 @@ class ServerDataBrokerTests : public Test {
     void AssertSingleFileTransfer();
     void SetUp() override {
         data_broker = std::unique_ptr<ServerDataBroker> {
-            new ServerDataBroker(expected_server_uri, expected_path, true, asapo::SourceCredentials{expected_beamtime_id, "", expected_stream, expected_token})
+            new ServerDataBroker(expected_server_uri, expected_path, true, asapo::SourceCredentials{asapo::SourceType::kProcessed,expected_beamtime_id, "", expected_stream, expected_token})
         };
         fts_data_broker = std::unique_ptr<ServerDataBroker> {
-            new ServerDataBroker(expected_server_uri, expected_path, false, asapo::SourceCredentials{expected_beamtime_id, "", expected_stream, expected_token})
+            new ServerDataBroker(expected_server_uri, expected_path, false, asapo::SourceCredentials{asapo::SourceType::kProcessed,expected_beamtime_id, "", expected_stream, expected_token})
         };
         data_broker->io__ = std::unique_ptr<IO> {&mock_io};
         data_broker->httpclient__ = std::unique_ptr<asapo::HttpClient> {&mock_http_client};
@@ -178,7 +178,7 @@ TEST_F(ServerDataBrokerTests, DefaultStreamIsDetector) {
     data_broker->httpclient__.release();
     data_broker->net_client__.release();
     data_broker = std::unique_ptr<ServerDataBroker> {
-        new ServerDataBroker(expected_server_uri, expected_path, false, asapo::SourceCredentials{"beamtime_id", "", "", expected_token})
+        new ServerDataBroker(expected_server_uri, expected_path, false, asapo::SourceCredentials{asapo::SourceType::kProcessed,"beamtime_id", "", "", expected_token})
     };
     data_broker->io__ = std::unique_ptr<IO> {&mock_io};
     data_broker->httpclient__ = std::unique_ptr<asapo::HttpClient> {&mock_http_client};
diff --git a/consumer/api/python/CMakeLists_Linux.cmake b/consumer/api/python/CMakeLists_Linux.cmake
index 5c6be372a27df3d93b7dabfd119496fe57ad04ab..22888994e87572147eb002acc1b89d48089352be 100644
--- a/consumer/api/python/CMakeLists_Linux.cmake
+++ b/consumer/api/python/CMakeLists_Linux.cmake
@@ -1,9 +1,5 @@
 
-if (ENABLE_LIBFABRIC)
-    set (TEMP_ADDITIONAL_LINK_ARGS_PART ", '-lfabric'")
-else()
-    set (TEMP_ADDITIONAL_LINK_ARGS_PART "")
-endif()
+set (TEMP_ADDITIONAL_LINK_ARGS_PART "") # Arguments for the linker
 
 if ((CMAKE_BUILD_TYPE STREQUAL "Debug") AND (CMAKE_C_COMPILER_ID STREQUAL "GNU"))
     set (EXTRA_COMPILE_ARGS "['--std=c++11']")
diff --git a/deploy/asapo_helm_chart/asapo/configs/asapo-authorizer.json b/deploy/asapo_helm_chart/asapo/configs/asapo-authorizer.json
index b3ad3fd52c785b586e2277882f30cbf6289a87d1..76d3a8480012356fccf9daa46e676e6595b00ca4 100644
--- a/deploy/asapo_helm_chart/asapo/configs/asapo-authorizer.json
+++ b/deploy/asapo_helm_chart/asapo/configs/asapo-authorizer.json
@@ -7,5 +7,11 @@
   "RootBeamtimesFolder":"{{ .Values.common.offlineDir }}",
   "CurrentBeamlinesFolder":"{{ .Values.common.onlineDir }}",
   "SecretFile":"/etc/authorizer/auth_secret.key",
-  "TokenDurationMin":600
+  "TokenDurationMin":600,
+  "Ldap":
+  {
+  "Uri" : "ldap://localhost:389",
+  "BaseDn" : "ou=rgy,o=desy,c=de",
+  "FilterTemplate" : "(cn=a3__BEAMLINE__-hosts)"
+  }
 }
diff --git a/deploy/asapo_services/scripts/authorizer.json.tpl b/deploy/asapo_services/scripts/authorizer.json.tpl
index e4f4e62bc86d2b15bd1bfc1fec751073cbc7ce1a..e73af4ae5dc891ba81098ee47a31a09bb7481f1d 100644
--- a/deploy/asapo_services/scripts/authorizer.json.tpl
+++ b/deploy/asapo_services/scripts/authorizer.json.tpl
@@ -7,5 +7,11 @@
   "RootBeamtimesFolder":"{{ env "NOMAD_META_offline_dir" }}",
   "CurrentBeamlinesFolder":"{{ env "NOMAD_META_online_dir" }}",
   "SecretFile":"/local/secret.key",
-  "TokenDurationMin":600
+  "TokenDurationMin":600,
+  "Ldap":
+    {
+        "Uri" : "ldap://localhost:389",
+        "BaseDn" : "ou=rgy,o=desy,c=de",
+        "FilterTemplate" : "(cn=a3__BEAMLINE__-hosts)"
+    }
 }
diff --git a/deploy/build_env/Ubuntu16.04/Dockerfile b/deploy/build_env/Ubuntu16.04/Dockerfile
index 83de5fd69999360bb213ab4c24fd3ff7d49f3e10..20f8c85911f3fab595252eac824e5c4543132825 100644
--- a/deploy/build_env/Ubuntu16.04/Dockerfile
+++ b/deploy/build_env/Ubuntu16.04/Dockerfile
@@ -4,11 +4,12 @@ ENV GOPATH /tmp
 
 ADD install_curl.sh install_curl.sh
 ADD install_cmake.sh install_cmake.sh
+ADD install_libfabric.sh install_libfabric.sh
 
 RUN apt update && apt install -y g++ golang zlib1g-dev python cython python-numpy python3 cython3 python3-numpy  \
 git wget python-pip python3-pip && \
 pip  --no-cache-dir install --upgrade cython && pip3  --no-cache-dir install --upgrade cython && pip3  --no-cache-dir install sphinx && \
-./install_curl.sh /curl &&  ./install_cmake.sh && \
+./install_curl.sh /curl &&  ./install_cmake.sh && ./install_libfabric.sh && \
 apt-get purge -y --auto-remove
 
 ADD build.sh /bin/build.sh
diff --git a/deploy/build_env/Ubuntu16.04/build.sh b/deploy/build_env/Ubuntu16.04/build.sh
index 4daa3974e7e7b74bddf00b38fdac56b936ff5a75..6cc5e5b959aa672abc23df2ac32eadf754c9c4d1 100755
--- a/deploy/build_env/Ubuntu16.04/build.sh
+++ b/deploy/build_env/Ubuntu16.04/build.sh
@@ -5,6 +5,7 @@ set -e
 cd /asapo/build
 cmake \
     -DCMAKE_BUILD_TYPE="Release" \
+    -DENABLE_LIBFABRIC=on \
     -DLIBCURL_DIR=/curl \
     -DBUILD_PYTHON_DOCS=ON \
     -DBUILD_EVENT_MONITOR_PRODUCER=ON \
diff --git a/deploy/build_env/Ubuntu16.04/build_image.sh b/deploy/build_env/Ubuntu16.04/build_image.sh
index 415b215afd6aaf4ba1eb53a35b5830ae37581e4f..df1c4b6cee2e8f9a5712813f061b1d1803cb24e3 100755
--- a/deploy/build_env/Ubuntu16.04/build_image.sh
+++ b/deploy/build_env/Ubuntu16.04/build_image.sh
@@ -1,4 +1,4 @@
 #!/usr/bin/env bash
 set -e
 docker build -t yakser/asapo-env:ubuntu16.04 .
-docker push yakser/asapo-env:ubuntu16.04
+#docker push yakser/asapo-env:ubuntu16.04
diff --git a/deploy/build_env/Ubuntu16.04/install_libfabric.sh b/deploy/build_env/Ubuntu16.04/install_libfabric.sh
new file mode 100755
index 0000000000000000000000000000000000000000..cfa39b7acf036cb1c69e083e2be46f50cb66e6b6
--- /dev/null
+++ b/deploy/build_env/Ubuntu16.04/install_libfabric.sh
@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+
+apt install -y wget autoconf libtool make librdmacm-dev
+wget https://github.com/ofiwg/libfabric/archive/v1.11.0.tar.gz
+tar xzf v1.11.0.tar.gz
+cd libfabric-1.11.0
+./autogen.sh
+./configure
+make
+make install
+cd -
+rm -rf libfabric-1.11.0
+rm v1.11.0.tar.gz
diff --git a/deploy/build_env/manylinux2010/Dockerfile b/deploy/build_env/manylinux2010/Dockerfile
index f9ea1e6244926873f98c0f7ea3e77ed3d47d8af6..847c30f8d5500cf6212cea09da1f91dcbcfa822b 100644
--- a/deploy/build_env/manylinux2010/Dockerfile
+++ b/deploy/build_env/manylinux2010/Dockerfile
@@ -7,10 +7,12 @@ RUN yum update -y && yum install -y golang wget zlib-devel
 ADD install_curl.sh install_curl.sh
 RUN ./install_curl.sh /curl
 
-ADD build.sh /bin/build.sh
-
-RUN chmod og+rwX -R /opt
-
 ADD install_cmake.sh install_cmake.sh
 RUN ./install_cmake.sh
 
+ADD install_libfabric.sh install_libfabric.sh
+RUN ./install_libfabric.sh
+
+RUN chmod og+rwX -R /opt
+
+ADD build.sh /bin/build.sh
\ No newline at end of file
diff --git a/deploy/build_env/manylinux2010/build.sh b/deploy/build_env/manylinux2010/build.sh
index afc0ab8d0a5e6116bf06991def4a92e1c1108c39..5e2fa538f7e304fdd7f27d9656b0eb37b70fbf00 100755
--- a/deploy/build_env/manylinux2010/build.sh
+++ b/deploy/build_env/manylinux2010/build.sh
@@ -6,7 +6,7 @@ for python_path in /opt/python/cp{27,35,36,37}*m; do
     pip=$python_path/bin/pip
 
     cd /asapo/build
-    cmake -DCMAKE_BUILD_TYPE="Release" -DLIBCURL_DIR=/curl -DPython_EXECUTABLE=$python ..
+    cmake -DENABLE_LIBFABRIC=on -DCMAKE_BUILD_TYPE="Release" -DLIBCURL_DIR=/curl -DPython_EXECUTABLE=$python ..
     cd consumer \
         && $pip install -r /asapo/consumer/api/python/dev-requirements.txt \
         && make \
diff --git a/deploy/build_env/manylinux2010/install_libfabric.sh b/deploy/build_env/manylinux2010/install_libfabric.sh
new file mode 100755
index 0000000000000000000000000000000000000000..cfa39b7acf036cb1c69e083e2be46f50cb66e6b6
--- /dev/null
+++ b/deploy/build_env/manylinux2010/install_libfabric.sh
@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+
+yum install -y wget autoconf libtool make librdmacm-dev
+wget https://github.com/ofiwg/libfabric/archive/v1.11.0.tar.gz
+tar xzf v1.11.0.tar.gz
+cd libfabric-1.11.0
+./autogen.sh
+./configure
+make
+make install
+cd -
+rm -rf libfabric-1.11.0
+rm v1.11.0.tar.gz
diff --git a/examples/consumer/getnext_broker/getnext_broker.cpp b/examples/consumer/getnext_broker/getnext_broker.cpp
index 037eaf4606823b9ea51acabbd87e77df4ee12ed1..05a15a02dcb9abc5b25e9f87eaee572936e03c95 100644
--- a/examples/consumer/getnext_broker/getnext_broker.cpp
+++ b/examples/consumer/getnext_broker/getnext_broker.cpp
@@ -98,7 +98,7 @@ StartThreads(const Args& params, std::vector<int>* nfiles, std::vector<int>* err
         asapo::FileInfo fi;
         Error err;
         auto broker = asapo::DataBrokerFactory::CreateServerBroker(params.server, params.file_path, true,
-                      asapo::SourceCredentials{params.beamtime_id, "", params.stream, params.token}, &err);
+                      asapo::SourceCredentials{asapo::SourceType::kProcessed,params.beamtime_id, "", params.stream, params.token}, &err);
         if (err) {
             std::cout << "Error CreateServerBroker: " << err << std::endl;
             exit(EXIT_FAILURE);
diff --git a/examples/pipeline/in_to_out/check_linux.sh b/examples/pipeline/in_to_out/check_linux.sh
index 6490fbadbf464a8708f57769e9b0715bdb28483f..8214bba5b2aa52a80676a148d92e757bd2851bcb 100644
--- a/examples/pipeline/in_to_out/check_linux.sh
+++ b/examples/pipeline/in_to_out/check_linux.sh
@@ -32,9 +32,10 @@ Cleanup() {
     nomad stop broker
     nomad stop receiver
     nomad stop authorizer
-	echo "db.dropDatabase()" | mongo ${indatabase_name}
-	echo "db.dropDatabase()" | mongo ${outdatabase_name}
-	rm -rf file1 file2 file3
+	  echo "db.dropDatabase()" | mongo ${indatabase_name}
+	  echo "db.dropDatabase()" | mongo ${outdatabase_name}
+    echo "db.dropDatabase()" | mongo ${outdatabase_name2}
+	  rm -rf processed
     rm -rf ${receiver_root_folder}
     rm -rf out out2
 
@@ -47,14 +48,14 @@ nomad run receiver_tcp.nmd
 nomad run authorizer.nmd
 
 mkdir -p $receiver_folder
-
-echo hello1 > file1
-echo hello2 > file2
-echo hello3 > file3
+mkdir processed
+echo hello1 > processed/file1
+echo hello2 > processed/file2
+echo hello3 > processed/file3
 
 for i in `seq 1 3`;
 do
-	echo 'db.data_default.insert({"_id":'$i',"size":6,"name":"'file$i'","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}})' | mongo ${indatabase_name}
+	echo 'db.data_default.insert({"_id":'$i',"size":6,"name":"'processed/file$i'","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}})' | mongo ${indatabase_name}
 done
 
 sleep 1
@@ -66,11 +67,11 @@ cat out | grep "Sent 3 file(s)"
 
 echo "db.data_default.find({"_id":1})" | mongo ${outdatabase_name} | tee /dev/stderr | grep file1_${stream_out}
 
-cat ${receiver_folder}/file1_${stream_out} | grep hello1
-cat ${receiver_folder}/file2_${stream_out} | grep hello2
-cat ${receiver_folder}/file3_${stream_out} | grep hello3
+cat ${receiver_folder}/processed/file1_${stream_out} | grep hello1
+cat ${receiver_folder}/processed/file2_${stream_out} | grep hello2
+cat ${receiver_folder}/processed/file3_${stream_out} | grep hello3
 
 $1 127.0.0.1:8400 $source_path $beamtime_id $stream_in $stream_out2 $token 2 1000 25000 0  > out2
 cat out2
-test ! -f ${receiver_folder}/file1_${stream_out2}
-echo "db.data_default.find({"_id":1})" | mongo ${outdatabase_name2} | tee /dev/stderr | grep ./file1
+test ! -f ${receiver_folder}/processed/file1_${stream_out2}
+echo "db.data_default.find({"_id":1})" | mongo ${outdatabase_name2} | tee /dev/stderr | grep processed/file1
diff --git a/examples/pipeline/in_to_out/check_windows.bat b/examples/pipeline/in_to_out/check_windows.bat
index 9b575777b8fb97ab7f155e0e94c414cafa7fe038..9d45718a5bcb01e83849f6d7dfc02596a6a4195b 100644
--- a/examples/pipeline/in_to_out/check_windows.bat
+++ b/examples/pipeline/in_to_out/check_windows.bat
@@ -20,13 +20,14 @@ SET mongo_exe="c:\Program Files\MongoDB\Server\4.2\bin\mongo.exe"
 
 call start_services.bat
 
-for /l %%x in (1, 1, 3) do echo db.data_default.insert({"_id":%%x,"size":6,"name":"file%%x","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}}) | %mongo_exe% %indatabase_name%  || goto :error
+for /l %%x in (1, 1, 3) do echo db.data_default.insert({"_id":%%x,"size":6,"name":"processed\\file%%x","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}}) | %mongo_exe% %indatabase_name%  || goto :error
 
 mkdir %receiver_folder%
 
-echo hello1 > file1
-echo hello2 > file2
-echo hello3 > file3
+mkdir processed
+echo hello1 > processed\file1
+echo hello2 > processed\file2
+echo hello3 > processed\file3
 
 
 "%1" 127.0.0.1:8400 %source_path% %beamtime_id%  %stream_in% %stream_out% %token% 2 1000 25000 1 > out
@@ -36,9 +37,9 @@ findstr /I /L /C:"Sent 3 file(s)" out || goto :error
 
 echo db.data_default.find({"_id":1}) | %mongo_exe% %outdatabase_name% | findstr  /c:"file1_%stream_out%"  || goto :error
 
-findstr /I /L /C:"hello1" %receiver_folder%\file1_%stream_out% || goto :error
-findstr /I /L /C:"hello2" %receiver_folder%\file2_%stream_out% || goto :error
-findstr /I /L /C:"hello3" %receiver_folder%\file3_%stream_out% || goto :error
+findstr /I /L /C:"hello1" %receiver_folder%\processed\file1_%stream_out% || goto :error
+findstr /I /L /C:"hello2" %receiver_folder%\processed\file2_%stream_out% || goto :error
+findstr /I /L /C:"hello3" %receiver_folder%\processed\file3_%stream_out% || goto :error
 
 
 "%1" 127.0.0.1:8400 %source_path% %beamtime_id%  %stream_in% %stream_out2% %token% 2 1000 25000 0 > out2
@@ -47,7 +48,7 @@ findstr /I /L /C:"Processed 3 file(s)" out2 || goto :error
 findstr /I /L /C:"Sent 3 file(s)" out2 || goto :error
 
 
-echo db.data_default.find({"_id":1}) | %mongo_exe% %outdatabase_name2% | findstr /c:".\\\\file1" || goto :error
+echo db.data_default.find({"_id":1}) | %mongo_exe% %outdatabase_name2% | findstr /c:"file1" || goto :error
 
 
 goto :clean
@@ -63,4 +64,4 @@ echo db.dropDatabase() | %mongo_exe% %indatabase_name%
 echo db.dropDatabase() | %mongo_exe% %outdatabase_name%
 echo db.dropDatabase() | %mongo_exe% %outdatabase_name2%
 rmdir /S /Q %receiver_root_folder%
-del file1 file2 file3 out out2
+rmdir /S /Q processed & del out out2
diff --git a/examples/pipeline/in_to_out/in_to_out.cpp b/examples/pipeline/in_to_out/in_to_out.cpp
index e9e794cb660b8ed78b6ca5b6cc5f8b8bbe188ce8..9001d99834a99c7007d7b56a0faf482e06fa4d41 100644
--- a/examples/pipeline/in_to_out/in_to_out.cpp
+++ b/examples/pipeline/in_to_out/in_to_out.cpp
@@ -12,6 +12,7 @@
 
 #include "asapo_consumer.h"
 #include "asapo_producer.h"
+#include "preprocessor/definitions.h"
 
 using std::chrono::system_clock;
 using asapo::Error;
@@ -65,7 +66,7 @@ int ProcessError(const Error& err) {
 
 BrokerPtr CreateBrokerAndGroup(const Args& args, Error* err) {
     auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.server, args.file_path, true,
-                  asapo::SourceCredentials{args.beamtime_id, "", args.stream_in, args.token}, err);
+                  asapo::SourceCredentials{asapo::SourceType::kProcessed,args.beamtime_id, "", args.stream_in, args.token}, err);
     if (*err) {
         return nullptr;
     }
@@ -103,7 +104,7 @@ void SendDataDownstreamThePipeline(const Args& args, const asapo::FileInfo& fi,
         header.file_name += "_" + args.stream_out;
         err_send = producer->SendData(header, std::move(data), asapo::kDefaultIngestMode, ProcessAfterSend);
     } else {
-        header.file_name = args.file_path + "/" + header.file_name;
+        header.file_name = args.file_path + asapo::kPathSeparator + header.file_name;
         err_send = producer->SendData(header, nullptr, asapo::IngestModeFlags::kTransferMetaDataOnly, ProcessAfterSend);
         std::cout << err_send << std::endl;
     }
@@ -188,7 +189,7 @@ std::unique_ptr<asapo::Producer> CreateProducer(const Args& args) {
     asapo::Error err;
     auto producer = asapo::Producer::Create(args.server, args.nthreads,
                                             asapo::RequestHandlerType::kTcp,
-                                            asapo::SourceCredentials{args.beamtime_id, "", args.stream_out, args.token }, 60, &err);
+                                            asapo::SourceCredentials{asapo::SourceType::kProcessed,args.beamtime_id, "", args.stream_out, args.token }, 60, &err);
     if(err) {
         std::cerr << "Cannot start producer. ProducerError: " << err << std::endl;
         exit(EXIT_FAILURE);
diff --git a/examples/pipeline/in_to_out_python/check_linux.sh b/examples/pipeline/in_to_out_python/check_linux.sh
index 330f409736963582fe673954211dd6106b83b8ff..eea96607071b6c24156d390bce18de5a60d1214e 100644
--- a/examples/pipeline/in_to_out_python/check_linux.sh
+++ b/examples/pipeline/in_to_out_python/check_linux.sh
@@ -35,8 +35,8 @@ Cleanup() {
     nomad stop receiver
     nomad stop authorizer
     echo "db.dropDatabase()" | mongo ${indatabase_name}
-	echo "db.dropDatabase()" | mongo ${outdatabase_name}
-	rm -rf file1 file2 file3
+  	echo "db.dropDatabase()" | mongo ${outdatabase_name}
+  	rm -rf processed
     rm -rf ${receiver_root_folder}
     rm -rf out
 
@@ -50,13 +50,14 @@ nomad run authorizer.nmd
 
 mkdir -p $receiver_folder
 
-echo hello1 > file1
-echo hello2 > file2
-echo hello3 > file3
+mkdir processed
+echo hello1 > processed/file1
+echo hello2 > processed/file2
+echo hello3 > processed/file3
 
 for i in `seq 1 3`;
 do
-	echo 'db.data_default.insert({"_id":'$i',"size":6,"name":"'file$i'","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}})' | mongo ${indatabase_name}
+	echo 'db.data_default.insert({"_id":'$i',"size":6,"name":"'processed/file$i'","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}})' | mongo ${indatabase_name}
 done
 
 sleep 1
@@ -71,6 +72,6 @@ cat out | grep "Sent 3 file(s)"
 
 echo "db.data_default.find({"_id":1})" | mongo ${outdatabase_name} | tee /dev/stderr | grep "file1_${stream_out}"
 
-cat ${receiver_folder}/file1_${stream_out} | grep hello1
-cat ${receiver_folder}/file2_${stream_out} | grep hello2
-cat ${receiver_folder}/file3_${stream_out} | grep hello3
+cat ${receiver_folder}/processed/file1_${stream_out} | grep hello1
+cat ${receiver_folder}/processed/file2_${stream_out} | grep hello2
+cat ${receiver_folder}/processed/file3_${stream_out} | grep hello3
diff --git a/examples/pipeline/in_to_out_python/check_windows.bat b/examples/pipeline/in_to_out_python/check_windows.bat
index b93a7f38c5ac641a5cb51e34b678d1f0debbaa2e..c22726a793ea8c90bef06d3b9a6a07bd2a764094 100644
--- a/examples/pipeline/in_to_out_python/check_windows.bat
+++ b/examples/pipeline/in_to_out_python/check_windows.bat
@@ -22,13 +22,14 @@ SET nthreads=4
 
 call start_services.bat
 
-for /l %%x in (1, 1, 3) do echo db.data_default.insert({"_id":%%x,"size":6,"name":"file%%x","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}}) | %mongo_exe% %indatabase_name%  || goto :error
+for /l %%x in (1, 1, 3) do echo db.data_default.insert({"_id":%%x,"size":6,"name":"processed\\file%%x","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}}) | %mongo_exe% %indatabase_name%  || goto :error
 
 mkdir %receiver_folder%
+mkdir processed
 
-echo hello1 > file1
-echo hello2 > file2
-echo hello3 > file3
+echo hello1 > processed\file1
+echo hello2 > processed\file2
+echo hello3 > processed\file3
 
 set PYTHONPATH=%2;%3
 
@@ -40,9 +41,9 @@ findstr /I /L /C:"Sent 3 file(s)" out || goto :error
 
 echo db.data_default.find({"_id":1}) | %mongo_exe% %outdatabase_name% | findstr  /c:"file1_%stream_out%"  || goto :error
 
-findstr /I /L /C:"hello1" %receiver_folder%\file1_%stream_out% || goto :error
-findstr /I /L /C:"hello2" %receiver_folder%\file2_%stream_out% || goto :error
-findstr /I /L /C:"hello3" %receiver_folder%\file3_%stream_out% || goto :error
+findstr /I /L /C:"hello1" %receiver_folder%\processed\file1_%stream_out% || goto :error
+findstr /I /L /C:"hello2" %receiver_folder%\processed\file2_%stream_out% || goto :error
+findstr /I /L /C:"hello3" %receiver_folder%\processed\file3_%stream_out% || goto :error
 
 
 goto :clean
@@ -56,4 +57,4 @@ call stop_services.bat
 echo db.dropDatabase() | %mongo_exe% %indatabase_name%
 echo db.dropDatabase() | %mongo_exe% %outdatabase_name%
 rmdir /S /Q %receiver_root_folder%
-del file1 file2 file3 out
+rmdir /S /Q processed & del out
diff --git a/examples/pipeline/in_to_out_python/in_to_out.py b/examples/pipeline/in_to_out_python/in_to_out.py
index fa9c2c08314b53356c71b67ab5a018f39fab321e..e2b096337c563fd46921e67369effe712882da8f 100644
--- a/examples/pipeline/in_to_out_python/in_to_out.py
+++ b/examples/pipeline/in_to_out_python/in_to_out.py
@@ -28,7 +28,7 @@ transfer_data=int(transfer_data)>0
 
 broker = asapo_consumer.create_server_broker(source,path, True,beamtime,stream_in,token,timeout_s*1000)
 
-producer  = asapo_producer.create_producer(source,beamtime,'auto', stream_out, token, nthreads, 600)
+producer  = asapo_producer.create_producer(source,'processed',beamtime,'auto', stream_out, token, nthreads, 600)
 
 group_id  = broker.generate_group_id()
 
diff --git a/examples/producer/dummy-data-producer/check_linux.sh b/examples/producer/dummy-data-producer/check_linux.sh
index 2e6f36f0953ddc8bd2104051adb387ac1978531e..1366ebc886dc03cf1edca2ecba893b038c14c433 100644
--- a/examples/producer/dummy-data-producer/check_linux.sh
+++ b/examples/producer/dummy-data-producer/check_linux.sh
@@ -7,14 +7,14 @@ set -e
 trap Cleanup EXIT
 
 Cleanup() {
-rm -rf files
+ rm -rf files
 }
 
 mkdir files
 
 $@ files beamtime_id 11 4 4 1 10 2>&1 | grep Rate
 
-ls -ln files/1 | awk '{ print $5 }'| grep 11000
-ls -ln files/2 | awk '{ print $5 }'| grep 11000
-ls -ln files/3 | awk '{ print $5 }'| grep 11000
-ls -ln files/4 | awk '{ print $5 }'| grep 11000
+ls -ln files/processed/1 | awk '{ print $5 }'| grep 11000
+ls -ln files/processed/2 | awk '{ print $5 }'| grep 11000
+ls -ln files/processed/3 | awk '{ print $5 }'| grep 11000
+ls -ln files/processed/4 | awk '{ print $5 }'| grep 11000
diff --git a/examples/producer/dummy-data-producer/check_windows.bat b/examples/producer/dummy-data-producer/check_windows.bat
index b6a20167fd0b9bb4dcc2e2fcdd98fcf7254fcecf..7e706364aa864981af665e63b8dce8d33858ee1e 100644
--- a/examples/producer/dummy-data-producer/check_windows.bat
+++ b/examples/producer/dummy-data-producer/check_windows.bat
@@ -4,16 +4,16 @@ mkdir %folder%
 
 "%1" %folder% beamtime_id 11 4 4 1 10 2>&1 | findstr "Rate" || goto :error
 
-FOR /F "usebackq" %%A IN ('%folder%\1') DO set size=%%~zA
+FOR /F "usebackq" %%A IN ('%folder%\processed\1') DO set size=%%~zA
 if %size% NEQ 11000 goto :error
 
-FOR /F "usebackq" %%A IN ('%folder%\2') DO set size=%%~zA
+FOR /F "usebackq" %%A IN ('%folder%\processed\2') DO set size=%%~zA
 if %size% NEQ 11000 goto :error
 
-FOR /F "usebackq" %%A IN ('%folder%\3') DO set size=%%~zA
+FOR /F "usebackq" %%A IN ('%folder%\processed\3') DO set size=%%~zA
 if %size% NEQ 11000 goto :error
 
-FOR /F "usebackq" %%A IN ('%folder%\4') DO set size=%%~zA
+FOR /F "usebackq" %%A IN ('%folder%\processed\4') DO set size=%%~zA
 if %size% NEQ 11000 goto :error
 
 goto :clean
diff --git a/examples/producer/dummy-data-producer/dummy_data_producer.cpp b/examples/producer/dummy-data-producer/dummy_data_producer.cpp
index e8a40201c74d8c6ce139364fada60aae6cf07b16..d5c522d30ad66fde5e4c9b55e803709f2967a2c9 100644
--- a/examples/producer/dummy-data-producer/dummy_data_producer.cpp
+++ b/examples/producer/dummy-data-producer/dummy_data_producer.cpp
@@ -8,7 +8,7 @@
 #include <sstream>
 
 #include "asapo_producer.h"
-
+#include "preprocessor/definitions.h"
 
 using std::chrono::system_clock;
 
@@ -35,6 +35,9 @@ void PrintCommandArguments(const Args& args) {
               << "iterations: " << args.iterations << std::endl
               << "nthreads: " << args.nthreads << std::endl
               << "mode: " << args.mode << std::endl
+              << "Write files: " << ((args.mode %100) / 10 == 1) << std::endl
+              << "Tcp mode: " << ((args.mode % 10) ==0 ) << std::endl
+              << "Raw: " << (args.mode / 100 == 1)<< std::endl
               << "timeout: " << args.timeout_sec << std::endl
               << "images in set: " << args.images_in_set << std::endl
               << std::endl;
@@ -71,7 +74,7 @@ void ProcessCommandArguments(int argc, char* argv[], Args* args) {
         std::cout <<
                   "Usage: " << argv[0] <<
                   " <destination> <beamtime_id[%<stream>%<token>]> <number_of_kbyte> <iterations> <nthreads>"
-                  " <mode x0 -t tcp, x1 - filesystem, 0x - write files, 1x - do not write files> <timeout (sec)> [n images in set (default 1)]"
+                  " <mode 0xx - processed source type, 1xx - raw source type, xx0 - tcp, xx1 - filesystem, x0x - write files, x1x - do not write files> <timeout (sec)> [n images in set (default 1)]"
                   << std::endl;
         exit(EXIT_FAILURE);
     }
@@ -127,7 +130,7 @@ asapo::FileData CreateMemoryBuffer(size_t size) {
 
 
 bool SendDummyData(asapo::Producer* producer, size_t number_of_byte, uint64_t iterations, uint64_t images_in_set,
-                   const std::string& stream, bool write_files) {
+                   const std::string& stream, bool write_files, asapo::SourceType type) {
 
     asapo::Error err;
     if (iterations == 0) {
@@ -138,13 +141,17 @@ bool SendDummyData(asapo::Producer* producer, size_t number_of_byte, uint64_t it
         }
     }
 
-    for(uint64_t i = 0; i < iterations; i++) {
+    std::string image_folder = GetStringFromSourceType(type)+asapo::kPathSeparator;
+
+
+    for (uint64_t i = 0; i < iterations; i++) {
         auto buffer = CreateMemoryBuffer(number_of_byte);
         asapo::EventHeader event_header{i + 1, number_of_byte, std::to_string(i + 1)};
         std::string meta = "{\"user_meta\":\"test" + std::to_string(i + 1) + "\"}";
         if (!stream.empty()) {
             event_header.file_name = stream + "/" + event_header.file_name;
         }
+        event_header.file_name = image_folder+event_header.file_name;
         event_header.user_metadata = std::move(meta);
         if (images_in_set == 1) {
             auto err = producer->SendData(event_header, std::move(buffer), write_files ? asapo::kDefaultIngestMode :
@@ -163,6 +170,7 @@ bool SendDummyData(asapo::Producer* producer, size_t number_of_byte, uint64_t it
                 if (!stream.empty()) {
                     event_header.file_name = stream + "/" + event_header.file_name;
                 }
+                event_header.file_name = image_folder + event_header.file_name;
                 event_header.user_metadata = meta;
                 auto err = producer->SendData(event_header, std::move(buffer), write_files ? asapo::kDefaultIngestMode :
                                               asapo::kTransferData, &ProcessAfterSend);
@@ -180,7 +188,7 @@ std::unique_ptr<asapo::Producer> CreateProducer(const Args& args) {
     asapo::Error err;
     auto producer = asapo::Producer::Create(args.discovery_service_endpoint, args.nthreads,
                                             args.mode % 10 == 0 ? asapo::RequestHandlerType::kTcp : asapo::RequestHandlerType::kFilesystem,
-                                            asapo::SourceCredentials{args.beamtime_id, "", args.stream, args.token }, 3600, &err);
+                                            asapo::SourceCredentials{args.mode / 100 == 0 ?asapo::SourceType::kProcessed:asapo::SourceType::kRaw,args.beamtime_id, "", args.stream, args.token }, 3600, &err);
     if(err) {
         std::cerr << "Cannot start producer. ProducerError: " << err << std::endl;
         exit(EXIT_FAILURE);
@@ -216,7 +224,7 @@ int main (int argc, char* argv[]) {
     system_clock::time_point start_time = system_clock::now();
 
     if(!SendDummyData(producer.get(), args.number_of_bytes, args.iterations, args.images_in_set, args.stream,
-                      args.mode / 10 == 0)) {
+                      (args.mode %100) / 10 == 0,args.mode / 100 == 0 ?asapo::SourceType::kProcessed:asapo::SourceType::kRaw)) {
         return EXIT_FAILURE;
     }
 
diff --git a/examples/producer/simple-producer/produce.cpp b/examples/producer/simple-producer/produce.cpp
index 68f15c8d2575d567e6bfe94ae29f94a89c734874..7986c4bfa0b5d53f5dbeedabcd8ac0446b84b2a3 100644
--- a/examples/producer/simple-producer/produce.cpp
+++ b/examples/producer/simple-producer/produce.cpp
@@ -34,7 +34,7 @@ int main(int argc, char* argv[]) {
     auto buffer =  asapo::FileData(new uint8_t[send_size]);
     memcpy(buffer.get(), to_send.c_str(), send_size);
 
-    asapo::EventHeader event_header{1, send_size, "test_file"};
+    asapo::EventHeader event_header{1, send_size, std::string("processed") + asapo::kPathSeparator + "test_file"};
     err = producer->SendData(event_header, std::move(buffer), asapo::kDefaultIngestMode, &ProcessAfterSend);
     exit_if_error("Cannot send file", err);
 
diff --git a/producer/api/cpp/include/producer/producer.h b/producer/api/cpp/include/producer/producer.h
index ffd752a1e7e49831fbc796bd4b13b0c8eabba9aa..c6292ea08106c8f68997c6c81f17d8ad4047f4b5 100644
--- a/producer/api/cpp/include/producer/producer.h
+++ b/producer/api/cpp/include/producer/producer.h
@@ -10,7 +10,6 @@
 
 namespace asapo {
 
-
 /** @ingroup producer */
 class Producer {
   public:
diff --git a/producer/api/cpp/src/request_handler_tcp.cpp b/producer/api/cpp/src/request_handler_tcp.cpp
index e0931b03e542cae5a20516e0f73e3d8ff024c5bb..befcd52d4dcb7a33d802b820ca5380ea47d424f7 100644
--- a/producer/api/cpp/src/request_handler_tcp.cpp
+++ b/producer/api/cpp/src/request_handler_tcp.cpp
@@ -230,7 +230,9 @@ bool RequestHandlerTcp::SendDataToOneOfTheReceivers(ProducerRequest* request, bo
                 ProcessRequestCallback(std::move(err), request, "", retry);
                 return false;
             } else {
-                if (err != nullptr ) continue;
+                if (err != nullptr ) {
+                    continue;
+                }
             }
         }
 
@@ -245,7 +247,7 @@ bool RequestHandlerTcp::SendDataToOneOfTheReceivers(ProducerRequest* request, bo
         ProcessRequestCallback(std::move(err), request, response, retry);
         return success;
     }
-    log__->Warning("put back to the queue, request opcode: " + std::to_string(request->header.op_code) +
+    log__->Warning((receivers_list_.empty()?std::string("receiver list empty, "):"")+"put back to the queue, request opcode: " + std::to_string(request->header.op_code) +
                    ", id: " + std::to_string(request->header.data_id));
     *retry = true;
     return false;
diff --git a/producer/api/cpp/unittests/test_producer.cpp b/producer/api/cpp/unittests/test_producer.cpp
index 9e82be03ed1e4dbc8ee3ea52da6303bebf53e904..2e1fae519e39b6362758b988f7f266e8bafe0b77 100644
--- a/producer/api/cpp/unittests/test_producer.cpp
+++ b/producer/api/cpp/unittests/test_producer.cpp
@@ -15,7 +15,7 @@ namespace {
 TEST(CreateProducer, TcpProducer) {
     asapo::Error err;
     std::unique_ptr<asapo::Producer> producer = asapo::Producer::Create("endpoint", 4, asapo::RequestHandlerType::kTcp,
-                                                SourceCredentials{"bt", "", "", ""}, 3600, &err);
+                                                SourceCredentials{asapo::SourceType::kRaw,"bt", "", "", ""}, 3600, &err);
     ASSERT_THAT(dynamic_cast<asapo::ProducerImpl*>(producer.get()), Ne(nullptr));
     ASSERT_THAT(err, Eq(nullptr));
 }
@@ -24,13 +24,13 @@ TEST(CreateProducer, ErrorBeamtime) {
     asapo::Error err;
     std::string expected_beamtimeid(asapo::kMaxMessageSize * 10, 'a');
     std::unique_ptr<asapo::Producer> producer = asapo::Producer::Create("endpoint", 4, asapo::RequestHandlerType::kTcp,
-                                                SourceCredentials{expected_beamtimeid, "", "", ""}, 3600, &err);
+                                                SourceCredentials{asapo::SourceType::kRaw,expected_beamtimeid, "", "", ""}, 3600, &err);
     ASSERT_THAT(producer, Eq(nullptr));
     ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kWrongInput));
 }
 
 TEST(CreateProducer, ErrorOnBothAutoBeamlineBeamtime) {
-    asapo::SourceCredentials creds{"auto", "auto", "subname", "token"};
+    asapo::SourceCredentials creds{asapo::SourceType::kRaw,"auto", "auto", "subname", "token"};
     asapo::Error err;
     std::unique_ptr<asapo::Producer> producer = asapo::Producer::Create("endpoint", 4, asapo::RequestHandlerType::kTcp,
                                                 creds, 3600, &err);
@@ -41,7 +41,7 @@ TEST(CreateProducer, ErrorOnBothAutoBeamlineBeamtime) {
 TEST(CreateProducer, TooManyThreads) {
     asapo::Error err;
     std::unique_ptr<asapo::Producer> producer = asapo::Producer::Create("", asapo::kMaxProcessingThreads + 1,
-                                                asapo::RequestHandlerType::kTcp, SourceCredentials{"bt", "", "", ""}, 3600, &err);
+                                                asapo::RequestHandlerType::kTcp, SourceCredentials{asapo::SourceType::kRaw,"bt", "", "", ""}, 3600, &err);
     ASSERT_THAT(producer, Eq(nullptr));
     ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kWrongInput));
 }
@@ -50,7 +50,7 @@ TEST(CreateProducer, TooManyThreads) {
 TEST(CreateProducer, ZeroThreads) {
     asapo::Error err;
     std::unique_ptr<asapo::Producer> producer = asapo::Producer::Create("", 0,
-                                                asapo::RequestHandlerType::kTcp, SourceCredentials{"bt", "", "", ""}, 3600, &err);
+                                                asapo::RequestHandlerType::kTcp, SourceCredentials{asapo::SourceType::kRaw,"bt", "", "", ""}, 3600, &err);
     ASSERT_THAT(producer, Eq(nullptr));
     ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kWrongInput));
 }
@@ -59,7 +59,7 @@ TEST(CreateProducer, ZeroThreads) {
 TEST(Producer, SimpleWorkflowWihoutConnection) {
     asapo::Error err;
     std::unique_ptr<asapo::Producer> producer = asapo::Producer::Create("hello", 5, asapo::RequestHandlerType::kTcp,
-                                                SourceCredentials{"bt", "", "", ""}, 3600,
+                                                SourceCredentials{asapo::SourceType::kRaw,"bt", "", "", ""}, 3600,
                                                 &err);
 
     asapo::EventHeader event_header{1, 1, "test"};
diff --git a/producer/api/cpp/unittests/test_producer_impl.cpp b/producer/api/cpp/unittests/test_producer_impl.cpp
index fdc8f4c04eaa985068f4497c81bc6a9634e918e0..0baff125416d89528e490eeb30fb39a7db053c91 100644
--- a/producer/api/cpp/unittests/test_producer_impl.cpp
+++ b/producer/api/cpp/unittests/test_producer_impl.cpp
@@ -72,14 +72,14 @@ class ProducerImplTests : public testing::Test {
     char expected_substream[asapo::kMaxMessageSize] = "test_substream";
     std::string expected_next_substream = "next_substream";
 
-    asapo::SourceCredentials expected_credentials{"beamtime_id", "beamline", "subname", "token"
+    asapo::SourceCredentials expected_credentials{asapo::SourceType::kRaw,"beamtime_id", "beamline", "subname", "token"
     };
     asapo::SourceCredentials expected_default_credentials{
-        "beamtime_id", "", "", "token"
+        asapo::SourceType::kProcessed,"beamtime_id", "", "", "token"
     };
 
-    std::string expected_credentials_str = "beamtime_id%beamline%subname%token";
-    std::string expected_default_credentials_str = "beamtime_id%auto%detector%token";
+    std::string expected_credentials_str = "raw%beamtime_id%beamline%subname%token";
+    std::string expected_default_credentials_str = "processed%beamtime_id%auto%detector%token";
 
     std::string expected_metadata = "meta";
     std::string expected_fullpath = "filename";
@@ -391,7 +391,7 @@ TEST_F(ProducerImplTests, OKSendingSendFileRequestWithSubstream) {
 
 TEST_F(ProducerImplTests, ErrorSettingBeamtime) {
     std::string long_str(asapo::kMaxMessageSize * 10, 'a');
-    expected_credentials = asapo::SourceCredentials{long_str, "", "", ""};
+    expected_credentials = asapo::SourceCredentials{asapo::SourceType::kRaw,long_str, "", "", ""};
     EXPECT_CALL(mock_logger, Error(testing::HasSubstr("too long")));
 
     auto err = producer.SetCredentials(expected_credentials);
@@ -402,8 +402,8 @@ TEST_F(ProducerImplTests, ErrorSettingBeamtime) {
 TEST_F(ProducerImplTests, ErrorSettingSecondTime) {
     EXPECT_CALL(mock_logger, Error(testing::HasSubstr("already")));
 
-    producer.SetCredentials(asapo::SourceCredentials{"1", "", "2", "3"});
-    auto err = producer.SetCredentials(asapo::SourceCredentials{"4", "", "5", "6"});
+    producer.SetCredentials(asapo::SourceCredentials{asapo::SourceType::kRaw,"1", "", "2", "3"});
+    auto err = producer.SetCredentials(asapo::SourceCredentials{asapo::SourceType::kRaw,"4", "", "5", "6"});
 
     ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kWrongInput));
 }
diff --git a/producer/api/python/asapo_producer.pxd b/producer/api/python/asapo_producer.pxd
index 61e5f6beaec0893b7cca06a3581279825e4ab29f..d08450fc364c34be3c71508a16cb44076714575e 100644
--- a/producer/api/python/asapo_producer.pxd
+++ b/producer/api/python/asapo_producer.pxd
@@ -22,6 +22,8 @@ cdef extern from "asapo_producer.h" namespace "asapo":
   ErrorTemplateInterface kLocalIOError "asapo::ProducerErrorTemplates::kLocalIOError"
   ErrorTemplateInterface kServerWarning "asapo::ProducerErrorTemplates::kServerWarning"
 
+
+
 cdef extern from "asapo_producer.h" namespace "asapo":
   cppclass FileData:
     unique_ptr[uint8_t[]] release()
@@ -46,11 +48,15 @@ cdef extern from "asapo_producer.h" namespace "asapo":
 
 
 cdef extern from "asapo_producer.h" namespace "asapo":
+  cppclass SourceType:
+    pass
+  cdef Error GetSourceTypeFromString(string types,SourceType * type)
   struct  SourceCredentials:
     string beamtime_id
     string beamline
     string stream
     string user_token
+    SourceType type
 
 cdef extern from "asapo_producer.h" namespace "asapo":
   struct  EventHeader:
@@ -92,6 +98,7 @@ cdef extern from "asapo_wrappers.h" namespace "asapo":
     RequestCallback unwrap_callback_with_memory(RequestCallbackCythonMemory, void*,void*,void*)
 
 
+
 cdef extern from "asapo_producer.h" namespace "asapo" nogil:
     cppclass Producer:
         @staticmethod
diff --git a/producer/api/python/asapo_producer.pyx.in b/producer/api/python/asapo_producer.pyx.in
index db3f83dc3d77b3cbe4e84a8115a7d25a684a4e26..7a991b6b8120bada992c819f04e4464548b76bc2 100644
--- a/producer/api/python/asapo_producer.pyx.in
+++ b/producer/api/python/asapo_producer.pyx.in
@@ -301,23 +301,30 @@ cdef class PyProducer:
             if self.c_producer.get() is not NULL:
                 self.c_producer.get().StopThreads__()
     @staticmethod
-    def __create_producer(endpoint,beamtime_id,beamline,stream,token,nthreads,timeout_sec):
+    def __create_producer(endpoint,type,beamtime_id,beamline,stream,token,nthreads,timeout_sec):
         pyProd = PyProducer()
         cdef Error err
+        cdef SourceType source_type
+        err = GetSourceTypeFromString(type,&source_type)
+        if err:
+            throw_exception(err)
         cdef SourceCredentials source
         source.beamtime_id = beamtime_id
         source.beamline = beamline
         source.user_token = token
         source.stream = stream
+        source.type = source_type
         pyProd.c_producer = Producer.Create(endpoint,nthreads,RequestHandlerType_Tcp,source,timeout_sec,&err)
         if err:
             throw_exception(err)
         return pyProd
 
-def create_producer(endpoint,beamtime_id,beamline,stream,token,nthreads,timeout_sec):
+def create_producer(endpoint,type,beamtime_id,beamline,stream,token,nthreads,timeout_sec):
     """
          :param endpoint: server endpoint (url:port)
          :type endpoint: string
+         :param type: source type, "raw" to write to "raw" folder in beamline filesystem, "processed" to write to "processed" folder in core filesystem
+         :type type: string
          :param beamtime_id: beamtime id, can be "auto" if beamline is given, will automatically select the current beamtime id
          :type beamtime_id: string
          :param beamline: beamline name, can be "auto" if beamtime_id is given
@@ -334,7 +341,7 @@ def create_producer(endpoint,beamtime_id,beamline,stream,token,nthreads,timeout_
             AsapoWrongInputError: wrong input (number of threads, ,,,)
             AsapoProducerError: actually should not happen
     """
-    return PyProducer.__create_producer(_bytes(endpoint),_bytes(beamtime_id),_bytes(beamline),_bytes(stream),_bytes(token),nthreads,timeout_sec)
+    return PyProducer.__create_producer(_bytes(endpoint),_bytes(type),_bytes(beamtime_id),_bytes(beamline),_bytes(stream),_bytes(token),nthreads,timeout_sec)
 
 
 __version__ = "@PYTHON_ASAPO_VERSION@@ASAPO_VERSION_COMMIT@"
diff --git a/producer/event_monitor_producer/src/main_eventmon.cpp b/producer/event_monitor_producer/src/main_eventmon.cpp
index 0b599676e70f8c89034ba31d7f7020421cd35c81..d2ebf9f0aaa605b8e5b1adae3ca25a2bf2365f4e 100644
--- a/producer/event_monitor_producer/src/main_eventmon.cpp
+++ b/producer/event_monitor_producer/src/main_eventmon.cpp
@@ -39,7 +39,7 @@ std::unique_ptr<Producer> CreateProducer() {
 
     Error err;
     auto producer = Producer::Create(config->asapo_endpoint, (uint8_t) config->nthreads,
-                                     config->mode, asapo::SourceCredentials{config->beamtime_id, "", config->stream, ""}, 3600, &err);
+                                     config->mode, asapo::SourceCredentials{asapo::SourceType::kProcessed,config->beamtime_id, "", config->stream, ""}, 3600, &err);
     if(err) {
         std::cerr << "cannot create producer: " << err << std::endl;
         exit(EXIT_FAILURE);
diff --git a/receiver/CMakeLists.txt b/receiver/CMakeLists.txt
index 398f86a6c54a0b6fff54bd2d831a4ff45711bb7d..f77b3213b2d76590a8b277a5744f0ba690ca02d6 100644
--- a/receiver/CMakeLists.txt
+++ b/receiver/CMakeLists.txt
@@ -100,6 +100,7 @@ set(TEST_SOURCE_FILES
         unittests/request_handler/test_requests_dispatcher.cpp
         unittests/test_datacache.cpp
         unittests/file_processors/test_write_file_processor.cpp
+        unittests/file_processors/test_file_processor.cpp
         unittests/file_processors/test_receive_file_processor.cpp
         )
 #
diff --git a/receiver/src/file_processors/file_processor.cpp b/receiver/src/file_processors/file_processor.cpp
index d940e31395914b397b6e749ddcc04052faf58862..51866022aba9a238e21a7a878197a017a09acba5 100644
--- a/receiver/src/file_processors/file_processor.cpp
+++ b/receiver/src/file_processors/file_processor.cpp
@@ -2,6 +2,8 @@
 
 #include "io/io_factory.h"
 #include "../receiver_logger.h"
+#include "../receiver_config.h"
+#include "../request.h"
 
 namespace asapo {
 
@@ -9,4 +11,40 @@ FileProcessor::FileProcessor(): io__{GenerateDefaultIO()}, log__{GetDefaultRecei
 
 }
 
+Error GetRootFolder(const Request* request, std::string* root_folder) {
+    std::string root;
+    auto fname = request->GetFileName();
+    auto pos = fname.find(asapo::kPathSeparator);
+    if (pos == std::string::npos) {
+        return ReceiverErrorTemplates::kBadRequest.Generate("cannot extract root folder from file path "+fname);
+    }
+
+    auto posr = fname.find("..");
+    if (posr != std::string::npos) {
+        return ReceiverErrorTemplates::kBadRequest.Generate("cannot use relative path in path name "+fname);
+    }
+
+    std::string file_folder = fname.substr(0, pos);
+    auto folder_by_type = GetStringFromSourceType(request->GetSourceType());
+    if (file_folder!=folder_by_type) {
+        return ReceiverErrorTemplates::kBadRequest.Generate("file "+fname+" is not in "+folder_by_type +" folder");
+    }
+
+    switch (request->GetSourceType()) {
+        case SourceType::kProcessed:
+            root = request->GetOfflinePath();
+            break;
+        case SourceType::kRaw:
+            root = request->GetOnlinePath();
+            if (root.empty()) {
+                return ReceiverErrorTemplates::kBadRequest.Generate("online path not available");
+            }
+            break;
+    }
+
+    *root_folder = root;
+    return nullptr;
+}
+
+
 }
diff --git a/receiver/src/file_processors/file_processor.h b/receiver/src/file_processors/file_processor.h
index b57ca733c3db3c4d42df5b36312c41702ccc868c..433a4c896fd13967b0de8120ba274047245a2c41 100644
--- a/receiver/src/file_processors/file_processor.h
+++ b/receiver/src/file_processors/file_processor.h
@@ -8,6 +8,8 @@ namespace asapo {
 
 class Request;
 
+Error GetRootFolder(const Request* request, std::string* root_folder);
+
 class FileProcessor {
   public:
     FileProcessor();
diff --git a/receiver/src/file_processors/receive_file_processor.cpp b/receiver/src/file_processors/receive_file_processor.cpp
index 1388e4f1c08613ed7ee07a19a46a2d4aadd7f198..7e291bd313efc8c53e4b6d72f68b2fa8777ec230 100644
--- a/receiver/src/file_processors/receive_file_processor.cpp
+++ b/receiver/src/file_processors/receive_file_processor.cpp
@@ -16,8 +16,12 @@ Error ReceiveFileProcessor::ProcessFile(const Request* request, bool overwrite)
     auto fsize = request->GetDataSize();
     auto socket = request->GetSocket();
     auto fname = request->GetFileName();
-    auto root_folder = request->GetOfflinePath();
-    auto err =  io__->ReceiveDataToFile(socket, root_folder, fname, (size_t) fsize, true, overwrite);
+    std::string root_folder;
+    auto err = GetRootFolder(request,&root_folder);
+    if (err) {
+        return err;
+    }
+    err =  io__->ReceiveDataToFile(socket, root_folder, fname, (size_t) fsize, true, overwrite);
     if (!err) {
         log__->Debug("received file of size " + std::to_string(fsize) + " to " + root_folder + kPathSeparator + fname);
     }
diff --git a/receiver/src/file_processors/write_file_processor.cpp b/receiver/src/file_processors/write_file_processor.cpp
index 58a956f10536020a51f2c810d73af0e6901aa494..8437160e2bc22cb89a44bc76f3bfc332369920ce 100644
--- a/receiver/src/file_processors/write_file_processor.cpp
+++ b/receiver/src/file_processors/write_file_processor.cpp
@@ -21,9 +21,13 @@ Error WriteFileProcessor::ProcessFile(const Request* request, bool overwrite) co
 
     auto data = request->GetData();
     auto fname = request->GetFileName();
-    auto root_folder = request->GetOfflinePath();
+    std::string root_folder;
+    auto err = GetRootFolder(request,&root_folder);
+    if (err) {
+        return err;
+    }
 
-    auto err =  io__->WriteDataToFile(root_folder, fname, (uint8_t*)data, (size_t) fsize, true, overwrite);
+    err =  io__->WriteDataToFile(root_folder, fname, (uint8_t*)data, (size_t) fsize, true, overwrite);
     if (!err) {
         log__->Debug("saved file of size " + std::to_string(fsize) + " to " + root_folder + kPathSeparator + fname);
     }
diff --git a/receiver/src/request.cpp b/receiver/src/request.cpp
index 8ca9d9b4c017c58e8063a427acc837af872ad1ad..21b11f5948bf7515327ea174e2c08c25dc23b383 100644
--- a/receiver/src/request.cpp
+++ b/receiver/src/request.cpp
@@ -193,5 +193,11 @@ const ResponseMessageType Request::GetResponseMessageType() const {
 Error Request::CheckForDuplicates()  {
     return check_duplicate_request_handler_->ProcessRequest(this);
 }
+void Request::SetSourceType(SourceType type) {
+    source_type_ = type;
+}
+SourceType Request::GetSourceType() const {
+    return source_type_;
+}
 
 }
diff --git a/receiver/src/request.h b/receiver/src/request.h
index 6bfbcc38f84c53203e1a2c0e66342128cd7286ae..eb3c3a55373835fe18b456505853df3ac057fc12 100644
--- a/receiver/src/request.h
+++ b/receiver/src/request.h
@@ -19,6 +19,8 @@
 #include "data_cache.h"
 
 #include "preprocessor/definitions.h"
+#include "file_processors/file_processor.h"
+
 namespace asapo {
 
 using RequestHandlerList = std::vector<const ReceiverRequestHandler*>;
@@ -52,6 +54,9 @@ class Request {
     VIRTUAL void SetBeamtimeId(std::string beamtime_id);
     VIRTUAL void SetBeamline(std::string beamline);
 
+    VIRTUAL void SetSourceType(SourceType);
+    VIRTUAL SourceType GetSourceType() const;
+
     VIRTUAL const std::string& GetStream() const;
     VIRTUAL void SetStream(std::string stream);
     VIRTUAL void SetMetadata(std::string metadata);
@@ -93,9 +98,9 @@ class Request {
     std::string response_message_;
     ResponseMessageType response_message_type_;
     const RequestHandlerDbCheckRequest* check_duplicate_request_handler_;
+    SourceType source_type_ = SourceType::kProcessed;
 };
 
-
 }
 
 #endif //ASAPO_REQUEST_H
diff --git a/receiver/src/request_handler/request_handler_authorize.cpp b/receiver/src/request_handler/request_handler_authorize.cpp
index 685eb59b577bd8e107367609f80f690db5b5a507..3b5f7fc59cde995b1fb342e3205dd194afdbf878 100644
--- a/receiver/src/request_handler/request_handler_authorize.cpp
+++ b/receiver/src/request_handler/request_handler_authorize.cpp
@@ -41,16 +41,20 @@ Error RequestHandlerAuthorize::Authorize(Request* request, const char* source_cr
         return auth_error;
     }
 
+    std::string stype;
+
     JsonStringParser parser{response};
     (err = parser.GetString("beamtimeId", &beamtime_id_)) ||
     (err = parser.GetString("stream", &stream_)) ||
     (err = parser.GetString("core-path", &offline_path_)) ||
     (err = parser.GetString("beamline-path", &online_path_)) ||
+    (err = parser.GetString("source-type", &stype)) ||
+    (err = GetSourceTypeFromString(stype, &source_type_)) ||
     (err = parser.GetString("beamline", &beamline_));
     if (err) {
         return ErrorFromAuthorizationServerResponse(err, code);
     } else {
-        log__->Debug(std::string("authorized connection from ") + request->GetOriginUri() + " beamline: " +
+        log__->Debug(std::string("authorized connection from ") + request->GetOriginUri() + ", source type: " + stype + ", beamline: " +
                      beamline_ + ", beamtime id: " + beamtime_id_ + ", stream: " + stream_);
     }
 
@@ -105,6 +109,7 @@ Error RequestHandlerAuthorize::ProcessOtherRequest(Request* request) const {
     request->SetStream(stream_);
     request->SetOfflinePath(offline_path_);
     request->SetOnlinePath(online_path_);
+    request->SetSourceType(source_type_);
     return nullptr;
 }
 
diff --git a/receiver/src/request_handler/request_handler_authorize.h b/receiver/src/request_handler/request_handler_authorize.h
index c0bcd062b046094ab4cdb26422df9489cdda7c44..7d6af9aabccf1ee305a036a717ad9043a8fb823b 100644
--- a/receiver/src/request_handler/request_handler_authorize.h
+++ b/receiver/src/request_handler/request_handler_authorize.h
@@ -25,6 +25,7 @@ class RequestHandlerAuthorize final: public ReceiverRequestHandler {
     mutable std::string beamline_;
     mutable std::string offline_path_;
     mutable std::string online_path_;
+    mutable SourceType source_type_;
     mutable std::string cached_source_credentials_;
     mutable std::chrono::system_clock::time_point last_updated_;
     Error ProcessAuthorizationRequest(Request* request) const;
diff --git a/receiver/src/statistics/statistics_sender_influx_db.cpp b/receiver/src/statistics/statistics_sender_influx_db.cpp
index f1bea1305683d97604e1363658427f9d293f8010..e6b1ca8a706b0dd571e784c7182ab4245c501df9 100644
--- a/receiver/src/statistics/statistics_sender_influx_db.cpp
+++ b/receiver/src/statistics/statistics_sender_influx_db.cpp
@@ -35,8 +35,6 @@ void StatisticsSenderInfluxDb::SendStatistics(const StatisticsToSend& statistic)
         log__->Error(msg + " - " + response);
         return;
     }
-
-    log__->Debug(msg);
 }
 
 std::string StatisticsSenderInfluxDb::StatisticsToString(const StatisticsToSend& statistic) const noexcept {
diff --git a/receiver/unittests/file_processors/test_file_processor.cpp b/receiver/unittests/file_processors/test_file_processor.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..7b658f5985f75cea4dac81b1264f343559806ecf
--- /dev/null
+++ b/receiver/unittests/file_processors/test_file_processor.cpp
@@ -0,0 +1,122 @@
+#include <gtest/gtest.h>
+#include <gmock/gmock.h>
+
+#include "unittests/MockIO.h"
+#include "unittests/MockLogger.h"
+
+#include "../../src/file_processors/receive_file_processor.h"
+#include "common/networking.h"
+#include "preprocessor/definitions.h"
+#include "../mock_receiver_config.h"
+
+#include "../receiver_mocking.h"
+
+using ::testing::Test;
+using ::testing::Return;
+using ::testing::ReturnRef;
+using ::testing::_;
+using ::testing::DoAll;
+using ::testing::SetArgReferee;
+using ::testing::Gt;
+using ::testing::Eq;
+using ::testing::Ne;
+using ::testing::Mock;
+using ::testing::NiceMock;
+using ::testing::InSequence;
+using ::testing::SetArgPointee;
+using ::testing::AllOf;
+using ::testing::HasSubstr;
+
+
+using ::asapo::Error;
+using ::asapo::GetRootFolder;
+using ::asapo::ErrorInterface;
+using ::asapo::FileDescriptor;
+using ::asapo::SocketDescriptor;
+using ::asapo::MockIO;
+using asapo::Request;
+using asapo::ReceiveFileProcessor;
+using ::asapo::GenericRequestHeader;
+using asapo::MockRequest;
+
+namespace {
+
+class FileProcessorTests : public Test {
+  public:
+    NiceMock<MockIO> mock_io;
+    std::unique_ptr<MockRequest> mock_request;
+    NiceMock<asapo::MockLogger> mock_logger;
+    std::string expected_offline_path =  "offline";
+    std::string expected_online_path =  "online";
+    void MockRequestData(std::string fname,asapo::SourceType type);
+    void SetUp() override {
+        GenericRequestHeader request_header;
+        request_header.data_id = 2;
+        asapo::ReceiverConfig test_config;
+        asapo::SetReceiverConfig(test_config, "none");
+        mock_request.reset(new MockRequest{request_header, 1, "", nullptr});
+    }
+    void TearDown() override {
+    }
+
+};
+
+void FileProcessorTests::MockRequestData(std::string fname,asapo::SourceType type) {
+
+    if (type == asapo::SourceType::kProcessed) {
+            EXPECT_CALL(*mock_request, GetOfflinePath())
+             .WillRepeatedly(ReturnRef(expected_offline_path));
+    } else {
+        EXPECT_CALL(*mock_request, GetOnlinePath())
+            .WillRepeatedly(ReturnRef(expected_online_path));
+    }
+
+    EXPECT_CALL(*mock_request, GetSourceType()).WillRepeatedly(Return(type));
+
+    EXPECT_CALL(*mock_request, GetFileName()).Times(1)
+    .WillRepeatedly(Return(fname));
+}
+
+
+std::string repl_sep(const std::string& orig) {
+    std::string str = orig;
+    std::replace(str.begin(), str.end(), '/', asapo::kPathSeparator); // needed for Windows tests
+    return str;
+}
+
+TEST_F(FileProcessorTests, RawWriteToRaw) {
+
+    struct Test {
+      asapo::SourceType type;
+      std::string filename;
+      bool error;
+      std::string res;
+    };
+    std::vector<Test> tests = {
+        Test{asapo::SourceType::kProcessed,repl_sep("processed/bla.text"),false,expected_offline_path},
+        Test{asapo::SourceType::kProcessed,repl_sep("raw/bla.text"),true,""},
+        Test{asapo::SourceType::kProcessed,repl_sep("processed/../bla.text"),true,""},
+        Test{asapo::SourceType::kProcessed,repl_sep("bla/bla.text"),true,""},
+        Test{asapo::SourceType::kProcessed,repl_sep("bla.text"),true,""},
+        Test{asapo::SourceType::kProcessed,repl_sep("./bla.text"),true,""},
+        Test{asapo::SourceType::kRaw,repl_sep("raw/bla.text"),false,expected_online_path},
+    };
+
+    for (auto& test: tests) {
+        MockRequestData(test.filename,test.type);
+        std::string res;
+        auto err = GetRootFolder(mock_request.get(),&res);
+        if (test.error) {
+            ASSERT_THAT(err, Eq(asapo::ReceiverErrorTemplates::kBadRequest));
+        } else {
+            ASSERT_THAT(err, Eq(nullptr));
+            ASSERT_THAT(res, Eq(test.res));
+        }
+        Mock::VerifyAndClearExpectations(mock_request.get());
+    }
+
+}
+
+
+
+}
diff --git a/receiver/unittests/file_processors/test_receive_file_processor.cpp b/receiver/unittests/file_processors/test_receive_file_processor.cpp
index 65648c87122ce5d56899796f7b4ac44a058c2490..612726b465a079215bf46abb014bb1c9cefdc153 100644
--- a/receiver/unittests/file_processors/test_receive_file_processor.cpp
+++ b/receiver/unittests/file_processors/test_receive_file_processor.cpp
@@ -54,11 +54,12 @@ class ReceiveFileProcessorTests : public Test {
     std::unique_ptr<MockRequest> mock_request;
     NiceMock<asapo::MockLogger> mock_logger;
     SocketDescriptor expected_socket_id = SocketDescriptor{1};
-    std::string expected_file_name = "2";
+    std::string expected_file_name = std::string("processed")+asapo::kPathSeparator+std::string("2");
     std::string expected_beamtime_id = "beamtime_id";
     std::string expected_beamline = "beamline";
     std::string expected_facility = "facility";
     std::string expected_year = "2020";
+    asapo::SourceType expected_source_type = asapo::SourceType::kProcessed;
     uint64_t expected_file_size = 10;
     bool expected_overwrite = false;
     std::string expected_root_folder = "root_folder";
@@ -97,7 +98,11 @@ void ReceiveFileProcessorTests::MockRequestData() {
     EXPECT_CALL(*mock_request, GetOfflinePath()).Times(1)
     .WillRepeatedly(ReturnRef(expected_full_path));
 
-    EXPECT_CALL(*mock_request, GetFileName()).Times(1)
+    EXPECT_CALL(*mock_request, GetSourceType()).Times(2)
+        .WillRepeatedly(Return(expected_source_type));
+
+
+    EXPECT_CALL(*mock_request, GetFileName()).Times(2)
     .WillRepeatedly(Return(expected_file_name));
 }
 
diff --git a/receiver/unittests/file_processors/test_write_file_processor.cpp b/receiver/unittests/file_processors/test_write_file_processor.cpp
index b438322cd23ddb8d5ae545d68705fbcbf22af702..6b6f050f5f893a7504a83d0f27950cc50a053b26 100644
--- a/receiver/unittests/file_processors/test_write_file_processor.cpp
+++ b/receiver/unittests/file_processors/test_write_file_processor.cpp
@@ -53,7 +53,8 @@ class WriteFileProcessorTests : public Test {
     NiceMock<MockIO> mock_io;
     std::unique_ptr<MockRequest> mock_request;
     NiceMock<asapo::MockLogger> mock_logger;
-    std::string expected_file_name = "2";
+    std::string expected_file_name = std::string("raw")+asapo::kPathSeparator+std::string("2");
+    asapo::SourceType expected_source_type = asapo::SourceType::kRaw;
     std::string expected_beamtime_id = "beamtime_id";
     std::string expected_beamline = "beamline";
     std::string expected_facility = "facility";
@@ -100,10 +101,14 @@ void WriteFileProcessorTests::MockRequestData(int times) {
     EXPECT_CALL(*mock_request, GetData()).Times(times)
     .WillRepeatedly(Return(nullptr));
 
-    EXPECT_CALL(*mock_request, GetOfflinePath()).Times(times)
+    EXPECT_CALL(*mock_request, GetOnlinePath()).Times(times)
     .WillRepeatedly(ReturnRef(expected_full_path));
 
-    EXPECT_CALL(*mock_request, GetFileName()).Times(times)
+    EXPECT_CALL(*mock_request, GetSourceType()).Times(times*2)
+        .WillRepeatedly(Return(expected_source_type));
+
+
+    EXPECT_CALL(*mock_request, GetFileName()).Times(times*2)
     .WillRepeatedly(Return(expected_file_name));
 }
 
diff --git a/receiver/unittests/receiver_mocking.h b/receiver/unittests/receiver_mocking.h
index c8bd31122bd88571e3c9b28ef9ff312762aba355..7b72249a58fc9bfa4250b6f1e9f0d6b4ea6748c0 100644
--- a/receiver/unittests/receiver_mocking.h
+++ b/receiver/unittests/receiver_mocking.h
@@ -92,6 +92,9 @@ class MockRequest: public Request {
     MOCK_METHOD1(SetOnlinePath, void (std::string));
     MOCK_METHOD1(SetOfflinePath, void (std::string));
 
+    MOCK_METHOD1(SetSourceType, void (SourceType));
+    MOCK_CONST_METHOD0(GetSourceType, SourceType ());
+
     MOCK_CONST_METHOD0(WasAlreadyProcessed, bool());
     MOCK_METHOD0(SetAlreadyProcessedFlag, void());
     MOCK_METHOD2(SetResponseMessage, void(std::string, ResponseMessageType));
diff --git a/receiver/unittests/request_handler/test_request_handler_authorizer.cpp b/receiver/unittests/request_handler/test_request_handler_authorizer.cpp
index bfb8e8f1147253987e7178c93ab4fd43aad1916a..53ff3c5184e5003b17400f93db8f0bc7a5acd3d8 100644
--- a/receiver/unittests/request_handler/test_request_handler_authorizer.cpp
+++ b/receiver/unittests/request_handler/test_request_handler_authorizer.cpp
@@ -72,10 +72,12 @@ class AuthorizerHandlerTests : public Test {
     std::string expected_authorization_server = "authorizer_host";
     std::string expect_request_string;
     std::string expected_source_credentials;
+    asapo::SourceType expected_source_type = asapo::SourceType::kProcessed;
+    std::string expected_source_type_str = "processed";
     void MockRequestData();
     void SetUp() override {
         GenericRequestHeader request_header;
-        expected_source_credentials = expected_beamtime_id + "%stream%token";
+        expected_source_credentials = "processed%"+expected_beamtime_id + "%stream%token";
         expect_request_string = std::string("{\"SourceCredentials\":\"") + expected_source_credentials +
                                 "\",\"OriginHost\":\"" +
                                 expected_producer_uri + "\"}";
@@ -112,12 +114,14 @@ class AuthorizerHandlerTests : public Test {
                              "\",\"stream\":" + "\"" + expected_stream +
                              "\",\"beamline-path\":" + "\"" + expected_beamline_path +
                              "\",\"core-path\":" + "\"" + expected_core_path +
+                             "\",\"source-type\":" + "\"" + expected_source_type_str +
                              "\",\"beamline\":" + "\"" + expected_beamline + "\"}")
                      ));
             if (code != HttpCode::OK) {
                 EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("failure authorizing"),
                                                      HasSubstr("return code"),
                                                      HasSubstr(std::to_string(int(code))),
+                                                     HasSubstr(expected_source_type_str),
                                                      HasSubstr(expected_beamtime_id),
                                                      HasSubstr(expected_stream),
                                                      HasSubstr(expected_producer_uri),
@@ -126,6 +130,7 @@ class AuthorizerHandlerTests : public Test {
                 EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("authorized"),
                                                      HasSubstr(expected_beamtime_id),
                                                      HasSubstr(expected_beamline),
+                                                     HasSubstr(expected_source_type_str),
                                                      HasSubstr(expected_stream),
                                                      HasSubstr(expected_producer_uri))));
             }
@@ -155,6 +160,7 @@ class AuthorizerHandlerTests : public Test {
             EXPECT_CALL(*mock_request, SetOfflinePath(expected_core_path));
             EXPECT_CALL(*mock_request, SetOnlinePath(expected_beamline_path));
             EXPECT_CALL(*mock_request, SetBeamline(expected_beamline));
+            EXPECT_CALL(*mock_request, SetSourceType(expected_source_type));
         }
 
         MockAuthRequest(error, code);
@@ -262,7 +268,7 @@ TEST_F(AuthorizerHandlerTests, DataTransferRequestAuthorizeUsesCachedValue) {
     EXPECT_CALL(*mock_request, SetStream(expected_stream));
     EXPECT_CALL(*mock_request, SetOnlinePath(expected_beamline_path));
     EXPECT_CALL(*mock_request, SetOfflinePath(expected_core_path));
-
+    EXPECT_CALL(*mock_request, SetSourceType(expected_source_type));
     auto err =  handler.ProcessRequest(mock_request.get());
 
     ASSERT_THAT(err, Eq(nullptr));
diff --git a/receiver/unittests/statistics/test_receiver_statistics.cpp b/receiver/unittests/statistics/test_receiver_statistics.cpp
index de05a2a3f12f4df3946c8422819c035b892cd27f..413aeca86a8267a88b3a52da18e4217b95d73a68 100644
--- a/receiver/unittests/statistics/test_receiver_statistics.cpp
+++ b/receiver/unittests/statistics/test_receiver_statistics.cpp
@@ -127,7 +127,7 @@ TEST_F(ReceiverStatisticTests, TimerForAll) {
     ASSERT_THAT(stat.extra_entities[StatisticEntity::kNetwork].second, Ge(0.25));
     ASSERT_THAT(stat.extra_entities[StatisticEntity::kNetwork].second, Le(0.35));
 
-    ASSERT_THAT(stat.extra_entities[StatisticEntity::kDisk].second, Ge(0.35));
+    ASSERT_THAT(stat.extra_entities[StatisticEntity::kDisk].second, Ge(0.3));
     ASSERT_THAT(stat.extra_entities[StatisticEntity::kDisk].second, Le(0.45));
 }
 
diff --git a/receiver/unittests/statistics/test_statistics_sender_influx_db.cpp b/receiver/unittests/statistics/test_statistics_sender_influx_db.cpp
index f27906f7cc5034d96886a2a8eee522b6429c1263..7faa0e4ff1a5acc6cc1c9bab96a0b49b25c10feb 100644
--- a/receiver/unittests/statistics/test_statistics_sender_influx_db.cpp
+++ b/receiver/unittests/statistics/test_statistics_sender_influx_db.cpp
@@ -106,22 +106,4 @@ TEST_F(SenderInfluxDbTests, LogErrorWithWrongResponceSendStatistics) {
     sender.SendStatistics(statistics);
 }
 
-TEST_F(SenderInfluxDbTests, LogDebugSendStatistics) {
-    EXPECT_CALL(mock_http_client, Post_t(_, _, _, _, _)).
-    WillOnce(
-        DoAll(SetArgPointee<4>(nullptr), SetArgPointee<3>(asapo::HttpCode::OK), Return("ok response")
-             ));
-
-    EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("sending statistics"),
-                                         HasSubstr(config.performance_db_uri),
-                                         HasSubstr(config.performance_db_name)
-                                        )
-                                  )
-               );
-
-
-    sender.SendStatistics(statistics);
-}
-
-
 }
diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt
index 11b4622c6528a19a79daa5807874d7f8093862ea..4fbf5ab06f9058a9f31ff4a938826d91a31f9f06 100644
--- a/tests/CMakeLists.txt
+++ b/tests/CMakeLists.txt
@@ -6,3 +6,4 @@ configure_files(${CMAKE_CURRENT_SOURCE_DIR}/manual/tests_via_nomad ${CMAKE_CURRE
 add_subdirectory(manual/performance_broker_receiver)
 
 add_subdirectory(manual/asapo_fabric)
+add_subdirectory(manual/producer_cpp)
diff --git a/tests/automatic/authorizer/check_authorize/OpenBeamTimes.txt b/tests/automatic/authorizer/check_authorize/OpenBeamTimes.txt
deleted file mode 100644
index b218c3ec6a28d10fd6e94d86d61992eb79bdbf65..0000000000000000000000000000000000000000
--- a/tests/automatic/authorizer/check_authorize/OpenBeamTimes.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-Open beam times as of  Thursday, 2018/06/21 11:32
-Faclty	BL	BeamTime Id		kind
-flash	bl1	11003924		beamtime	start: 2018-04-24
-flash	bl2	11003921		beamtime	start: 2018-06-08
-flash	fl24	11001734		beamtime	start: 2018-06-13
-flash	pg2	11003932		beamtime	start: 2018-06-11
-flash	thz	11005667		beamtime	start: 2018-05-24
-petra3	ext	50000181		beamtime	start: 2017-04-12
-petra3	ext	50000193		beamtime	start: 2017-10-12
-petra3	ext	50000202		beamtime	start: 2017-12-06
-petra3	ext	50000209		beamtime	start: 2018-02-19
-petra3	ext	50000211		beamtime	start: 2018-02-19
-petra3	ext	50000214		beamtime	start: 2018-04-23
-petra3	ext	50000215		beamtime	start: 2018-03-23
-petra3	ext	50000216		beamtime	start: 2018-03-23
-petra3	ext	50000217		beamtime	start: 2018-03-23
-petra3	ext	50000218		beamtime	start: 2018-03-23
-petra3	ext	50000219		beamtime	start: 2018-04-24
-petra3	ext	50000221		beamtime	start: 2018-06-14
-petra3	p01	11004172		beamtime	start: 2018-06-20
-petra3	p01	c20180508-000-COM20181	commissioning
-petra3	p02.1	11004341		beamtime	start: 2018-06-18
diff --git a/tests/automatic/authorizer/check_authorize/check_linux.sh b/tests/automatic/authorizer/check_authorize/check_linux.sh
index ceecdffca5e6530fe0b2241452fe8398da5b410d..d4befd2b4d47a23fe0602cf1a01d196706c4a152 100644
--- a/tests/automatic/authorizer/check_authorize/check_linux.sh
+++ b/tests/automatic/authorizer/check_authorize/check_linux.sh
@@ -14,32 +14,33 @@ $@ -config settings.json &
 sleep 1
 authorizeid=`echo $!`
 
-mkdir -p asap3/petra3/gpfs/p01/2019/comissioning/c20180508-000-COM20181
-mkdir -p asap3/petra3/gpfs/p01/2019/data/11000015
+mkdir -p asap3/petra3/gpfs/p00/2019/comissioning/c20180508-000-COM20181
+mkdir -p asap3/petra3/gpfs/p00/2019/data/11000015
 mkdir -p beamline/p07/current
 cp beamtime-metadata* beamline/p07/current/
 
-curl -v --silent --data '{"SourceCredentials":"c20180508-000-COM20181%%stream%","OriginHost":"127.0.0.1:5555"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep c20180508-000-COM20181
-curl -v --silent --data '{"SourceCredentials":"c20180508-000-COM20181%%stream%","OriginHost":"127.0.0.1:5555"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p01
-curl -v --silent --data '{"SourceCredentials":"c20180508-000-COM20181%%stream%","OriginHost":"127.0.0.1:5555"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep stream
+curl -v --silent --data '{"SourceCredentials":"processed%c20180508-000-COM20181%%stream%","OriginHost":"127.0.0.1:5555"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep c20180508-000-COM20181
+curl -v --silent --data '{"SourceCredentials":"processed%c20180508-000-COM20181%%stream%","OriginHost":"127.0.0.1:5555"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p00
+curl -v --silent --data '{"SourceCredentials":"processed%c20180508-000-COM20181%%stream%","OriginHost":"127.0.0.1:5555"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep stream
 
 token=onm80KQF8s6d2p_laW0S5IYanUUsLcnB3QO-6QQ1M90= #token for c20180508-000-COM20181
-
-curl -v --silent --data '{"SourceCredentials":"c20180508-000-COM20181%%stream%onm80KQF8s6d2p_laW0S5IYanUUsLcnB3QO-6QQ1M90=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep stream
-curl -v --silent --data '{"SourceCredentials":"c20180508-000-COM20181%auto%stream%onm80KQF8s6d2p_laW0S5IYanUUsLcnB3QO-6QQ1M90=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p01
-curl -v --silent --data '{"SourceCredentials":"c20180508-000-COM20181%%stream%bla","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 401
+curl -v --silent --data '{"SourceCredentials":"processed%c20180508-000-COM20181%%stream%onm80KQF8s6d2p_laW0S5IYanUUsLcnB3QO-6QQ1M90=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep stream
+curl -v --silent --data '{"SourceCredentials":"processed%c20180508-000-COM20181%auto%stream%onm80KQF8s6d2p_laW0S5IYanUUsLcnB3QO-6QQ1M90=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p00
+curl -v --silent --data '{"SourceCredentials":"processed%c20180508-000-COM20181%%stream%bla","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 401
 
 token=dccMd3NT89i32Whz7yD4VQhmEJy6Kxc35wsBbWJLXp0= #token for 11000015
-curl -v --silent --data '{"SourceCredentials":"11000015%%stream%dccMd3NT89i32Whz7yD4VQhmEJy6Kxc35wsBbWJLXp0=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p01
+#beamtime not online
+curl -v --silent --data '{"SourceCredentials":"raw%11000015%%stream%dccMd3NT89i32Whz7yD4VQhmEJy6Kxc35wsBbWJLXp0=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 401
 
 token=Jaas_xTpkB0Zy5dFwjs4kCrY7yXMfbnW8Ca1aYhyKBs= #token for 11000016
-curl -v --silent --data '{"SourceCredentials":"11000016%%stream%Jaas_xTpkB0Zy5dFwjs4kCrY7yXMfbnW8Ca1aYhyKBs=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 401
+curl -v --silent --data '{"SourceCredentials":"raw%11000016%%stream%Jaas_xTpkB0Zy5dFwjs4kCrY7yXMfbnW8Ca1aYhyKBs=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 401
 
 
 token=-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4= # for bl_p07
-curl -v --silent --data '{"SourceCredentials":"auto%p07%stream%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 11111111
-curl -v --silent --data '{"SourceCredentials":"auto%p07%stream%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p07
-curl -v --silent --data '{"SourceCredentials":"auto%p07%stream%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep /asap3/petra3/gpfs/p07/2020/data/11111111
+curl -v --silent --data '{"SourceCredentials":"processed%auto%p07%stream%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 11111111
+curl -v --silent --data '{"SourceCredentials":"raw%auto%p07%stream%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=","OriginHost":"127.0.0.1:5007"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 11111111
+curl -v --silent --data '{"SourceCredentials":"raw%auto%p07%stream%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=","OriginHost":"127.0.0.1:5007"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p07
+curl -v --silent --data '{"SourceCredentials":"raw%auto%p07%stream%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=","OriginHost":"127.0.0.1:5007"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep /asap3/petra3/gpfs/p07/2020/data/11111111
 
 
 rm -rf asap3 beamline
\ No newline at end of file
diff --git a/tests/automatic/authorizer/check_authorize/check_windows.bat b/tests/automatic/authorizer/check_authorize/check_windows.bat
index 3936894d10fa6c4b9e6359cf83edee0ee539e335..ffa16088800e8cbd0dbd2b070e4cefd559c7fca2 100644
--- a/tests/automatic/authorizer/check_authorize/check_windows.bat
+++ b/tests/automatic/authorizer/check_authorize/check_windows.bat
@@ -5,18 +5,17 @@ start /B "" "%full_name%" -config settings.json
 
 ping 1.0.0.0 -n 1 -w 100 > nul
 
-mkdir asap3\petra3\gpfs\p01\2019\comissioning\c20180508-000-COM20181
+mkdir asap3\petra3\gpfs\p00\2019\comissioning\c20180508-000-COM20181
 mkdir beamline\p07\current
 copy beamtime-metadata* beamline\p07\current\ /y
 
-C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"c20180508-000-COM20181%%%%stream%%\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr c20180508-000-COM20181  || goto :error
-C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"c20180508-000-COM20181%%auto%%stream%%\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr p01  || goto :error
-C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"c20180508-000-COM20181%%%%stream%%\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr stream  || goto :error
+C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"processed%%c20180508-000-COM20181%%%%stream%%\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr c20180508-000-COM20181  || goto :error
+C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"processed%%c20180508-000-COM20181%%auto%%stream%%\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr p00  || goto :error
+C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"processed%%c20180508-000-COM20181%%%%stream%%\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr stream  || goto :error
 
-C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"c20180508-000-COM20181%%%%stream%%onm80KQF8s6d2p_laW0S5IYanUUsLcnB3QO-6QQ1M90=\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr stream  || goto :error
-C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"c20180508-000-COM20181%%%%stream%%wrong\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr 401  || goto :error
+C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"raw%%c20180508-000-COM20181%%%%stream%%wrong\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr 401  || goto :error
 
-C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"auto%%p07%%stream%%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr 11111111  || goto :error
+C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"raw%%auto%%p07%%stream%%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr 11111111  || goto :error
 
 goto :clean
 
diff --git a/tests/automatic/authorizer/check_authorize/ip_bl_mapping/127.0.0.1 b/tests/automatic/authorizer/check_authorize/ip_bl_mapping/127.0.0.1
deleted file mode 100644
index 3a6a77dbfe88ddfd9c29a75c37e9c1b8978f2663..0000000000000000000000000000000000000000
--- a/tests/automatic/authorizer/check_authorize/ip_bl_mapping/127.0.0.1
+++ /dev/null
@@ -1 +0,0 @@
-p01
\ No newline at end of file
diff --git a/tests/automatic/authorizer/check_authorize/settings.json.in b/tests/automatic/authorizer/check_authorize/settings.json.in
index c344b09099def4170c924dab9864180d8a7f72eb..51d5bfc3df247a96a4382d942bdbd372bb2dd497 100644
--- a/tests/automatic/authorizer/check_authorize/settings.json.in
+++ b/tests/automatic/authorizer/check_authorize/settings.json.in
@@ -3,8 +3,11 @@
   "LogLevel":"debug",
   "RootBeamtimesFolder":"@ASAP3_FOLDER@",
   "CurrentBeamlinesFolder":"@CURRENT_BEAMLINES_FOLDER@",
-  "IpBeamlineMappingFolder":"@BEAMLINES_FOLDER@",
-  "SecretFile":"auth_secret.key"
-}
-
-
+  "SecretFile":"auth_secret.key",
+  "Ldap":
+    {
+        "Uri" : "ldap://localhost:389",
+        "BaseDn" : "ou=rgy,o=desy,c=de",
+        "FilterTemplate" : "(cn=a3__BEAMLINE__-hosts)"
+    }
+}
\ No newline at end of file
diff --git a/tests/automatic/bug_fixes/error-sending-data-using-callback-method/bugfix_callback.py b/tests/automatic/bug_fixes/error-sending-data-using-callback-method/bugfix_callback.py
index 0f048aeed2d6f2236d10dbb2197490b5cb038cfc..f52bcde652a18ad1864bd6805c8fcd6315959355 100644
--- a/tests/automatic/bug_fixes/error-sending-data-using-callback-method/bugfix_callback.py
+++ b/tests/automatic/bug_fixes/error-sending-data-using-callback-method/bugfix_callback.py
@@ -25,7 +25,7 @@ class AsapoSender:
     def _callback(self, header, err):
     	print ("hello self callback")
 
-producer  = asapo_producer.create_producer(endpoint,beamtime,'auto', stream, token, nthreads, 600)
+producer  = asapo_producer.create_producer(endpoint,'processed',beamtime,'auto', stream, token, nthreads, 600)
 producer.set_log_level("debug")
 
 sender = AsapoSender(producer)
diff --git a/tests/automatic/bug_fixes/producer_send_after_restart/check_linux.sh b/tests/automatic/bug_fixes/producer_send_after_restart/check_linux.sh
index a572806f4eca1327b2912ed74e68a74db5fe3e10..ea36e9886b6209ddef6e162e6072b0d3fe6c6bb2 100644
--- a/tests/automatic/bug_fixes/producer_send_after_restart/check_linux.sh
+++ b/tests/automatic/bug_fixes/producer_send_after_restart/check_linux.sh
@@ -21,11 +21,12 @@ year=2019
 receiver_folder=${receiver_root_folder}/${facility}/gpfs/${beamline}/${year}/data/${beamtime_id}
 
 
-mkdir -p /tmp/asapo/test_in/test1/
+mkdir -p /tmp/asapo/test_in/test1
 
 Cleanup() {
     echo cleanup
     rm -rf ${receiver_root_folder}
+    rm -rf /tmp/asapo/test_in
     nomad stop nginx
     nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
     nomad stop receiver
@@ -40,7 +41,7 @@ nomad run receiver_${network_type}.nmd
 nomad run discovery.nmd
 
 sleep 1
-
+mkdir  /tmp/asapo/test_in/processed
 #producer
 mkdir -p ${receiver_folder}
 $producer_bin test.json &> output &
@@ -48,13 +49,13 @@ producerid=`echo $!`
 
 sleep 1
 
-echo hello > /tmp/asapo/test_in/test1/file1
+echo hello > /tmp/asapo/test_in/processed/file1
 sleep 1
 nomad stop receiver
 sleep 1
 nomad run receiver_${network_type}.nmd
 
-echo hello > /tmp/asapo/test_in/test1/file1
+echo hello > /tmp/asapo/test_in/processed/file1
 sleep 1
 
 kill -s INT $producerid
diff --git a/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat b/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat
index 815dffc5431972fd07bd4d8be6c725b7de66a483..38dfd3b2d8f3c1ec0ee30c633d01f3d3f3ff869e 100644
--- a/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat
+++ b/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat
@@ -20,14 +20,13 @@ call start_services.bat
 
 REM producer
 mkdir %receiver_folder%
-mkdir  c:\tmp\asapo\test_in\test1
-mkdir  c:\tmp\asapo\test_in\test2
+mkdir  c:\tmp\asapo\test_in\processed
 start /B "" "%1" test.json
 
 ping 1.0.0.0 -n 3 -w 100 > nul
 
-echo hello > c:\tmp\asapo\test_in\test1\file1
-echo hello > c:\tmp\asapo\test_in\test1\file2
+echo hello > c:\tmp\asapo\test_in\processed\file1
+echo hello > c:\tmp\asapo\test_in\processed\file2
 
 ping 1.0.0.0 -n 3 -w 100 > nul
 
@@ -39,7 +38,7 @@ ping 1.0.0.0 -n 3 -w 100 > nul
 ping 1.0.0.0 -n 10 -w 100 > nul
 
 
-echo hello > c:\tmp\asapo\test_in\test1\file3
+echo hello > c:\tmp\asapo\test_in\processed\file3
 
 ping 1.0.0.0 -n 10 -w 100 > nul
 
@@ -58,8 +57,7 @@ exit /b 1
 call stop_services.bat
 
 rmdir /S /Q %receiver_root_folder%
-rmdir /S /Q c:\tmp\asapo\test_in\test1
-rmdir /S /Q c:\tmp\asapo\test_in\test2
+rmdir /S /Q c:\tmp\asapo\test_in
 Taskkill /IM "%producer_short_name%" /F
 
 del /f token
diff --git a/tests/automatic/bug_fixes/producer_send_after_restart/test.json.in b/tests/automatic/bug_fixes/producer_send_after_restart/test.json.in
index d74bd52ebcf85d75b4e1533dd0288b71337dc4b2..ed41c425ce44f356fecb72e6c17820cae9ef7b69 100644
--- a/tests/automatic/bug_fixes/producer_send_after_restart/test.json.in
+++ b/tests/automatic/bug_fixes/producer_send_after_restart/test.json.in
@@ -6,7 +6,7 @@
  "NThreads":1,
  "LogLevel":"debug",
  "RootMonitoredFolder":"@ROOT_PATH@test_in",
- "MonitoredSubFolders":["test1"],
+ "MonitoredSubFolders":["processed"],
  "IgnoreExtensions":["tmp"],
  "WhitelistExtensions":[],
  "RemoveAfterSend":true,
diff --git a/tests/automatic/consumer/consumer_api/consumer_api.cpp b/tests/automatic/consumer/consumer_api/consumer_api.cpp
index b18fa79469171bb1cc4d03855f20d1eb564f291a..edd50b493c912fd981deeaa2df356e12d7b946d0 100644
--- a/tests/automatic/consumer/consumer_api/consumer_api.cpp
+++ b/tests/automatic/consumer/consumer_api/consumer_api.cpp
@@ -237,7 +237,7 @@ void TestDataset(const std::unique_ptr<asapo::DataBroker>& broker, const std::st
 void TestAll(const Args& args) {
     asapo::Error err;
     auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.server, ".", true,
-                  asapo::SourceCredentials{args.run_name, "", "", args.token}, &err);
+                  asapo::SourceCredentials{asapo::SourceType::kProcessed,args.run_name, "", "", args.token}, &err);
     if (err) {
         std::cout << "Error CreateServerBroker: " << err << std::endl;
         exit(EXIT_FAILURE);
diff --git a/tests/automatic/consumer/consumer_api_python/authorizer_settings.json.tpl.in b/tests/automatic/consumer/consumer_api_python/authorizer_settings.json.tpl.in
index 7b88592e44067c5c1f3d3f2cca3c849f7313f8e5..d4916f87e39ca954a28ffc6e72199acd0d046c3f 100644
--- a/tests/automatic/consumer/consumer_api_python/authorizer_settings.json.tpl.in
+++ b/tests/automatic/consumer/consumer_api_python/authorizer_settings.json.tpl.in
@@ -4,5 +4,11 @@
   "RootBeamtimesFolder":"@ASAP3_FOLDER@",
   "CurrentBeamlinesFolder":"@CURRENT_BEAMLINES_FOLDER@",
   "SecretFile":"auth_secret.key",
-  "TokenDurationMin":600
+  "TokenDurationMin":600,
+  "Ldap":
+    {
+        "Uri" : "ldap://localhost:389",
+        "BaseDn" : "ou=rgy,o=desy,c=de",
+        "FilterTemplate" : "(cn=a3__BEAMLINE__-hosts)"
+    }
 }
diff --git a/tests/automatic/consumer/next_multithread_broker/next_multithread_broker.cpp b/tests/automatic/consumer/next_multithread_broker/next_multithread_broker.cpp
index 33ff6eccc35866c85a17c8948d1159019d03513f..ec7e791c5d65cf3a75fc743618e0b2000abb3764 100644
--- a/tests/automatic/consumer/next_multithread_broker/next_multithread_broker.cpp
+++ b/tests/automatic/consumer/next_multithread_broker/next_multithread_broker.cpp
@@ -53,7 +53,7 @@ Args GetArgs(int argc, char* argv[]) {
 
 void TestAll(const Args& args) {
     asapo::Error err;
-    auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.server, "dummy", true, asapo::SourceCredentials{args.run_name, "", "", args.token}, &err);
+    auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.server, "dummy", true, asapo::SourceCredentials{asapo::SourceType::kProcessed,args.run_name, "", "", args.token}, &err);
     if (err) {
         std::cout << "Error CreateServerBroker: " << err << std::endl;
         exit(EXIT_FAILURE);
diff --git a/tests/automatic/curl_http_client/curl_http_client_command/authorizer_settings.json.tpl.in b/tests/automatic/curl_http_client/curl_http_client_command/authorizer_settings.json.tpl.in
index 7b88592e44067c5c1f3d3f2cca3c849f7313f8e5..d4916f87e39ca954a28ffc6e72199acd0d046c3f 100644
--- a/tests/automatic/curl_http_client/curl_http_client_command/authorizer_settings.json.tpl.in
+++ b/tests/automatic/curl_http_client/curl_http_client_command/authorizer_settings.json.tpl.in
@@ -4,5 +4,11 @@
   "RootBeamtimesFolder":"@ASAP3_FOLDER@",
   "CurrentBeamlinesFolder":"@CURRENT_BEAMLINES_FOLDER@",
   "SecretFile":"auth_secret.key",
-  "TokenDurationMin":600
+  "TokenDurationMin":600,
+  "Ldap":
+    {
+        "Uri" : "ldap://localhost:389",
+        "BaseDn" : "ou=rgy,o=desy,c=de",
+        "FilterTemplate" : "(cn=a3__BEAMLINE__-hosts)"
+    }
 }
diff --git a/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp b/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp
index 45527f88ef291784b6ffb62d433a4b40063602b5..04c9032d97bcfaf36dbce16a95ce6047331fe04b 100644
--- a/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp
+++ b/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp
@@ -32,7 +32,7 @@ int main(int argc, char* argv[]) {
     std::string authorize_request = "{\"Folder\":\"" + args.folder + "\",\"BeamtimeId\":\"aaa\",\"Token\":\"" + token +
                                     "\"}";
     asapo::Error err;
-    auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.uri_authorizer, "", true, asapo::SourceCredentials{"", "", "", ""}, &err);
+    auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.uri_authorizer, "", true, asapo::SourceCredentials{asapo::SourceType::kProcessed,"", "", "", ""}, &err);
     auto server_broker = static_cast<asapo::ServerDataBroker*>(broker.get());
     M_AssertEq(nullptr, err);
 
diff --git a/tests/automatic/file_transfer_service/rest_api/authorizer_settings.json.tpl.in b/tests/automatic/file_transfer_service/rest_api/authorizer_settings.json.tpl.in
index 7b88592e44067c5c1f3d3f2cca3c849f7313f8e5..d4916f87e39ca954a28ffc6e72199acd0d046c3f 100644
--- a/tests/automatic/file_transfer_service/rest_api/authorizer_settings.json.tpl.in
+++ b/tests/automatic/file_transfer_service/rest_api/authorizer_settings.json.tpl.in
@@ -4,5 +4,11 @@
   "RootBeamtimesFolder":"@ASAP3_FOLDER@",
   "CurrentBeamlinesFolder":"@CURRENT_BEAMLINES_FOLDER@",
   "SecretFile":"auth_secret.key",
-  "TokenDurationMin":600
+  "TokenDurationMin":600,
+  "Ldap":
+    {
+        "Uri" : "ldap://localhost:389",
+        "BaseDn" : "ou=rgy,o=desy,c=de",
+        "FilterTemplate" : "(cn=a3__BEAMLINE__-hosts)"
+    }
 }
diff --git a/tests/automatic/full_chain/CMakeLists.txt b/tests/automatic/full_chain/CMakeLists.txt
index 6110eb7f0293e7b29cf53dc05dc902b44fd83a78..958cfce3006120cbf31e79ac80d64ee7d64cd808 100644
--- a/tests/automatic/full_chain/CMakeLists.txt
+++ b/tests/automatic/full_chain/CMakeLists.txt
@@ -7,6 +7,7 @@ add_subdirectory(simple_chain_metadata)
 add_subdirectory(two_beamlines)
 add_subdirectory(two_streams)
 add_subdirectory(simple_chain_filegen)
+add_subdirectory(simple_chain_raw)
 add_subdirectory(simple_chain_filegen_batches)
 add_subdirectory(simple_chain_filegen_multisource)
 add_subdirectory(simple_chain_filegen_readdata_cache)
diff --git a/tests/automatic/full_chain/send_recv_substreams/send_recv_substreams.cpp b/tests/automatic/full_chain/send_recv_substreams/send_recv_substreams.cpp
index 0a4a0b910d302b9f433bd1d2979f4da59dd661bf..4b93bcd456ddd61246de943fa94eecbc79d45900 100644
--- a/tests/automatic/full_chain/send_recv_substreams/send_recv_substreams.cpp
+++ b/tests/automatic/full_chain/send_recv_substreams/send_recv_substreams.cpp
@@ -36,7 +36,7 @@ void ProcessAfterSend(asapo::RequestCallbackPayload payload, asapo::Error err) {
 
 BrokerPtr CreateBrokerAndGroup(const Args& args, Error* err) {
     auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.server, ".", true,
-                  asapo::SourceCredentials{args.beamtime_id, "", "", args.token}, err);
+                  asapo::SourceCredentials{asapo::SourceType::kProcessed,args.beamtime_id, "", "", args.token}, err);
     if (*err) {
         return nullptr;
     }
@@ -56,7 +56,8 @@ ProducerPtr CreateProducer(const Args& args) {
     asapo::Error err;
     auto producer = asapo::Producer::Create(args.server, 1,
                                             asapo::RequestHandlerType::kTcp,
-                                            asapo::SourceCredentials{args.beamtime_id, "", "", args.token }, 60, &err);
+                                            asapo::SourceCredentials{asapo::SourceType::kProcessed,
+                                                                     args.beamtime_id, "", "", args.token }, 60, &err);
     if(err) {
         std::cerr << "Cannot start producer. ProducerError: " << err << std::endl;
         exit(EXIT_FAILURE);
diff --git a/tests/automatic/full_chain/send_recv_substreams_python/send_recv_substreams.py b/tests/automatic/full_chain/send_recv_substreams_python/send_recv_substreams.py
index c35fb9c0b8757bb46c2b573c0ccffdd37b03045d..f55f12ae6734b8c5ea81afcaa18aa42b352175de 100644
--- a/tests/automatic/full_chain/send_recv_substreams_python/send_recv_substreams.py
+++ b/tests/automatic/full_chain/send_recv_substreams_python/send_recv_substreams.py
@@ -26,7 +26,7 @@ def callback(header,err):
 source, beamtime, token = sys.argv[1:]
 
 broker = asapo_consumer.create_server_broker(source,".",True, beamtime,"",token,timeout)
-producer  = asapo_producer.create_producer(source,beamtime,'auto', "", token, 1, 600)
+producer  = asapo_producer.create_producer(source,'processed',beamtime,'auto', "", token, 1, 600)
 producer.set_log_level("debug")
 
 group_id  = broker.generate_group_id()
diff --git a/tests/automatic/full_chain/simple_chain_filegen/check_linux.sh b/tests/automatic/full_chain/simple_chain_filegen/check_linux.sh
index 4e88475f4060545fb191dbdc280f7f8009bb1257..6720558682eecbc8ee9d4a81d3b8f522fee875bc 100644
--- a/tests/automatic/full_chain/simple_chain_filegen/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_filegen/check_linux.sh
@@ -21,15 +21,13 @@ facility=test_facility
 year=2019
 receiver_folder=${receiver_root_folder}/${facility}/gpfs/${beamline}/${year}/data/${beamtime_id}
 
-
-mkdir -p /tmp/asapo/test_in/test1/
-mkdir -p /tmp/asapo/test_in/test2/
+mkdir -p /tmp/asapo/test_in/processed
 
 Cleanup() {
     echo cleanup
-    kill $producerid
-    rm -rf /tmp/asapo/test_in/test1
-    rm -rf /tmp/asapo/test_in/test2
+    kill -9 $producerid
+    rm -rf /tmp/asapo/test_in
+    rm -rf ${receiver_folder}
     nomad stop nginx
     nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
     nomad stop receiver
@@ -56,15 +54,19 @@ producerid=`echo $!`
 
 sleep 1
 
-echo hello > /tmp/asapo/test_in/test1/file1
-echo hello > /tmp/asapo/test_in/test1/file2
-echo hello > /tmp/asapo/test_in/test2/file2
+mkdir  /tmp/asapo/test_in/processed/test1
+mkdir  /tmp/asapo/test_in/processed/test2
+
+
+echo hello > /tmp/asapo/test_in/processed/test1/file1
+echo hello > /tmp/asapo/test_in/processed/test1/file2
+echo hello > /tmp/asapo/test_in/processed/test2/file1
 
 echo "Start consumer in metadata only mode"
 $consumer_bin ${proxy_address} ${receiver_folder} ${beamtime_id} 2 $token 1000 1 | tee /dev/stderr out
 grep "Processed 3 file(s)" out
 grep -i "Using connection type: No connection" out
 
-test ! -f /tmp/asapo/test_in/test1/file1
-test ! -f /tmp/asapo/test_in/test1/file2
-test ! -f /tmp/asapo/test_in/test2/file2
+test ! -f /tmp/asapo/test_in/processed/test1/file1
+test ! -f /tmp/asapo/test_in/processed/test1/file2
+test ! -f /tmp/asapo/test_in/processed/test2/file1
diff --git a/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat
index 7de08baad3f1b7f826e60109449a8c3e7e7d30f1..c4dd96c9c375d5d8032dff97a32ba3a76c3c96bb 100644
--- a/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat
@@ -20,15 +20,18 @@ call start_services.bat
 
 REM producer
 mkdir %receiver_folder%
-mkdir  c:\tmp\asapo\test_in\test1
-mkdir  c:\tmp\asapo\test_in\test2
+mkdir  c:\tmp\asapo\test_in\processed
 start /B "" "%1" test.json
 
 ping 1.0.0.0 -n 3 -w 100 > nul
 
-echo hello > c:\tmp\asapo\test_in\test1\file1
-echo hello > c:\tmp\asapo\test_in\test1\file2
-echo hello > c:\tmp\asapo\test_in\test2\file2
+mkdir  c:\tmp\asapo\test_in\processed\test1
+mkdir  c:\tmp\asapo\test_in\processed\test2
+
+
+echo hello > c:\tmp\asapo\test_in\processed\test1\file1
+echo hello > c:\tmp\asapo\test_in\processed\test1\file2
+echo hello > c:\tmp\asapo\test_in\processed\test2\file2
 
 ping 1.0.0.0 -n 10 -w 100 > nul
 
@@ -47,8 +50,7 @@ exit /b 1
 :clean
 call stop_services.bat
 rmdir /S /Q %receiver_root_folder%
-rmdir /S /Q c:\tmp\asapo\test_in\test1
-rmdir /S /Q c:\tmp\asapo\test_in\test2
+rmdir /S /Q c:\tmp\asapo\test_in
 Taskkill /IM "%producer_short_name%" /F
 
 del /f token
diff --git a/tests/automatic/full_chain/simple_chain_filegen/test.json.in b/tests/automatic/full_chain/simple_chain_filegen/test.json.in
index 3c77ba335ac934c2b4ce32e9f531e21b56058709..eddefac2b2a700bbdd4703bd7221b641a99216a1 100644
--- a/tests/automatic/full_chain/simple_chain_filegen/test.json.in
+++ b/tests/automatic/full_chain/simple_chain_filegen/test.json.in
@@ -6,7 +6,7 @@
  "NThreads":1,
  "LogLevel":"debug",
  "RootMonitoredFolder":"@ROOT_PATH@test_in",
- "MonitoredSubFolders":["test1","test2"],
+ "MonitoredSubFolders":["processed"],
  "IgnoreExtensions":["tmp"],
  "WhitelistExtensions":[],
  "RemoveAfterSend":true,
diff --git a/tests/automatic/full_chain/simple_chain_filegen_batches/check_linux.sh b/tests/automatic/full_chain/simple_chain_filegen_batches/check_linux.sh
index a9a31bb2e782ccdccfdf737fb3ee48e4e58f4abb..ef67e1cdd2e9eb132443449e291d91cb27121ea4 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_batches/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_filegen_batches/check_linux.sh
@@ -22,14 +22,14 @@ year=2019
 receiver_folder=${receiver_root_folder}/${facility}/gpfs/${beamline}/${year}/data/${beamtime_id}
 
 
-mkdir -p /tmp/asapo/test_in/test1/
-mkdir -p /tmp/asapo/test_in/test2/
+mkdir -p /tmp/asapo/test_in/processed
 
 Cleanup() {
     echo cleanup
-    kill $producerid
-    rm -rf /tmp/asapo/test_in/test1
-    rm -rf /tmp/asapo/test_in/test2
+    kill -9 $producerid
+    rm -rf /tmp/asapo/test_in
+    rm -rf ${receiver_folder}
+
     nomad stop nginx
     nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
     nomad stop receiver
@@ -50,6 +50,10 @@ nomad run broker.nmd
 
 sleep 1
 
+mkdir  /tmp/asapo/test_in/processed/test1
+mkdir  /tmp/asapo/test_in/processed/test2
+
+#producer
 echo "Start producer"
 mkdir -p ${receiver_folder}
 $producer_bin test.json &
@@ -57,9 +61,9 @@ producerid=`echo $!`
 
 sleep 1
 
-echo hello > /tmp/asapo/test_in/test1/file1
-echo hello > /tmp/asapo/test_in/test1/file2
-echo hello > /tmp/asapo/test_in/test2/file2
+echo hello > /tmp/asapo/test_in/processed/test1/file1
+echo hello > /tmp/asapo/test_in/processed/test1/file2
+echo hello > /tmp/asapo/test_in/processed/test2/file1
 
 echo "Start consumer in metadata only mode"
 $consumer_bin ${proxy_address} ${receiver_folder} ${beamtime_id} 2 $token 2000 1 1 | tee out
@@ -67,6 +71,6 @@ grep "Processed 1 dataset(s)" out
 grep "with 3 file(s)" out
 grep -i "Using connection type: No connection" out
 
-test -f /tmp/asapo/test_in/test1/file1
-test -f /tmp/asapo/test_in/test1/file2
-test -f /tmp/asapo/test_in/test2/file2
+test -f /tmp/asapo/test_in/processed/test1/file1
+test -f /tmp/asapo/test_in/processed/test1/file2
+test -f /tmp/asapo/test_in/processed/test2/file1
diff --git a/tests/automatic/full_chain/simple_chain_filegen_batches/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen_batches/check_windows.bat
index 5c5e7017ff72cda4c99cce9c5f30753bbdb60bfc..4ed07ca89add90ce6d244a04f6d0eca5af8f30c4 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_batches/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen_batches/check_windows.bat
@@ -20,15 +20,16 @@ call start_services.bat
 
 REM producer
 mkdir %receiver_folder%
-mkdir  c:\tmp\asapo\test_in\test1
-mkdir  c:\tmp\asapo\test_in\test2
+mkdir  c:\tmp\asapo\test_in\processed
 start /B "" "%1" test.json
 
 ping 1.0.0.0 -n 3 -w 100 > nul
+mkdir  c:\tmp\asapo\test_in\processed\test1
+mkdir  c:\tmp\asapo\test_in\processed\test2
 
-echo hello > c:\tmp\asapo\test_in\test1\file1
-echo hello > c:\tmp\asapo\test_in\test1\file2
-echo hello > c:\tmp\asapo\test_in\test2\file2
+echo hello > c:\tmp\asapo\test_in\processed\test1\file1
+echo hello > c:\tmp\asapo\test_in\processed\test1\file2
+echo hello > c:\tmp\asapo\test_in\processed\test2\file2
 
 ping 1.0.0.0 -n 10 -w 100 > nul
 
@@ -48,8 +49,7 @@ exit /b 1
 :clean
 call stop_services.bat
 rmdir /S /Q %receiver_root_folder%
-rmdir /S /Q c:\tmp\asapo\test_in\test1
-rmdir /S /Q c:\tmp\asapo\test_in\test2
+rmdir /S /Q c:\tmp\asapo\test_in
 Taskkill /IM "%producer_short_name%" /F
 
 del /f token
diff --git a/tests/automatic/full_chain/simple_chain_filegen_batches/test.json.in b/tests/automatic/full_chain/simple_chain_filegen_batches/test.json.in
index f1323b8ee05f6d835a632779eb362354d9595b74..0b760c2ea8d92034668462fb60320c45c9789b2b 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_batches/test.json.in
+++ b/tests/automatic/full_chain/simple_chain_filegen_batches/test.json.in
@@ -6,7 +6,7 @@
  "NThreads":1,
  "LogLevel":"debug",
  "RootMonitoredFolder":"@ROOT_PATH@test_in",
- "MonitoredSubFolders":["test1","test2"],
+ "MonitoredSubFolders":["processed"],
  "IgnoreExtensions":["tmp"],
  "WhitelistExtensions":[],
  "RemoveAfterSend":false,
diff --git a/tests/automatic/full_chain/simple_chain_filegen_multisource/check_linux.sh b/tests/automatic/full_chain/simple_chain_filegen_multisource/check_linux.sh
index bcdbfa2c31ab8be4bda172aafd3bc70b86c620c6..5283a0d682e03bfacdd63bf2439b5c74e5ffe0a1 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_multisource/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_filegen_multisource/check_linux.sh
@@ -21,15 +21,13 @@ year=2019
 receiver_folder=${receiver_root_folder}/${facility}/gpfs/${beamline}/${year}/data/${beamtime_id}
 
 
-mkdir -p /tmp/asapo/test_in/test1/
-mkdir -p /tmp/asapo/test_in/test2/
-
 Cleanup() {
     echo cleanup
-    kill $producerid1
-    kill $producerid2
-    rm -rf /tmp/asapo/test_in/test1
-    rm -rf /tmp/asapo/test_in/test2
+    kill -9 $producerid1
+    kill -9 $producerid2
+    rm -rf /tmp/asapo/test_in1
+    rm -rf /tmp/asapo/test_in2
+    rm -rf ${receiver_folder}
     nomad stop nginx
     nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
     nomad stop receiver
@@ -50,6 +48,10 @@ nomad run broker.nmd
 
 sleep 1
 
+
+mkdir -p /tmp/asapo/test_in1/processed
+mkdir -p /tmp/asapo/test_in2/processed
+
 mkdir -p ${receiver_folder}
 
 echo "Start producer 1"
@@ -61,11 +63,13 @@ $producer_bin test2.json &
 producerid2=`echo $!`
 
 sleep 1
+mkdir -p /tmp/asapo/test_in1/processed/test1
+mkdir -p /tmp/asapo/test_in2/processed/test2
 
-echo hello > /tmp/asapo/test_in/test1/file1
-echo hello > /tmp/asapo/test_in/test1/file2
-echo hello > /tmp/asapo/test_in/test2/file1
-echo hello > /tmp/asapo/test_in/test2/file2
+echo hello > /tmp/asapo/test_in1/processed/test1/file1
+echo hello > /tmp/asapo/test_in1/processed/test1/file2
+echo hello > /tmp/asapo/test_in2/processed/test2/file1
+echo hello > /tmp/asapo/test_in2/processed/test2/file2
 
 echo "Start consumer in metadata only mode"
 $consumer_bin ${proxy_address} $network_type ${receiver_folder} ${beamtime_id} 2 $token 2000 1 1 | tee out
diff --git a/tests/automatic/full_chain/simple_chain_filegen_multisource/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen_multisource/check_windows.bat
index edb87126c1d87a8b6f5ba547fb793b6acfb5bb5c..647ebecde4e377682f22f3437b2342abf0157c6e 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_multisource/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen_multisource/check_windows.bat
@@ -19,8 +19,8 @@ echo db.%beamtime_id%_detector.insert({dummy:1}) | %mongo_exe% %beamtime_id%_det
 call start_services.bat
 
 mkdir %receiver_folder%
-mkdir  c:\tmp\asapo\test_in\test1
-mkdir  c:\tmp\asapo\test_in\test2
+mkdir  c:\tmp\asapo\test_in1\processed
+mkdir  c:\tmp\asapo\test_in2\processed
 
 REM producer1
 start /B "" "%1" test1.json
@@ -30,11 +30,13 @@ start /B "" "%1" test2.json
 
 
 ping 1.0.0.0 -n 3 -w 100 > nul
+mkdir  c:\tmp\asapo\test_in1\processed\test1
+mkdir  c:\tmp\asapo\test_in2\processed\test2
 
-echo hello > c:\tmp\asapo\test_in\test1\file1
-echo hello > c:\tmp\asapo\test_in\test1\file2
-echo hello > c:\tmp\asapo\test_in\test2\file1
-echo hello > c:\tmp\asapo\test_in\test2\file2
+echo hello > c:\tmp\asapo\test_in1\processed\test1\file1
+echo hello > c:\tmp\asapo\test_in1\processed\test1\file2
+echo hello > c:\tmp\asapo\test_in2\processed\test2\file1
+echo hello > c:\tmp\asapo\test_in2\processed\test2\file2
 
 ping 1.0.0.0 -n 10 -w 100 > nul
 
@@ -55,8 +57,8 @@ exit /b 1
 :clean
 call stop_services.bat
 rmdir /S /Q %receiver_root_folder%
-rmdir /S /Q c:\tmp\asapo\test_in\test1
-rmdir /S /Q c:\tmp\asapo\test_in\test2
+rmdir /S /Q c:\tmp\asapo\test_in1
+rmdir /S /Q c:\tmp\asapo\test_in2
 Taskkill /IM "%producer_short_name%" /F
 
 del /f token
diff --git a/tests/automatic/full_chain/simple_chain_filegen_multisource/test.json.in b/tests/automatic/full_chain/simple_chain_filegen_multisource/test.json.in
index eadb0bb3eeb9603bf30bf3e0d0de13376095de82..09aa803aa41948346be1f951e85383364f6827d2 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_multisource/test.json.in
+++ b/tests/automatic/full_chain/simple_chain_filegen_multisource/test.json.in
@@ -5,8 +5,8 @@
  "Mode":"tcp",
  "NThreads":1,
  "LogLevel":"debug",
- "RootMonitoredFolder":"@ROOT_PATH@test_in",
- "MonitoredSubFolders":["test@ID@"],
+ "RootMonitoredFolder":"@ROOT_PATH@test_in@ID@",
+ "MonitoredSubFolders":["processed"],
  "IgnoreExtensions":["tmp"],
  "WhitelistExtensions":[],
  "RemoveAfterSend":true,
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_linux.sh b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_linux.sh
index 05b1e16e160b9bc409af4defc7b985306ff6e349..b57cbbbad09a957ee9cab06d77a88f5fbdd7101c 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_linux.sh
@@ -22,15 +22,14 @@ year=2019
 receiver_folder=${receiver_root_folder}/${facility}/gpfs/${beamline}/${year}/data/${beamtime_id}
 
 
-mkdir -p /tmp/asapo/test_in/test1/
-mkdir -p /tmp/asapo/test_in/test2/
+mkdir -p /tmp/asapo/test_in/processed
 
 Cleanup() {
     echo cleanup
-    kill $producerid
+    kill -9 $producerid
+    rm -rf /tmp/asapo/test_in
+    rm -rf ${receiver_folder}
     influx -execute "drop database ${monitor_database_name}"
-    rm -rf /tmp/asapo/test_in/test1
-    rm -rf /tmp/asapo/test_in/test2
     nomad stop nginx
     nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
     nomad stop receiver
@@ -57,10 +56,12 @@ $producer_bin test.json &
 producerid=`echo $!`
 
 sleep 1
+mkdir  /tmp/asapo/test_in/processed/test1
+mkdir  /tmp/asapo/test_in/processed/test2
 
-echo -n hello1 > /tmp/asapo/test_in/test1/file1
-echo -n hello2 > /tmp/asapo/test_in/test1/file2
-echo -n hello3 > /tmp/asapo/test_in/test2/file2
+echo -n hello1 > /tmp/asapo/test_in/processed/test1/file1
+echo -n hello2 > /tmp/asapo/test_in/processed/test1/file2
+echo -n hello3 > /tmp/asapo/test_in/processed/test2/file1
 
 echo "Start consumer in $network_type mode"
 $consumer_bin ${proxy_address} ${receiver_folder} ${beamtime_id} 2 $token 1000 0 | tee out.txt
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_windows.bat
index dc674898e8a6de37125f77147c700529e6628394..594e17b724d28689bc49c1ac898267b65a5b0ee4 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_windows.bat
@@ -20,15 +20,16 @@ call start_services.bat
 
 REM producer
 mkdir %receiver_folder%
-mkdir  c:\tmp\asapo\test_in\test1
-mkdir  c:\tmp\asapo\test_in\test2
+mkdir  c:\tmp\asapo\test_in\processed
 start /B "" "%1" test.json
 
 ping 1.0.0.0 -n 3 -w 100 > nul
 
-echo hello1 > c:\tmp\asapo\test_in\test1\file1
-echo hello2 > c:\tmp\asapo\test_in\test1\file2
-echo hello3 > c:\tmp\asapo\test_in\test2\file2
+mkdir  c:\tmp\asapo\test_in\processed\test1
+mkdir  c:\tmp\asapo\test_in\processed\test2
+echo hello1 > c:\tmp\asapo\test_in\processed\test1\file1
+echo hello2 > c:\tmp\asapo\test_in\processed\test1\file2
+echo hello3 > c:\tmp\asapo\test_in\processed\test2\file2
 
 ping 1.0.0.0 -n 10 -w 100 > nul
 
@@ -51,8 +52,7 @@ exit /b 1
 :clean
 call stop_services.bat
 rmdir /S /Q %receiver_root_folder%
-rmdir /S /Q c:\tmp\asapo\test_in\test1
-rmdir /S /Q c:\tmp\asapo\test_in\test2
+rmdir /S /Q c:\tmp\asapo\test_in
 Taskkill /IM "%producer_short_name%" /F
 del /f out.txt
 
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/test.json.in b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/test.json.in
index 9addfcceb52b30c4449268cb140fa04700306a59..ed41c425ce44f356fecb72e6c17820cae9ef7b69 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/test.json.in
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/test.json.in
@@ -6,7 +6,7 @@
  "NThreads":1,
  "LogLevel":"debug",
  "RootMonitoredFolder":"@ROOT_PATH@test_in",
- "MonitoredSubFolders":["test1","test2"],
+ "MonitoredSubFolders":["processed"],
  "IgnoreExtensions":["tmp"],
  "WhitelistExtensions":[],
  "RemoveAfterSend":true,
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_linux.sh b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_linux.sh
index 8b65535b5995be56fa9a0232e750c8a6e6c8651a..6a571d4d27ce9b83586664d0b3ec60a80a4596b3 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_linux.sh
@@ -22,14 +22,14 @@ year=2019
 receiver_folder=${receiver_root_folder}/${facility}/gpfs/${beamline}/${year}/data/${beamtime_id}
 
 
-mkdir -p /tmp/asapo/test_in/test1/
-mkdir -p /tmp/asapo/test_in/test2/
+mkdir -p /tmp/asapo/test_in/processed
 
 Cleanup() {
     echo cleanup
-    kill $producerid
-    rm -rf /tmp/asapo/test_in/test1
-    rm -rf /tmp/asapo/test_in/test2
+    kill -9 $producerid
+    rm -rf /tmp/asapo/test_in
+    rm -rf ${receiver_folder}
+    influx -execute "drop database ${monitor_database_name}"
     nomad stop nginx
     nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
     nomad stop receiver
@@ -56,10 +56,12 @@ $producer_bin test.json &
 producerid=`echo $!`
 
 sleep 1
+mkdir  /tmp/asapo/test_in/processed/test1
+mkdir  /tmp/asapo/test_in/processed/test2
 
-echo -n hello1 > /tmp/asapo/test_in/test1/file1
-echo -n hello2 > /tmp/asapo/test_in/test1/file2
-echo -n hello3 > /tmp/asapo/test_in/test2/file2
+echo -n hello1 > /tmp/asapo/test_in/processed/test1/file1
+echo -n hello2 > /tmp/asapo/test_in/processed/test1/file2
+echo -n hello3 > /tmp/asapo/test_in/processed/test2/file1
 
 echo "Start consumer in $network_type mode"
 $consumer_bin ${proxy_address} ${receiver_folder} ${beamtime_id} 2 $token 1000 0 | tee out.txt
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_windows.bat
index 68347e567d6ee78de314efea94f6985f1c968898..0c00c52570184c1b88b2fa41d1e2df57f2142269 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_windows.bat
@@ -20,15 +20,17 @@ call start_services.bat
 
 REM producer
 mkdir %receiver_folder%
-mkdir  c:\tmp\asapo\test_in\test1
-mkdir  c:\tmp\asapo\test_in\test2
+mkdir  c:\tmp\asapo\test_in\processed
 start /B "" "%1" test.json
 
 ping 1.0.0.0 -n 3 -w 100 > nul
 
-echo hello1 > c:\tmp\asapo\test_in\test1\file1
-echo hello2 > c:\tmp\asapo\test_in\test1\file2
-echo hello3 > c:\tmp\asapo\test_in\test2\file2
+mkdir  c:\tmp\asapo\test_in\processed\test1
+mkdir  c:\tmp\asapo\test_in\processed\test2
+echo hello1 > c:\tmp\asapo\test_in\processed\test1\file1
+echo hello2 > c:\tmp\asapo\test_in\processed\test1\file2
+echo hello3 > c:\tmp\asapo\test_in\processed\test2\file2
+
 
 ping 1.0.0.0 -n 10 -w 100 > nul
 
@@ -52,8 +54,7 @@ exit /b 1
 :clean
 call stop_services.bat
 rmdir /S /Q %receiver_root_folder%
-rmdir /S /Q c:\tmp\asapo\test_in\test1
-rmdir /S /Q c:\tmp\asapo\test_in\test2
+rmdir /S /Q c:\tmp\asapo\test_in
 Taskkill /IM "%producer_short_name%" /F
 del /f out.txt
 
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/test.json.in b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/test.json.in
index 9addfcceb52b30c4449268cb140fa04700306a59..ed41c425ce44f356fecb72e6c17820cae9ef7b69 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/test.json.in
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/test.json.in
@@ -6,7 +6,7 @@
  "NThreads":1,
  "LogLevel":"debug",
  "RootMonitoredFolder":"@ROOT_PATH@test_in",
- "MonitoredSubFolders":["test1","test2"],
+ "MonitoredSubFolders":["processed"],
  "IgnoreExtensions":["tmp"],
  "WhitelistExtensions":[],
  "RemoveAfterSend":true,
diff --git a/tests/automatic/full_chain/simple_chain_raw/CMakeLists.txt b/tests/automatic/full_chain/simple_chain_raw/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..1cf8ce6f58bbe85a5c752c1055f742f958d137d0
--- /dev/null
+++ b/tests/automatic/full_chain/simple_chain_raw/CMakeLists.txt
@@ -0,0 +1,23 @@
+set(TARGET_NAME full_chain_simple_chain_raw)
+
+################################
+# Testing
+################################
+prepare_asapo()
+
+file(TO_NATIVE_PATH ${CMAKE_CURRENT_BINARY_DIR}/asap3 ASAP3_FOLDER )
+file(TO_NATIVE_PATH ${CMAKE_CURRENT_BINARY_DIR}/beamline CURRENT_BEAMLINES_FOLDER )
+
+file(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/ip_bl_mapping BEAMLINES_FOLDER )
+if (WIN32)
+    string(REPLACE "\\" "\\\\" ASAP3_FOLDER "${ASAP3_FOLDER}")
+    string(REPLACE "\\" "\\\\" BEAMLINES_FOLDER "${BEAMLINES_FOLDER}")
+    string(REPLACE "\\" "\\\\" CURRENT_BEAMLINES_FOLDER "${CURRENT_BEAMLINES_FOLDER}")
+
+endif()
+
+configure_file(beamtime-metadata-11111111.json beamtime-metadata-11111111.json COPYONLY)
+
+configure_file(${CMAKE_CURRENT_SOURCE_DIR}/settings.json.tpl.in authorizer.json.tpl @ONLY)
+
+add_script_test("${TARGET_NAME}" "$<TARGET_FILE:dummy-data-producer> $<TARGET_FILE:getnext_broker> $<TARGET_PROPERTY:asapo,EXENAME>" nomem)
diff --git a/tests/automatic/full_chain/simple_chain_raw/beamtime-metadata-11111111.json b/tests/automatic/full_chain/simple_chain_raw/beamtime-metadata-11111111.json
new file mode 100644
index 0000000000000000000000000000000000000000..9270dbbefe4a12e80989c7c2faa4cc6443aee784
--- /dev/null
+++ b/tests/automatic/full_chain/simple_chain_raw/beamtime-metadata-11111111.json
@@ -0,0 +1,36 @@
+{
+  "applicant": {
+    "email": "test",
+    "institute": "test",
+    "lastname": "test",
+    "userId": "1234",
+    "username": "test"
+  },
+  "beamline": "p07",
+  "beamline_alias": "P07",
+  "beamtimeId": "11111111",
+  "contact": "None",
+  "core-path": "/asap3/petra3/gpfs/p07/2020/data/11111111",
+  "event-end": "2020-03-03 09:00:00",
+  "event-start": "2020-03-02 09:00:00",
+  "facility": "PETRA III",
+  "generated": "2020-02-22 22:37:16",
+  "pi": {
+    "email": "test",
+    "institute": "test",
+    "lastname": "test",
+    "userId": "14",
+    "username": "test"
+  },
+  "proposalId": "12345678",
+  "proposalType": "H",
+  "title": "In-House Research (P07)",
+  "unixId": "None",
+  "users": {
+    "door-db": [
+      "test"
+    ],
+    "special": [],
+    "unknown": []
+  }
+}
diff --git a/tests/automatic/full_chain/simple_chain_raw/check_linux.sh b/tests/automatic/full_chain/simple_chain_raw/check_linux.sh
new file mode 100644
index 0000000000000000000000000000000000000000..f56ee8ca5627333538906b17b4fcdac9de30dc97
--- /dev/null
+++ b/tests/automatic/full_chain/simple_chain_raw/check_linux.sh
@@ -0,0 +1,50 @@
+#!/usr/bin/env bash
+
+set -e
+
+trap Cleanup EXIT
+
+beamtime_id=11111111
+token=`$3 token -secret auth_secret.key $beamtime_id`
+
+monitor_database_name=db_test
+proxy_address=127.0.0.1:8400
+
+mkdir -p asap3/petra3/gpfs/p07/2019/data/11111111
+cp beamtime-metadata* asap3/petra3/gpfs/p07/2019/data/11111111
+mkdir -p beamline/p07/current
+cp beamtime-metadata* beamline/p07/current/
+
+Cleanup() {
+    echo cleanup
+    nomad stop nginx
+    nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
+    nomad stop receiver
+    nomad stop discovery
+    nomad stop broker
+    nomad stop authorizer
+    rm -rf out asap3 beamline
+    echo "db.dropDatabase()" | mongo ${beamtime_id}_detector
+    influx -execute "drop database ${monitor_database_name}"
+}
+
+#echo "db.data_${beamtime_id}_detector.insert({dummy:1})" | mongo data_${beamtime_id}_detector
+echo "db.dropDatabase()" | mongo ${beamtime_id}_detector
+
+
+nomad run nginx.nmd
+nomad run authorizer.nmd
+nomad run receiver_tcp.nmd
+nomad run discovery.nmd
+nomad run broker.nmd
+
+sleep 1
+
+#producer
+$1 localhost:8400 ${beamtime_id} 100 10 4 100 100
+
+
+$2 ${proxy_address} "__" ${beamtime_id} 2 $token 5000 1 > out
+cat out
+cat out   | grep "Processed 10 file(s)"
+ls beamline/p07/current/raw/1 | tee /dev/stderr | grep 1
diff --git a/tests/automatic/full_chain/simple_chain_raw/check_windows.bat b/tests/automatic/full_chain/simple_chain_raw/check_windows.bat
new file mode 100644
index 0000000000000000000000000000000000000000..aaa10a220723cc8dcaba483f17922b0158fb5ad1
--- /dev/null
+++ b/tests/automatic/full_chain/simple_chain_raw/check_windows.bat
@@ -0,0 +1,42 @@
+SET mongo_exe="c:\Program Files\MongoDB\Server\4.2\bin\mongo.exe"
+SET beamtime_id=11111111
+
+mkdir asap3\petra3\gpfs\p07\2019\data\11111111
+mkdir beamline\p07\current
+copy beamtime-metadata* beamline\p07\current\ /y
+copy beamtime-metadata* asap3\petra3\gpfs\p07\2019\data\11111111\ /y
+
+
+"%3" token -secret auth_secret.key %beamtime_id% > token
+set /P token=< token
+
+set proxy_address="127.0.0.1:8400"
+
+call start_services.bat
+
+REM producer
+mkdir %receiver_folder%
+start /B "" "%1" %proxy_address% %beamtime_id% 100 10 4 100 100
+ping 1.0.0.0 -n 1 -w 100 > nul
+
+REM consumer
+"%2" %proxy_address% "_" %beamtime_id% 2 %token% 5000  1 > out.txt
+type out.txt
+findstr /i /l /c:"Processed 10 file(s)"  out.txt || goto :error
+if not exist beamline\p07\current\raw\1  goto :error
+
+goto :clean
+
+:error
+call :clean
+exit /b 1
+
+:clean
+call stop_services.bat
+rmdir /S /Q asap3
+rmdir /S /Q beamline
+del /f token
+del /f out.txt
+echo db.dropDatabase() | %mongo_exe% %beamtime_id%_detector
+
+
diff --git a/tests/automatic/full_chain/simple_chain_raw/settings.json.tpl.in b/tests/automatic/full_chain/simple_chain_raw/settings.json.tpl.in
new file mode 100644
index 0000000000000000000000000000000000000000..4aecbe840466b510e46c567c4871bd892b110bcc
--- /dev/null
+++ b/tests/automatic/full_chain/simple_chain_raw/settings.json.tpl.in
@@ -0,0 +1,13 @@
+{
+  "Port": {{ env "NOMAD_PORT_authorizer" }},
+  "LogLevel":"debug",
+  "RootBeamtimesFolder":"@ASAP3_FOLDER@",
+  "CurrentBeamlinesFolder":"@CURRENT_BEAMLINES_FOLDER@",
+  "SecretFile":"auth_secret.key",
+  "Ldap":
+    {
+        "Uri" : "ldap://localhost:389",
+        "BaseDn" : "ou=rgy,o=desy,c=de",
+        "FilterTemplate" : "(cn=a3__BEAMLINE__-hosts)"
+    }
+}
\ No newline at end of file
diff --git a/tests/automatic/full_chain/two_streams/check_windows.bat b/tests/automatic/full_chain/two_streams/check_windows.bat
index fd2b9268da99a3c4ab0f715ae04b05fc0d095895..43ef2ab6c529c154d9fe755caec622be5cdf445a 100644
--- a/tests/automatic/full_chain/two_streams/check_windows.bat
+++ b/tests/automatic/full_chain/two_streams/check_windows.bat
@@ -32,8 +32,6 @@ findstr /i /l /c:"Processed 1000 file(s)"  out1.txt || goto :error
 type out2.txt
 findstr /i /l /c:"Processed 900 file(s)"  out2.txt || goto :error
 
-
-
 goto :clean
 
 :error
diff --git a/tests/automatic/producer/aai/producer_aai.py b/tests/automatic/producer/aai/producer_aai.py
index 54bc6f7758aa18a9e72de3fa91005df18037581a..1734f757f158fddc4692063cd9a23c4dae8c39fd 100644
--- a/tests/automatic/producer/aai/producer_aai.py
+++ b/tests/automatic/producer/aai/producer_aai.py
@@ -26,19 +26,19 @@ def callback(header,err):
     lock.release()
 
 
-producer  = asapo_producer.create_producer(endpoint,'auto',beamline, stream, token, nthreads, 60)
+producer  = asapo_producer.create_producer(endpoint,'processed','auto',beamline, stream, token, nthreads, 60)
 
 producer.set_log_level("debug")
 
 #send single file
-producer.send_file(1, local_path = "./file1", exposed_path = stream+"/"+"file1", user_meta = '{"test_key":"test_val"}', callback = callback)
+producer.send_file(1, local_path = "./file1", exposed_path = "processed/"+stream+"/"+"file1", user_meta = '{"test_key":"test_val"}', callback = callback)
 
 producer.wait_requests_finished(10000)
 
 time.sleep(2)
 
 #send single file to other beamtime - should be warning on duplicated request (same beamtime, no reauthorization)
-producer.send_file(1, local_path = "./file1", exposed_path = stream+"/"+"file1", user_meta = '{"test_key":"test_val"}', callback = callback)
+producer.send_file(1, local_path = "./file1", exposed_path = "processed/"+stream+"/"+"file1", user_meta = '{"test_key":"test_val"}', callback = callback)
 producer.wait_requests_finished(10000)
 
 
@@ -54,7 +54,7 @@ with open(fname, 'w') as outfile:
 time.sleep(2)
 
 #send single file to other beamtime - now ok since receiver authorization timed out
-producer.send_file(1, local_path = "./file1", exposed_path = stream+"/"+"file1", user_meta = '{"test_key":"test_val"}', callback = callback)
+producer.send_file(1, local_path = "./file1", exposed_path = "processed/"+stream+"/"+"file1", user_meta = '{"test_key":"test_val"}', callback = callback)
 
 producer.wait_requests_finished(10000)
 
diff --git a/tests/automatic/producer/beamtime_metadata/beamtime_metadata.cpp b/tests/automatic/producer/beamtime_metadata/beamtime_metadata.cpp
index 5e00ea491ea6a11a9e999003a37fae3d42e9381e..d8951bf79f91d7ae1fcbfb809a420a19f71a27ea 100644
--- a/tests/automatic/producer/beamtime_metadata/beamtime_metadata.cpp
+++ b/tests/automatic/producer/beamtime_metadata/beamtime_metadata.cpp
@@ -69,7 +69,8 @@ std::unique_ptr<asapo::Producer> CreateProducer(const Args& args) {
     auto producer = asapo::Producer::Create(args.discovery_service_endpoint, 1,
                                             args.mode == 0 ? asapo::RequestHandlerType::kTcp
                                             : asapo::RequestHandlerType::kFilesystem,
-                                            asapo::SourceCredentials{args.beamtime_id, "", "", ""}, 60, &err);
+                                            asapo::SourceCredentials{asapo::SourceType::kProcessed,
+                                                                     args.beamtime_id, "", "", ""}, 60, &err);
     if (err) {
         std::cerr << "Cannot start producer. ProducerError: " << err << std::endl;
         exit(EXIT_FAILURE);
diff --git a/tests/automatic/producer/python_api/producer_api.py b/tests/automatic/producer/python_api/producer_api.py
index 4d063364180f3438f35397fd6dea25a65b862ea1..389c2350a74845b0f72b3f059b81d3fa8d6fd9c8 100644
--- a/tests/automatic/producer/python_api/producer_api.py
+++ b/tests/automatic/producer/python_api/producer_api.py
@@ -5,6 +5,7 @@ import sys
 import time
 import numpy as np
 import threading
+
 lock = threading.Lock()
 
 stream = sys.argv[1]
@@ -14,66 +15,73 @@ endpoint = sys.argv[3]
 token = ""
 nthreads = 8
 
-def assert_eq(val,expected,name):
-    print ("asserting eq for "+name)
+
+def assert_eq(val, expected, name):
+    print("asserting eq for " + name)
     if val != expected:
-        print ("error at "+name)
-        print ('val: ', val,' expected: ',expected)
+        print("error at " + name)
+        print('val: ', val, ' expected: ', expected)
         sys.exit(1)
 
-def callback(header,err):
-    lock.acquire() # to print
-    if isinstance(err,asapo_producer.AsapoServerWarning):
-        print("successfuly sent, but with warning from server: ",header,err)
+
+def callback(header, err):
+    lock.acquire()  # to print
+    if isinstance(err, asapo_producer.AsapoServerWarning):
+        print("successfully sent, but with warning from server: ", header, err)
     elif err is not None:
-        print("could not sent: ",header,err)
+        print("could not send: ", header, err)
     else:
-        print ("successfuly sent: ",header)
+        print("successfully sent: ", header)
     lock.release()
 
-producer  = asapo_producer.create_producer(endpoint,beamtime,'auto', stream, token, nthreads,60)
+
+producer = asapo_producer.create_producer(endpoint,'processed', beamtime, 'auto', stream, token, nthreads, 60)
 
 producer.set_log_level("debug")
 
-#send single file
-producer.send_file(1, local_path = "./file1", exposed_path = stream+"/"+"file1", user_meta = '{"test_key":"test_val"}', callback = callback)
+# send single file
+producer.send_file(1, local_path="./file1", exposed_path="processed/" + stream + "/" + "file1",
+                   user_meta='{"test_key":"test_val"}', callback=callback)
 
-#send single file without callback
-producer.send_file(10, local_path = "./file1", exposed_path = stream+"/"+"file10", user_meta = '{"test_key":"test_val"}',callback=None)
+# send single file without callback
+producer.send_file(10, local_path="./file1", exposed_path="processed/" + stream + "/" + "file10",
+                   user_meta='{"test_key":"test_val"}', callback=None)
 
-#send subsets
-producer.send_file(2, local_path = "./file1", exposed_path = stream+"/"+"file2",subset=(2,2),user_meta = '{"test_key":"test_val"}', callback = callback)
-producer.send_file(3, local_path = "./file1", exposed_path = stream+"/"+"file3",subset=(2,2),user_meta = '{"test_key":"test_val"}', callback = callback)
+# send subsets
+producer.send_file(2, local_path="./file1", exposed_path="processed/" + stream + "/" + "file2", subset=(2, 2),
+                   user_meta='{"test_key":"test_val"}', callback=callback)
+producer.send_file(3, local_path="./file1", exposed_path="processed/" + stream + "/" + "file3", subset=(2, 2),
+                   user_meta='{"test_key":"test_val"}', callback=callback)
 
-#send meta only
-producer.send_file(3, local_path = "./not_exist",exposed_path = "./whatever",
-                         ingest_mode = asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback = callback)
+# send meta only
+producer.send_file(3, local_path="./not_exist", exposed_path="./whatever",
+                   ingest_mode=asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback=callback)
 
-data = np.arange(10,dtype=np.float64)
+data = np.arange(10, dtype=np.float64)
 
-#send data from array
-producer.send_data(4, stream+"/"+"file5",data,
-                         ingest_mode = asapo_producer.DEFAULT_INGEST_MODE, callback = callback)
+# send data from array
+producer.send_data(4, "processed/" + stream + "/" + "file5", data,
+                   ingest_mode=asapo_producer.DEFAULT_INGEST_MODE, callback=callback)
 
-#send data from string
-producer.send_data(5, stream+"/"+"file6",b"hello",
-                         ingest_mode = asapo_producer.DEFAULT_INGEST_MODE, callback = callback)
+# send data from string
+producer.send_data(5, "processed/" + stream + "/" + "file6", b"hello",
+                   ingest_mode=asapo_producer.DEFAULT_INGEST_MODE, callback=callback)
 
-#send metadata only
-producer.send_data(6, stream+"/"+"file7",None,
-                         ingest_mode = asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback = callback)
+# send metadata only
+producer.send_data(6, "processed/" + stream + "/" + "file7", None,
+                   ingest_mode=asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback=callback)
 
-#send single file/wrong filename
-producer.send_file(1, local_path = "./file2", exposed_path = stream+"/"+"file1", callback = callback)
+# send single file/wrong filename
+producer.send_file(1, local_path="./file2", exposed_path="processed/" + stream + "/" + "file1", callback=callback)
 
 x = np.array([[1, 2, 3], [4, 5, 6]], np.float32)
-producer.send_data(8, stream+"/"+"file8",x,
-                         ingest_mode = asapo_producer.DEFAULT_INGEST_MODE, callback = callback)
+producer.send_data(8, "processed/" + stream + "/" + "file8", x,
+                   ingest_mode=asapo_producer.DEFAULT_INGEST_MODE, callback=callback)
 
 try:
     x = x.T
-    producer.send_data(8, stream+"/"+"file8",x,
-                         ingest_mode = asapo_producer.DEFAULT_INGEST_MODE, callback = callback)
+    producer.send_data(8, "processed/" + stream + "/" + "file8", x,
+                       ingest_mode=asapo_producer.DEFAULT_INGEST_MODE, callback=callback)
 except asapo_producer.AsapoWrongInputError as e:
     print(e)
 else:
@@ -81,55 +89,52 @@ else:
     sys.exit(1)
 
 try:
-    producer.send_file(0, local_path = "./not_exist",exposed_path = "./whatever",
-                       ingest_mode = asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback = callback)
+    producer.send_file(0, local_path="./not_exist", exposed_path="./whatever",
+                       ingest_mode=asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback=callback)
 except asapo_producer.AsapoWrongInputError as e:
     print(e)
 else:
     print("should be error sending id 0 ")
     sys.exit(1)
 
-#send to another substream
-producer.send_data(1, stream+"/"+"file9",None,
-                   ingest_mode = asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, substream="stream", callback = callback)
+# send to another substream
+producer.send_data(1, "processed/" + stream + "/" + "file9", None,
+                   ingest_mode=asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, substream="stream", callback=callback)
 
 # wait normal requests finished before sending duplicates
 
 producer.wait_requests_finished(50000)
 
-#send single file once again
-producer.send_file(1, local_path = "./file1", exposed_path = stream+"/"+"file1", user_meta = '{"test_key":"test_val"}', callback = callback)
-#send metadata only once again
-producer.send_data(6, stream+"/"+"file7",None,
-                         ingest_mode = asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback = callback)
+# send single file once again
+producer.send_file(1, local_path="./file1", exposed_path="processed/" + stream + "/" + "file1",
+                   user_meta='{"test_key":"test_val"}', callback=callback)
+# send metadata only once again
+producer.send_data(6, "processed/" + stream + "/" + "file7", None,
+                   ingest_mode=asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback=callback)
 
-#send same id different data
-producer.send_file(1, local_path = "./file1", exposed_path = stream+"/"+"file1", user_meta = '{"test_key1":"test_val"}', callback = callback)#send same id different data
-producer.send_data(6, stream+"/"+"file8",None,
-                         ingest_mode = asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback = callback)
+# send same id different data
+producer.send_file(1, local_path="./file1", exposed_path="processed/" + stream + "/" + "file1",
+                   user_meta='{"test_key1":"test_val"}', callback=callback)  # send same id different data
+producer.send_data(6, "processed/" + stream + "/" + "file8", None,
+                   ingest_mode=asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback=callback)
 
 producer.wait_requests_finished(50000)
 n = producer.get_requests_queue_size()
-assert_eq(n,0,"requests in queue")
+assert_eq(n, 0, "requests in queue")
 
 info = producer.stream_info()
-assert_eq(info['lastId'],10,"last id")
+assert_eq(info['lastId'], 10, "last id")
 
 info = producer.stream_info('stream')
-assert_eq(info['lastId'],1,"last id from different substream")
-
+assert_eq(info['lastId'], 1, "last id from different substream")
 
 # create with error
 try:
-    producer  = asapo_producer.create_producer(endpoint,beamtime,'auto', stream, token, 0,0)
+    producer = asapo_producer.create_producer(endpoint,'processed', beamtime, 'auto', stream, token, 0, 0)
 except asapo_producer.AsapoWrongInputError as e:
     print(e)
 else:
     print("should be error")
     sys.exit(1)
 
-
-print ('Finished successfully')
-
-
-
+print('Finished successfully')
diff --git a/tests/automatic/producer_receiver/transfer_datasets/check_linux.sh b/tests/automatic/producer_receiver/transfer_datasets/check_linux.sh
index 0f581ea7ed2b9a7b46c8f8cf3e48de99d8b6f01b..b8fb9e196f7861b6d44c27341240c5ace7ca371e 100644
--- a/tests/automatic/producer_receiver/transfer_datasets/check_linux.sh
+++ b/tests/automatic/producer_receiver/transfer_datasets/check_linux.sh
@@ -38,10 +38,10 @@ nomad run discovery.nmd
 
 mkdir -p ${receiver_folder}
 
-$1 localhost:8400 ${beamtime_id} 100 1 1  0 30 3
+$1 localhost:8400 ${beamtime_id} 100 1 1 0 30 3
 
-ls -ln ${receiver_folder}/1_1 | awk '{ print $5 }'| grep 100000
-ls -ln ${receiver_folder}/1_2 | awk '{ print $5 }'| grep 100000
-ls -ln ${receiver_folder}/1_3 | awk '{ print $5 }'| grep 100000
+ls -ln ${receiver_folder}/processed/1_1 | awk '{ print $5 }'| grep 100000
+ls -ln ${receiver_folder}/processed/1_2 | awk '{ print $5 }'| grep 100000
+ls -ln ${receiver_folder}/processed/1_3 | awk '{ print $5 }'| grep 100000
 
 echo 'db.data_default.find({"images._id":{$gt:0}},{"images.name":1})' | mongo asapo_test_detector | grep 1_1 | grep 1_2 | grep 1_3
diff --git a/tests/automatic/producer_receiver/transfer_datasets/check_windows.bat b/tests/automatic/producer_receiver/transfer_datasets/check_windows.bat
index c675100ba2f845d2329cb97c658f9aa4e38a34de..7ae8f9792ea89892dcb89ae446dcf94f71d21614 100644
--- a/tests/automatic/producer_receiver/transfer_datasets/check_windows.bat
+++ b/tests/automatic/producer_receiver/transfer_datasets/check_windows.bat
@@ -15,13 +15,13 @@ mkdir %receiver_folder%
 
 ping 1.0.0.0 -n 1 -w 100 > nul
 
-FOR /F "usebackq" %%A IN ('%receiver_folder%\1_1') DO set size=%%~zA
+FOR /F "usebackq" %%A IN ('%receiver_folder%\processed\1_1') DO set size=%%~zA
 if %size% NEQ 100000 goto :error
 
-FOR /F "usebackq" %%A IN ('%receiver_folder%\1_2') DO set size=%%~zA
+FOR /F "usebackq" %%A IN ('%receiver_folder%\processed\1_2') DO set size=%%~zA
 if %size% NEQ 100000 goto :error
 
-FOR /F "usebackq" %%A IN ('%receiver_folder%\1_3') DO set size=%%~zA
+FOR /F "usebackq" %%A IN ('%receiver_folder%\processed\1_3') DO set size=%%~zA
 if %size% NEQ 100000 goto :error
 
 
diff --git a/tests/automatic/producer_receiver/transfer_single_file/check_linux.sh b/tests/automatic/producer_receiver/transfer_single_file/check_linux.sh
index 1d703fe89d1fcbfa8f3e82be863571a959ab8731..c0d3435cec2d3be4b9a1ba1b9d72f11c3db5c37a 100644
--- a/tests/automatic/producer_receiver/transfer_single_file/check_linux.sh
+++ b/tests/automatic/producer_receiver/transfer_single_file/check_linux.sh
@@ -39,6 +39,6 @@ sleep 1
 
 $1 localhost:8400 ${beamtime_id} 100 1 1  0 30
 
-ls -ln ${receiver_folder}/1 | awk '{ print $5 }'| grep 100000
+ls -ln ${receiver_folder}/processed/1 | awk '{ print $5 }'| grep 100000
 
 $1 localhost:8400 wrong_beamtime_id 100 1 1 0 1 2>&1 | tee /dev/stderr | grep "authorization"
diff --git a/tests/automatic/producer_receiver/transfer_single_file/check_windows.bat b/tests/automatic/producer_receiver/transfer_single_file/check_windows.bat
index 75c4b4c2a45e4c6b5c3b1421bd6dd0e33f4dc5b9..96226b7b346196ae8da728587b8db1982c058454 100644
--- a/tests/automatic/producer_receiver/transfer_single_file/check_windows.bat
+++ b/tests/automatic/producer_receiver/transfer_single_file/check_windows.bat
@@ -15,7 +15,7 @@ mkdir %receiver_folder%
 
 ping 1.0.0.0 -n 1 -w 100 > nul
 
-FOR /F "usebackq" %%A IN ('%receiver_folder%\1') DO set size=%%~zA
+FOR /F "usebackq" %%A IN ('%receiver_folder%\processed\1') DO set size=%%~zA
 if %size% NEQ 100000 goto :error
 
 "%1" localhost:8400 wrong_id 100 1 1 0 2 2>1 | findstr /c:"authorization"  || goto :error
diff --git a/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_linux.sh b/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_linux.sh
index 105baa23426235a2078a53ac93b57a38990a1332..bcd84032b22efe63f53adda3e6cf2259f8be9507 100644
--- a/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_linux.sh
+++ b/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_linux.sh
@@ -45,4 +45,4 @@ cat out
 cat out | grep '"buf_id" : 0'
 cat out | grep user_meta
 
-ls -ln ${receiver_folder}/1 | awk '{ print $5 }'| grep 60000000
+ls -ln ${receiver_folder}/processed/1 | awk '{ print $5 }'| grep 60000000
diff --git a/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_windows.bat b/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_windows.bat
index bc501b842e9d4a4e61aab6630ae8632202cbcdae..309236645ef298447f405c80657e0efb53fc7586 100644
--- a/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_windows.bat
+++ b/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_windows.bat
@@ -15,7 +15,7 @@ mkdir %receiver_folder%
 
 ping 1.0.0.0 -n 1 -w 100 > nul
 
-FOR /F "usebackq" %%A IN ('%receiver_folder%\1') DO set size=%%~zA
+FOR /F "usebackq" %%A IN ('%receiver_folder%\processed\1') DO set size=%%~zA
 if %size% NEQ 60000000 goto :error
 
 echo db.data_default.find({"_id":1}) |  %mongo_exe% %beamtime_id%_detector  > out
diff --git a/tests/automatic/settings/authorizer_settings.json.tpl.lin b/tests/automatic/settings/authorizer_settings.json.tpl.lin
index bfab06d1339cfb1a3cb8de2e8da40cf1f5a9bbee..1c411f2b66702fe72e7b225bd1e1fe9ffb3b57ae 100644
--- a/tests/automatic/settings/authorizer_settings.json.tpl.lin
+++ b/tests/automatic/settings/authorizer_settings.json.tpl.lin
@@ -1,9 +1,16 @@
 {
   "Port": {{ env "NOMAD_PORT_authorizer" }},
   "LogLevel":"debug",
-  "AlwaysAllowedBeamtimes":[{"beamtimeId":"asapo_test","beamline":"test","core-path":"/tmp/asapo/receiver/files/test_facility/gpfs/test/2019/data/asapo_test"},
+  "AlwaysAllowedBeamtimes":[{"beamtimeId":"asapo_test","beamline":"test","core-path":"/tmp/asapo/receiver/files/test_facility/gpfs/test/2019/data/asapo_test",
+  "beamline-path":"/tmp/asapo/receiver/files/beamline/test/current"},
   {"beamtimeId":"asapo_test1","beamline":"test1","core-path":"/tmp/asapo/receiver/files/test_facility/gpfs/test1/2019/data/asapo_test1"},
   {"beamtimeId":"asapo_test2","beamline":"test2","core-path":"/tmp/asapo/receiver/files/test_facility/gpfs/test2/2019/data/asapo_test2"}],
   "SecretFile":"auth_secret.key",
-  "TokenDurationMin":600
+  "TokenDurationMin":600,
+  "Ldap":
+  {
+     "Uri" : "ldap://localhost:389",
+     "BaseDn" : "ou=rgy,o=desy,c=de",
+     "FilterTemplate" : "(cn=a3__BEAMLINE__-hosts)"
+  }
 }
diff --git a/tests/automatic/settings/authorizer_settings.json.tpl.win b/tests/automatic/settings/authorizer_settings.json.tpl.win
index 755b4d25c697a13124477123b9f20a0e50035322..e0fd183c0f8d472f991f6e64c8ae019548393ae4 100644
--- a/tests/automatic/settings/authorizer_settings.json.tpl.win
+++ b/tests/automatic/settings/authorizer_settings.json.tpl.win
@@ -1,9 +1,16 @@
 {
   "Port": {{ env "NOMAD_PORT_authorizer" }},
   "LogLevel":"debug",
-  "AlwaysAllowedBeamtimes":[{"beamtimeId":"asapo_test","beamline":"test","core-path":"c:\\tmp\\asapo\\receiver\\files\\test_facility\\gpfs\\test\\2019\\data\\asapo_test"},
+  "AlwaysAllowedBeamtimes":[{"beamtimeId":"asapo_test","beamline":"test","core-path":"c:\\tmp\\asapo\\receiver\\files\\test_facility\\gpfs\\test\\2019\\data\\asapo_test",
+  "beamline-path":"c:\\tmp\\asapo\\receiver\\files\\beamline\\test\\current"},
   {"beamtimeId":"asapo_test1","beamline":"test1","core-path":"c:\\tmp\\asapo\\receiver\\files\\test_facility\\gpfs\\test1\\2019\\data\\asapo_test1"},
   {"beamtimeId":"asapo_test2","beamline":"test2","core-path":"c:\\tmp\\asapo\\receiver\\files\\test_facility\\gpfs\\test2\\2019\\data\\asapo_test2"}],
   "SecretFile":"auth_secret.key",
-  "TokenDurationMin":600
+  "TokenDurationMin":600,
+  "Ldap":
+  {
+     "Uri" : "ldap://localhost:389",
+     "BaseDn" : "ou=rgy,o=desy,c=de",
+     "FilterTemplate" : "(cn=a3__BEAMLINE__-hosts)"
+  }
 }
diff --git a/tests/manual/performance_broker_receiver/getlast_broker.cpp b/tests/manual/performance_broker_receiver/getlast_broker.cpp
index d4ce9cf5190ebe684a533c1f4c90543f34686784..658af3435661e504e64bad6d4a90f0b40d9adb42 100644
--- a/tests/manual/performance_broker_receiver/getlast_broker.cpp
+++ b/tests/manual/performance_broker_receiver/getlast_broker.cpp
@@ -61,7 +61,7 @@ std::vector<std::thread> StartThreads(const Args& params,
         asapo::FileInfo fi;
         Error err;
         auto broker = asapo::DataBrokerFactory::CreateServerBroker(params.server, params.file_path, true,
-                      asapo::SourceCredentials{params.beamtime_id, "", "", params.token}, &err);
+                      asapo::SourceCredentials{asapo::SourceType::kProcessed,params.beamtime_id, "", "", params.token}, &err);
         broker->SetTimeout((uint64_t) params.timeout_ms);
         asapo::FileData data;
 
diff --git a/tests/manual/producer_cpp/CMakeLists.txt b/tests/manual/producer_cpp/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..07d230c4b413da52d97373a31ed4e31d6a517dfd
--- /dev/null
+++ b/tests/manual/producer_cpp/CMakeLists.txt
@@ -0,0 +1,11 @@
+set(TARGET_NAME producer)
+set(SOURCE_FILES
+        producer.cpp
+        )
+
+add_executable(${TARGET_NAME} ${SOURCE_FILES})
+target_include_directories(${TARGET_NAME} PUBLIC include ${CMAKE_SOURCE_DIR}/common/cpp/include)
+
+#Add all necessary common libraries
+GET_PROPERTY(ASAPO_COMMON_IO_LIBRARIES GLOBAL PROPERTY ASAPO_COMMON_IO_LIBRARIES)
+target_link_libraries(${TARGET_NAME} ${ASAPO_COMMON_IO_LIBRARIES} asapo-producer)
diff --git a/tests/manual/producer_cpp/producer.cpp b/tests/manual/producer_cpp/producer.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..357b857f6d419c344c9463d6f22687f08ff4cdd6
--- /dev/null
+++ b/tests/manual/producer_cpp/producer.cpp
@@ -0,0 +1,111 @@
+#include <thread>
+#include <chrono>
+#include "asapo_producer.h"
+
+
+void ProcessAfterSend(asapo::RequestCallbackPayload payload, asapo::Error err) {
+    if (err) {
+        std::cerr << "error/warning during send: " << err << std::endl;
+        return;
+    } else {
+        std::cout << "successfully sent " << payload.original_header.Json() << std::endl;
+        return;
+    }
+}
+
+void exit_if_error(std::string error_string, const asapo::Error& err) {
+    if (err) {
+        std::cerr << error_string << err << std::endl;
+        //exit(EXIT_FAILURE);
+    }
+}
+
+std::string format_string(uint32_t in, std::string format="%05d")
+{
+    if(in > 99999)
+        in = 0;
+
+    char buf[6];
+    snprintf(buf,sizeof(buf),format.c_str(),in);
+    return std::string(buf);
+
+}
+
+
+int main(int argc, char* argv[]) {
+
+    uint32_t submodule = 1;
+    uint32_t sleeptime = 1;
+
+
+    if(argc >= 2)
+        submodule = atoi(argv[1]);
+
+    if(argc >=3)
+        sleeptime = atoi(argv[2]);
+
+
+    asapo::Error err;
+
+    auto endpoint = "localhost:8400"; // or your endpoint
+    auto beamtime = "asapo_test";
+
+    auto producer = asapo::Producer::Create(endpoint, 1,asapo::RequestHandlerType::kTcp,
+                                            asapo::SourceCredentials{asapo::SourceType::kProcessed,beamtime, "", "", ""}, 60, &err);
+    exit_if_error("Cannot start producer", err);
+
+    uint32_t eventid = 1;
+    uint32_t start_number = 1;
+
+    // number of files per acquisition per module
+    const uint32_t number_of_splitted_files = 5;
+
+    // number of modules
+    const uint32_t modules = 3;
+
+    while(true)
+    {
+        for(uint32_t part=1; part<=number_of_splitted_files; ++part)
+        {
+            std::string to_send = "processed/lambdatest_"
+                + format_string(start_number) // file start number (acquisition id)
+                + "_part" + format_string(part) // file part id (chunk id)
+                + "_m" + format_string(submodule, std::string("%02d"));
+            auto send_size = to_send.size() + 1;
+            auto buffer =  asapo::FileData(new uint8_t[send_size]);
+            memcpy(buffer.get(), to_send.c_str(), send_size);
+            std::string substream = std::to_string(start_number);
+            // std::cout<<"submodule:"<<submodule
+            //          <<"- substream:"<<substream
+            //          <<"- filename:"<<to_send<<std::endl;
+
+            asapo::EventHeader event_header{submodule, send_size, to_send,"", part,modules};
+            // err = producer->SendData(event_header,substream, std::move(buffer),
+            //                          asapo::kTransferMetaDataOnly, &ProcessAfterSend);
+
+            err = producer->SendData(event_header,substream, std::move(buffer),
+                                     asapo::kDefaultIngestMode, &ProcessAfterSend);
+            exit_if_error("Cannot send file", err);
+
+            err = producer->WaitRequestsFinished(1000);
+            exit_if_error("Producer exit on timeout", err);
+            std::this_thread::sleep_for (std::chrono::seconds(sleeptime));
+
+            // if(part == number_of_splitted_files)
+            // {
+
+            //     err = producer->SendSubstreamFinishedFlag(substream,
+            //                                               part,
+            //                                               std::to_string(start_number+1),
+            //                                               &ProcessAfterSend);
+            //     exit_if_error("Cannot send file", err);
+            // }
+
+        }
+        start_number++;
+
+    }
+
+
+    return EXIT_SUCCESS;
+}
diff --git a/tests/manual/python_tests/producer/short_test.py b/tests/manual/python_tests/producer/short_test.py
index e5e1b98e3039eef5259a022a9a63b2c0629d2d06..9fb04ff223109b7c7b229b3c10604caa0ffe695d 100644
--- a/tests/manual/python_tests/producer/short_test.py
+++ b/tests/manual/python_tests/producer/short_test.py
@@ -26,7 +26,7 @@ def assert_err(err):
         print(err)
         sys.exit(1)
 
-producer = asapo_producer.create_producer(endpoint,beamtime,'auto', stream, token, nthreads ,0)
+producer = asapo_producer.create_producer(endpoint,'processed',beamtime,'auto', stream, token, nthreads ,0)
 
 producer.set_log_level("debug")
 
diff --git a/tests/manual/python_tests/producer/test.py b/tests/manual/python_tests/producer/test.py
index 2d364a7d7e4827f655e79b38229d88b7db457214..da68de94514b4c4a95c14f061db61b995cf263c0 100644
--- a/tests/manual/python_tests/producer/test.py
+++ b/tests/manual/python_tests/producer/test.py
@@ -27,7 +27,7 @@ def assert_err(err):
         print(err)
         sys.exit(1)
 
-producer = asapo_producer.create_producer(endpoint,beamtime,'auto', stream, token, nthreads ,0)
+producer = asapo_producer.create_producer(endpoint,'processed',beamtime,'auto', stream, token, nthreads ,0)
 
 producer.set_log_level("info")
 
diff --git a/tests/manual/python_tests/producer_wait_bug_mongo/test.py b/tests/manual/python_tests/producer_wait_bug_mongo/test.py
index 9e420f33ccef2879ae05e46fa3a616edf241d88d..06d658cbc95aa68921d16f2d42a984ee62f92191 100644
--- a/tests/manual/python_tests/producer_wait_bug_mongo/test.py
+++ b/tests/manual/python_tests/producer_wait_bug_mongo/test.py
@@ -27,7 +27,7 @@ def assert_err(err):
         print(err)
         sys.exit(1)
 
-producer = asapo_producer.create_producer(endpoint,beamtime,'auto', stream, token, nthreads, 600)
+producer = asapo_producer.create_producer(endpoint,'processed',beamtime,'auto', stream, token, nthreads, 600)
 
 producer.set_log_level("debug")
 
diff --git a/tests/manual/python_tests/producer_wait_threads/producer_api.py b/tests/manual/python_tests/producer_wait_threads/producer_api.py
index 85ccd36c07dfe93c0018d1bb017fedbb8e18f11b..22fc727437f2f18fffa8c31017c1031a1b59c7dc 100644
--- a/tests/manual/python_tests/producer_wait_threads/producer_api.py
+++ b/tests/manual/python_tests/producer_wait_threads/producer_api.py
@@ -22,7 +22,7 @@ def callback(header,err):
         print ("successfuly sent: ",header)
     lock.release()
 
-producer  = asapo_producer.create_producer(endpoint,beamtime, 'auto', stream, token, nthreads, 600)
+producer  = asapo_producer.create_producer(endpoint,'processed',beamtime, 'auto', stream, token, nthreads, 600)
 
 producer.set_log_level("info")
 
@@ -63,7 +63,7 @@ if n!=0:
 
 # create with error
 try:
-    producer  = asapo_producer.create_producer(endpoint,beamtime,'auto', stream, token, 0, 600)
+    producer  = asapo_producer.create_producer(endpoint,'processed',beamtime,'auto', stream, token, 0, 600)
 except Exception as Asapo:
     print(e)
 else:
diff --git a/tests/manual/python_tests/producer_wait_threads/test.py b/tests/manual/python_tests/producer_wait_threads/test.py
index 5ebe7b95caec871caed2240f232a8494b05857bf..d1fbaf05b81c169b0f7295b867fe9b091fc788a8 100644
--- a/tests/manual/python_tests/producer_wait_threads/test.py
+++ b/tests/manual/python_tests/producer_wait_threads/test.py
@@ -22,7 +22,7 @@ def callback(header,err):
         print ("successfuly sent: ",header)
     lock.release()
 
-producer  = asapo_producer.create_producer(endpoint,beamtime,'auto', stream, token, nthreads, 600)
+producer  = asapo_producer.create_producer(endpoint,'processed',beamtime,'auto', stream, token, nthreads, 600)
 
 producer.set_log_level("info")