diff --git a/.gitignore b/.gitignore
index d7c8d680d0598ba1ac993843cf1d2ee7f45fad02..c04618641923b19dd84739be023d064d3c645ba3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -54,9 +54,7 @@
 .idea/**/libraries
 
 # CMake
-cmake-build-debug/
-cmake-build-release/
-cmake-build-relwithdebinfo/
+cmake-build-*/
 
 # Mongo Explorer plugin:
 .idea/**/mongoSettings.xml
diff --git a/authorizer/src/asapo_authorizer/server/authorize.go b/authorizer/src/asapo_authorizer/server/authorize.go
index 1b984a9c456a3f56e4d4793b3cecdaf9ffcc27eb..d5192fb9803b0df5dafe05c44e2ed0651903c794 100644
--- a/authorizer/src/asapo_authorizer/server/authorize.go
+++ b/authorizer/src/asapo_authorizer/server/authorize.go
@@ -294,7 +294,7 @@ func authorizeMeta(meta common.BeamtimeMeta, request authorizationRequest, creds
 
 	if creds.Beamline != "auto" && meta.Beamline != creds.Beamline {
 		err_string := "given beamline (" + creds.Beamline + ") does not match the found one (" + meta.Beamline + ")"
-		log.Debug(err_string)
+		log.Error(err_string)
 		return nil, errors.New(err_string)
 	}
 
@@ -330,8 +330,14 @@ func authorize(request authorizationRequest, creds SourceCredentials) (common.Be
 	}
 
 	meta.AccessTypes = accessTypes
-	log.Debug("authorized creds bl/bt: ", creds.Beamline+"/"+creds.BeamtimeId+", beamtime "+meta.BeamtimeId+" for "+request.OriginHost+" in "+
-		meta.Beamline+", type "+meta.Type, "online path "+meta.OnlinePath+", offline path "+meta.OfflinePath)
+	log.WithFields(map[string]interface{}{
+		"beamline":creds.Beamline,
+		"beamtime":creds.BeamtimeId,
+		"origin":request.OriginHost,
+		"type":meta.Type,
+		"onlinePath":meta.OnlinePath,
+		"offlinePath":meta.OfflinePath,
+	}).Debug("authorized credentials")
 	return meta, nil
 }
 
@@ -372,7 +378,7 @@ func routeAuthorize(w http.ResponseWriter, r *http.Request) {
 	}
 
 	w.WriteHeader(http.StatusOK)
-	w.Write([]byte(res))
+	w.Write(res)
 }
 
 func checkRole(w http.ResponseWriter, r *http.Request, role string) error {
diff --git a/authorizer/src/asapo_authorizer/server/authorize_test.go b/authorizer/src/asapo_authorizer/server/authorize_test.go
index 1c6e6400554b5ee6fe8fbc6dd324b8ad1ae7d346..e3bb6c11e23b699adc034354b57ced49ca63143e 100644
--- a/authorizer/src/asapo_authorizer/server/authorize_test.go
+++ b/authorizer/src/asapo_authorizer/server/authorize_test.go
@@ -387,3 +387,21 @@ func TestGetBeamtimeInfo(t *testing.T) {
 	}
 
 }
+
+func TestExpiredToken(t *testing.T) {
+	Auth = authorization.NewAuth(utils.NewJWTAuth("secret_user"), utils.NewJWTAuth("secret_admin"), utils.NewJWTAuth("secret"))
+	token := "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2MzU3NTMxMDksImp0aSI6ImMyOTR0NWFodHY1am9vZHVoaGNnIiwic3ViIjoiYnRfMTEwMTIxNzEiLCJFeHRyYUNsYWltcyI6eyJBY2Nlc3NUeXBlcyI6WyJyZWFkIiwid3JpdGUiXX19.kITePbv_dXY2ACxpAQ-PeQJPQtnR02bMoFrXq0Pbcm0"
+	request := authorizationRequest{"asapo_test%%" + token, "host"}
+	creds, _ := getSourceCredentials(request)
+
+	creds.Token = token
+	creds.DataSource = "test"
+	creds.BeamtimeId = "11012171"
+	creds.Beamline = "p21.2"
+	_, err := authorizeByToken(creds)
+	assert.Error(t, err, "")
+	if err != nil {
+		assert.Contains(t, err.Error(), "expired")
+	}
+
+}
diff --git a/authorizer/src/asapo_authorizer/server/folder_token.go b/authorizer/src/asapo_authorizer/server/folder_token.go
index 5d10ed86a844902a0c33101d0fd7862f5628e854..9e5c305e1b3f7edacf85110a7bbd537bbd6b7664 100644
--- a/authorizer/src/asapo_authorizer/server/folder_token.go
+++ b/authorizer/src/asapo_authorizer/server/folder_token.go
@@ -125,7 +125,10 @@ func routeFolderToken(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	log.Debug("generated folder token for beamtime " + request.BeamtimeId + ", folder " + request.Folder)
+	log.WithFields(map[string]interface{}{
+		"folder":request.Folder,
+		"beamtime":request.BeamtimeId,
+	}).Debug("issued folder token")
 
 	answer := folderTokenResponce(token)
 	w.WriteHeader(http.StatusOK)
diff --git a/authorizer/src/asapo_authorizer/server/introspect.go b/authorizer/src/asapo_authorizer/server/introspect.go
index 5dd591a2c37f274e552cba63aab7e60829b5431b..7355b3f50d7596c98b54fefec7ad1f7e33d6920e 100644
--- a/authorizer/src/asapo_authorizer/server/introspect.go
+++ b/authorizer/src/asapo_authorizer/server/introspect.go
@@ -46,7 +46,10 @@ func routeIntrospect(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	log.Debug("verified user token for "+response.Sub)
+	log.WithFields(map[string]interface{}{
+		"subject":response.Sub,
+	}).Debug("verified user token")
+
 
 	answer,_ := json.Marshal(&response)
 	w.WriteHeader(http.StatusOK)
diff --git a/authorizer/src/asapo_authorizer/server/issue_token.go b/authorizer/src/asapo_authorizer/server/issue_token.go
index bf2524abf9317081b2dd22f063cc1cf05d1774a9..e8da6089faac9a0fdb83f6f2db1b46b14d0cebb7 100644
--- a/authorizer/src/asapo_authorizer/server/issue_token.go
+++ b/authorizer/src/asapo_authorizer/server/issue_token.go
@@ -30,8 +30,8 @@ func extractUserTokenrequest(r *http.Request) (request structs.IssueTokenRequest
 	}
 
 	for _, ar := range request.AccessTypes {
-		if ar != "read" && ar != "write" && !(ar== "writeraw" && request.Subject["beamline"]!="") {
-			return request, errors.New("wrong requested access rights: "+ar)
+		if ar != "read" && ar != "write" && !(ar == "writeraw" && request.Subject["beamline"] != "") {
+			return request, errors.New("wrong requested access rights: " + ar)
 		}
 	}
 
@@ -72,8 +72,12 @@ func issueUserToken(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-
-	log.Debug("generated user token ")
+	log.WithFields(map[string]interface{}{
+		"id":      claims.Id,
+		"subject": claims.Subject,
+		"validDays": request.DaysValid,
+		"accessTypes": request.AccessTypes,
+	}).Info("issued user token")
 
 	answer := authorization.UserTokenResponce(request, token)
 	w.WriteHeader(http.StatusOK)
diff --git a/authorizer/src/asapo_authorizer/server/revoke_token.go b/authorizer/src/asapo_authorizer/server/revoke_token.go
index c1c98c6f2adf4c3a3fe05fd94b5773352861b75c..eb1dea35dada2d21854f9d9b9ffd822a59bb4c00 100644
--- a/authorizer/src/asapo_authorizer/server/revoke_token.go
+++ b/authorizer/src/asapo_authorizer/server/revoke_token.go
@@ -32,7 +32,12 @@ func revokeToken(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	log.Debug("revoked token " + rec.Token)
+	log.WithFields(map[string]interface{}{
+		"id":      rec.Id,
+		"subject": rec.Subject,
+		"token": rec.Token,
+	}).Info("revoked token")
+
 	answer, _ := json.Marshal(&rec)
 	w.WriteHeader(http.StatusOK)
 	w.Write(answer)
diff --git a/authorizer/src/asapo_authorizer/server/server_test.go b/authorizer/src/asapo_authorizer/server/server_test.go
deleted file mode 100644
index f18a0f738e4b4ed9397f8449a679f8650b7f6928..0000000000000000000000000000000000000000
--- a/authorizer/src/asapo_authorizer/server/server_test.go
+++ /dev/null
@@ -1,143 +0,0 @@
-package server
-
-/*
-
-import (
-	"asapo_authorizer/database"
-	"asapo_common/discovery"
-	"asapo_common/logger"
-	"errors"
-	"github.com/stretchr/testify/assert"
-	"github.com/stretchr/testify/mock"
-	"net/http"
-	"net/http/httptest"
-	"testing"
-)
-
-func setup() *database.MockedDatabase {
-	mock_db := new(database.MockedDatabase)
-	mock_db.On("Connect", mock.AnythingOfType("string")).Return(nil)
-
-	return mock_db
-}
-
-func setup_and_init(t *testing.T) *database.MockedDatabase {
-	mock_db := new(database.MockedDatabase)
-	mock_db.On("Connect", mock.AnythingOfType("string")).Return(nil)
-
-	InitDB(mock_db)
-	assertExpectations(t, mock_db)
-	return mock_db
-}
-
-func assertExpectations(t *testing.T, mock_db *database.MockedDatabase) {
-	mock_db.AssertExpectations(t)
-	mock_db.ExpectedCalls = nil
-	logger.MockLog.AssertExpectations(t)
-	logger.MockLog.ExpectedCalls = nil
-}
-
-var initDBTests = []struct {
-	address string
-	answer  error
-	message string
-}{
-	{"bad address", errors.New(""), "error on get bad address"},
-	{"good address", nil, "no error on good address"},
-}
-
-func TestInitDBWithWrongAddress(t *testing.T) {
-	mock_db := setup()
-
-	mock_db.ExpectedCalls = nil
-
-	settings.DatabaseServer = "0.0.0.0:0000"
-
-	for _, test := range initDBTests {
-		mock_db.On("Connect", "0.0.0.0:0000").Return(test.answer)
-
-		err := InitDB(mock_db)
-
-		assert.Equal(t, test.answer, err, test.message)
-		assertExpectations(t, mock_db)
-	}
-	db = nil
-}
-
-func TestInitDBWithAutoAddress(t *testing.T) {
-	mongo_address := "0.0.0.0:0000"
-	mock_db := setup()
-
-	mock_db.ExpectedCalls = nil
-
-	settings.DatabaseServer = "auto"
-	mock_server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
-		assert.Equal(t, req.URL.String(), "/asapo-mongodb", "request string")
-		rw.Write([]byte(mongo_address))
-	}))
-	defer mock_server.Close()
-
-	discoveryService = discovery.CreateDiscoveryService(mock_server.Client(), mock_server.URL)
-
-	mock_db.On("Connect", "0.0.0.0:0000").Return(nil)
-
-	err := InitDB(mock_db)
-
-	assert.Equal(t, nil, err, "auto connect ok")
-	assertExpectations(t, mock_db)
-	db = nil
-}
-
-func TestReconnectDB(t *testing.T) {
-	mongo_address := "0.0.0.0:0000"
-	mock_server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
-		assert.Equal(t, req.URL.String(), "/asapo-mongodb", "request string")
-		rw.Write([]byte(mongo_address))
-	}))
-	discoveryService = discovery.CreateDiscoveryService(mock_server.Client(), mock_server.URL)
-
-	defer mock_server.Close()
-
-	settings.DatabaseServer = "auto"
-	mock_db := setup_and_init(t)
-	mock_db.ExpectedCalls = nil
-
-	mongo_address = "1.0.0.0:0000"
-
-	mock_db.On("Close").Return()
-
-	mock_db.On("Connect", "1.0.0.0:0000").Return(nil)
-
-	err := ReconnectDb()
-	assert.Equal(t, nil, err, "auto connect ok")
-	assertExpectations(t, mock_db)
-
-	db = nil
-}
-
-func TestErrorWhenReconnectNotConnectedDB(t *testing.T) {
-	err := ReconnectDb()
-	assert.NotNil(t, err, "error reconnect")
-	db = nil
-}
-
-
-func TestCleanupDBWithoutInit(t *testing.T) {
-	mock_db := setup()
-
-	mock_db.AssertNotCalled(t, "Close")
-
-	CleanupDB()
-}
-
-func TestCleanupDBInit(t *testing.T) {
-	settings.DatabaseServer = "0.0.0.0"
-	mock_db := setup_and_init(t)
-
-	mock_db.On("Close").Return()
-
-	CleanupDB()
-
-	assertExpectations(t, mock_db)
-}
-*/
\ No newline at end of file
diff --git a/authorizer/src/asapo_authorizer/token_store/token_store.go b/authorizer/src/asapo_authorizer/token_store/token_store.go
index 756d7cc2d970292697c0b2fd4ecd0fcebcd909a9..48e45fbc090ad46b99e28fa9d1daaab2c2394bb2 100644
--- a/authorizer/src/asapo_authorizer/token_store/token_store.go
+++ b/authorizer/src/asapo_authorizer/token_store/token_store.go
@@ -51,7 +51,7 @@ func (store *TokenStore) initDB() (dbaddress string, err error) {
 		if dbaddress == "" {
 			return "", errors.New("no token_store servers found")
 		}
-		log.Debug("Got mongodb server: " + dbaddress)
+		log.WithFields(map[string]interface{}{"address": dbaddress}).Debug("found mongodb server")
 	}
 	return dbaddress, store.db.Connect(dbaddress)
 
@@ -66,7 +66,7 @@ func (store *TokenStore) reconnectIfNeeded(db_error error) {
 	if dbaddress, err := store.reconnectDb(); err != nil {
 		log.Error("cannot reconnect to database: " + err.Error())
 	} else {
-		log.Debug("reconnected to database at" + dbaddress)
+		log.WithFields(map[string]interface{}{"address":dbaddress}).Debug("reconnected to database")
 	}
 }
 
@@ -196,7 +196,7 @@ func (store *TokenStore) loopGetRevokedTokens() {
 			next_update = 1
 			log.Error("cannot get revoked tokens: " + err.Error())
 		} else {
-			log.Debug("received revoked tokens list")
+			//log.Debug("received revoked tokens list")
 			next_update = common.Settings.UpdateRevokedTokensIntervalSec
 			tokens := make([]string, len(res))
 			for i, token := range res {
diff --git a/authorizer/src/asapo_authorizer/token_store/token_store_test.go b/authorizer/src/asapo_authorizer/token_store/token_store_test.go
index d648f0ef3afbcf9d259237f425a3ca7ccc85e449..4c5c271ab12b05d1248f6429ada35ef37ac69eeb 100644
--- a/authorizer/src/asapo_authorizer/token_store/token_store_test.go
+++ b/authorizer/src/asapo_authorizer/token_store/token_store_test.go
@@ -60,6 +60,7 @@ func (suite *TokenStoreTestSuite) TestProcessRequestWithConnectionError() {
 	ExpectReconnect(suite.mock_db)
 	suite.mock_db.On("ProcessRequest", mock.Anything, mock.Anything).Return([]byte(""),
 		&DBError{utils.StatusServiceUnavailable, ""})
+	logger.MockLog.On("WithFields", mock.Anything)
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("reconnected")))
 
 	err := suite.store.AddToken(TokenRecord{})
@@ -138,8 +139,6 @@ func (suite *TokenStoreTestSuite) TestProcessRequestCheckRevokedToken() {
 		Op:         "read_records",
 	}
 	suite.mock_db.On("ProcessRequest", req, mock.Anything).Return([]byte(""), nil)
-
-	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("list")))
 	time.Sleep(time.Second*1)
 	res,err := suite.store.IsTokenRevoked("123")
 	suite.Equal(err, nil, "ok")
diff --git a/broker/src/asapo_broker/database/database.go b/broker/src/asapo_broker/database/database.go
index 7e48a6e526a07a1d316f240acba65384c32829a2..814d1a321e701f4e1e91dc5dc958f319a9ee4966 100644
--- a/broker/src/asapo_broker/database/database.go
+++ b/broker/src/asapo_broker/database/database.go
@@ -1,9 +1,13 @@
 package database
 
-import "asapo_common/utils"
+import (
+	"asapo_common/logger"
+	"asapo_common/utils"
+)
 
 type Request struct {
-	DbName         string
+	Beamtime       string
+	DataSource     string
 	Stream         string
 	GroupId        string
 	Op             string
@@ -12,6 +16,20 @@ type Request struct {
 	ExtraParam     string
 }
 
+func (request *Request) Logger() logger.Logger {
+	return logger.WithFields(map[string]interface{}{
+		"beamtime":   request.Beamtime,
+		"dataSource": decodeString(request.DataSource),
+		"stream":     decodeString(request.Stream),
+		"groupId":    decodeString(request.GroupId),
+		"operation":  request.Op,
+	})
+}
+
+func (request *Request) DbName() string {
+	return request.Beamtime + "_" + request.DataSource
+}
+
 type Agent interface {
 	ProcessRequest(request Request) ([]byte, error)
 	Ping() error
@@ -21,7 +39,7 @@ type Agent interface {
 }
 
 type DBSettings struct {
-	ReadFromInprocessPeriod int
+	ReadFromInprocessPeriod   int
 	UpdateStreamCachePeriodMs int
 }
 
@@ -42,4 +60,3 @@ func GetStatusCodeFromError(err error) int {
 		return utils.StatusServiceUnavailable
 	}
 }
-
diff --git a/broker/src/asapo_broker/database/encoding.go b/broker/src/asapo_broker/database/encoding.go
index 6e61d95f0d6a6fe462aca48754223d38da30616b..86b477bd23498b4dae00070fa50caa0d9c710452 100644
--- a/broker/src/asapo_broker/database/encoding.go
+++ b/broker/src/asapo_broker/database/encoding.go
@@ -80,8 +80,9 @@ func encodeStringForColName(original string) (result string) {
 }
 
 func encodeRequest(request *Request) error {
-	request.DbName = encodeStringForDbName(request.DbName)
-	if len(request.DbName)> max_encoded_source_size {
+	request.DataSource = encodeStringForDbName(request.DataSource)
+	request.Beamtime = encodeStringForDbName(request.Beamtime)
+	if len(request.DbName()) > max_encoded_source_size {
 		return &DBError{utils.StatusWrongInput, "source name is too long"}
 	}
 
diff --git a/broker/src/asapo_broker/database/encoding_test.go b/broker/src/asapo_broker/database/encoding_test.go
index 1b018289e8f1d6271b97b4a6a1e6ff9925e24ffe..82447de77f75f462db668a160999ea23dcc35b54 100644
--- a/broker/src/asapo_broker/database/encoding_test.go
+++ b/broker/src/asapo_broker/database/encoding_test.go
@@ -18,7 +18,8 @@ func TestEncoding(t *testing.T) {
 	assert.Equal(t, sourceDecoded, source)
 
 	r := Request{
-		DbName:         source,
+		Beamtime:       "bt",
+		DataSource:     source,
 		Stream:         stream,
 		GroupId:        stream,
 		Op:             "",
@@ -29,7 +30,7 @@ func TestEncoding(t *testing.T) {
 	err := encodeRequest(&r)
 	assert.Equal(t, r.Stream, streamEncoded)
 	assert.Equal(t, r.GroupId, streamEncoded)
-	assert.Equal(t, r.DbName, sourceEncoded)
+	assert.Equal(t, r.DataSource, sourceEncoded)
 
 	assert.Nil(t, err)
 }
@@ -61,9 +62,10 @@ func TestEncodingTooLong(t *testing.T) {
 	for _, test := range encodeTests {
 		stream := RandomString(test.streamSize)
 		group := RandomString(test.groupSize)
-		source := RandomString(test.sourceSize)
+		source := RandomString(test.sourceSize - 3)
 		r := Request{
-			DbName:         source,
+			Beamtime:       "bt",
+			DataSource:     source,
 			Stream:         stream,
 			GroupId:        group,
 			Op:             "",
diff --git a/broker/src/asapo_broker/database/mongodb.go b/broker/src/asapo_broker/database/mongodb.go
index 0291da19f76a6235dbfcc947306038414a7b53e4..7a9d2c13ddd66cab74161cca616a3f4dc77c0946 100644
--- a/broker/src/asapo_broker/database/mongodb.go
+++ b/broker/src/asapo_broker/database/mongodb.go
@@ -1,9 +1,10 @@
-//+build !test
+//go:build !test
+// +build !test
 
 package database
 
 import (
-	"asapo_common/logger"
+	log "asapo_common/logger"
 	"asapo_common/utils"
 	"context"
 	"encoding/json"
@@ -84,10 +85,10 @@ const (
 
 type fieldChangeRequest struct {
 	collectionName string
-	fieldName string
-	op        int
-	max_ind   int
-	val       int
+	fieldName      string
+	op             int
+	max_ind        int
+	val            int
 }
 
 var dbSessionLock sync.Mutex
@@ -199,7 +200,7 @@ func maxIndexQuery(request Request, returnIncompete bool) bson.M {
 }
 
 func (db *Mongodb) getMaxIndex(request Request, returnIncompete bool) (max_id int, err error) {
-	c := db.client.Database(request.DbName).Collection(data_collection_name_prefix + request.Stream)
+	c := db.client.Database(request.DbName()).Collection(data_collection_name_prefix + request.Stream)
 	q := maxIndexQuery(request, returnIncompete)
 
 	opts := options.FindOne().SetSort(bson.M{"_id": -1}).SetReturnKey(true)
@@ -227,7 +228,7 @@ func duplicateError(err error) bool {
 func (db *Mongodb) setCounter(request Request, ind int) (err error) {
 	update := bson.M{"$set": bson.M{pointer_field_name: ind}}
 	opts := options.Update().SetUpsert(true)
-	c := db.client.Database(request.DbName).Collection(pointer_collection_name)
+	c := db.client.Database(request.DbName()).Collection(pointer_collection_name)
 	q := bson.M{"_id": request.GroupId + "_" + request.Stream}
 	_, err = c.UpdateOne(context.TODO(), q, update, opts)
 	return
@@ -252,7 +253,7 @@ func (db *Mongodb) changeField(request Request, change fieldChangeRequest, res i
 
 	opts := options.FindOneAndUpdate().SetUpsert(true).SetReturnDocument(options.After)
 	q := bson.M{"_id": request.GroupId + "_" + request.Stream, change.fieldName: bson.M{"$lt": change.max_ind}}
-	c := db.client.Database(request.DbName).Collection(change.collectionName)
+	c := db.client.Database(request.DbName()).Collection(change.collectionName)
 
 	err = c.FindOneAndUpdate(context.TODO(), q, update, opts).Decode(res)
 	if err != nil {
@@ -306,12 +307,11 @@ func recordContainsPartialData(request Request, rec map[string]interface{}) bool
 
 func (db *Mongodb) getRecordFromDb(request Request, id, id_max int) (res map[string]interface{}, err error) {
 	q := bson.M{"_id": id}
-	c := db.client.Database(request.DbName).Collection(data_collection_name_prefix + request.Stream)
+	c := db.client.Database(request.DbName()).Collection(data_collection_name_prefix + request.Stream)
 	err = c.FindOne(context.TODO(), q, options.FindOne()).Decode(&res)
 	if err != nil {
 		answer := encodeAnswer(id, id_max, "")
-		log_str := "error getting record id " + strconv.Itoa(id) + " for " + request.DbName + " : " + err.Error()
-		logger.Debug(log_str)
+		request.Logger().WithFields(map[string]interface{}{"id": id, "cause": err.Error()}).Debug("error getting record")
 		return res, &DBError{utils.StatusNoData, answer}
 	}
 	return res, err
@@ -327,8 +327,7 @@ func (db *Mongodb) getRecordByIDRaw(request Request, id, id_max int) ([]byte, er
 		return nil, err
 	}
 
-	log_str := "got record id " + strconv.Itoa(id) + " for " + request.DbName
-	logger.Debug(log_str)
+	request.Logger().WithFields(map[string]interface{}{"id": id}).Debug("got record from db")
 
 	record, err := utils.MapToJson(&res)
 	if err != nil {
@@ -392,7 +391,7 @@ func (db *Mongodb) negAckRecord(request Request) ([]byte, error) {
 		return nil, &DBError{utils.StatusWrongInput, err.Error()}
 	}
 
-	err = db.InsertRecordToInprocess(request.DbName, inprocess_collection_name_prefix+request.Stream+"_"+request.GroupId, input.Id, input.Params.DelayMs, 1, true)
+	err = db.InsertRecordToInprocess(request.DbName(), inprocess_collection_name_prefix+request.Stream+"_"+request.GroupId, input.Id, input.Params.DelayMs, 1, true)
 	return []byte(""), err
 }
 
@@ -402,7 +401,7 @@ func (db *Mongodb) ackRecord(request Request) ([]byte, error) {
 	if err != nil {
 		return nil, &DBError{utils.StatusWrongInput, err.Error()}
 	}
-	c := db.client.Database(request.DbName).Collection(acks_collection_name_prefix + request.Stream + "_" + request.GroupId)
+	c := db.client.Database(request.DbName()).Collection(acks_collection_name_prefix + request.Stream + "_" + request.GroupId)
 	_, err = c.InsertOne(context.Background(), &record)
 	if err != nil {
 		if duplicateError(err) {
@@ -411,7 +410,7 @@ func (db *Mongodb) ackRecord(request Request) ([]byte, error) {
 		return nil, err
 	}
 
-	c = db.client.Database(request.DbName).Collection(inprocess_collection_name_prefix + request.Stream + "_" + request.GroupId)
+	c = db.client.Database(request.DbName()).Collection(inprocess_collection_name_prefix + request.Stream + "_" + request.GroupId)
 	_, err_del := c.DeleteOne(context.Background(), bson.M{"_id": record.ID})
 	if err_del != nil {
 		return nil, &DBError{utils.StatusWrongInput, err.Error()}
@@ -425,7 +424,7 @@ func (db *Mongodb) checkDatabaseOperationPrerequisites(request Request) error {
 		return &DBError{utils.StatusServiceUnavailable, no_session_msg}
 	}
 
-	if len(request.DbName) == 0 || len(request.Stream) == 0 {
+	if len(request.DbName()) <= 1 || len(request.Stream) == 0 {
 		return &DBError{utils.StatusWrongInput, "beamtime_id ans stream must be set"}
 	}
 
@@ -445,9 +444,9 @@ func (db *Mongodb) getCurrentPointer(request Request) (LocationPointer, int, err
 	var curPointer LocationPointer
 	err = db.changeField(request, fieldChangeRequest{
 		collectionName: pointer_collection_name,
-		fieldName: pointer_field_name,
-		op:        field_op_inc,
-		max_ind:   max_ind}, &curPointer)
+		fieldName:      pointer_field_name,
+		op:             field_op_inc,
+		max_ind:        max_ind}, &curPointer)
 	if err != nil {
 		return LocationPointer{}, 0, err
 	}
@@ -455,7 +454,7 @@ func (db *Mongodb) getCurrentPointer(request Request) (LocationPointer, int, err
 	return curPointer, max_ind, nil
 }
 
-func (db *Mongodb) getUnProcessedId(dbname string, collection_name string, delayMs int, nResendAttempts int) (int, error) {
+func (db *Mongodb) getUnProcessedId(dbname string, collection_name string, delayMs int, nResendAttempts int, rlog log.Logger) (int, error) {
 	var res InProcessingRecord
 	opts := options.FindOneAndUpdate().SetUpsert(false).SetReturnDocument(options.After)
 	tNow := time.Now().UnixNano()
@@ -476,8 +475,7 @@ func (db *Mongodb) getUnProcessedId(dbname string, collection_name string, delay
 		return 0, err
 	}
 
-	log_str := "got unprocessed id " + strconv.Itoa(res.ID) + " for " + dbname
-	logger.Debug(log_str)
+	rlog.WithFields(map[string]interface{}{"id": res.ID}).Debug("got unprocessed message")
 	return res.ID, nil
 }
 
@@ -527,10 +525,10 @@ func (db *Mongodb) getNextAndMaxIndexesFromInprocessed(request Request, ignoreTi
 	t := db.lastReadFromInprocess[request.Stream+"_"+request.GroupId]
 	dbSessionLock.Unlock()
 	if (t <= tNow-int64(db.settings.ReadFromInprocessPeriod)) || ignoreTimeout {
-		record_ind, err = db.getUnProcessedId(request.DbName, inprocess_collection_name_prefix+request.Stream+"_"+request.GroupId, delayMs, nResendAttempts)
+		record_ind, err = db.getUnProcessedId(request.DbName(), inprocess_collection_name_prefix+request.Stream+"_"+request.GroupId, delayMs, nResendAttempts,
+			request.Logger())
 		if err != nil {
-			log_str := "error getting unprocessed id " + request.DbName + ", groupid: " + request.GroupId + ":" + err.Error()
-			logger.Debug(log_str)
+			request.Logger().WithFields(map[string]interface{}{"cause": err.Error()}).Debug("error getting unprocessed message")
 			return 0, 0, err
 		}
 	}
@@ -552,12 +550,10 @@ func (db *Mongodb) getNextAndMaxIndexesFromInprocessed(request Request, ignoreTi
 func (db *Mongodb) getNextAndMaxIndexesFromCurPointer(request Request) (int, int, error) {
 	curPointer, max_ind, err := db.getCurrentPointer(request)
 	if err != nil {
-		log_str := "error getting next pointer for " + request.DbName + ", groupid: " + request.GroupId + ":" + err.Error()
-		logger.Debug(log_str)
+		request.Logger().WithFields(map[string]interface{}{"cause": err.Error()}).Debug("error getting next pointer")
 		return 0, 0, err
 	}
-	log_str := "got next pointer " + strconv.Itoa(curPointer.Value) + " for " + request.DbName + ", groupid: " + request.GroupId
-	logger.Debug(log_str)
+	request.Logger().WithFields(map[string]interface{}{"id": curPointer.Value}).Debug("got next pointer")
 	return curPointer.Value, max_ind, nil
 }
 
@@ -622,8 +618,7 @@ func checkStreamFinished(request Request, id, id_max int, data map[string]interf
 	if !ok || !r.FinishedStream {
 		return nil
 	}
-	log_str := "reached end of stream " + request.Stream + " , next_stream: " + r.NextStream
-	logger.Debug(log_str)
+	request.Logger().WithFields(map[string]interface{}{"nextStream": r.NextStream}).Debug("reached end of stream")
 
 	answer := encodeAnswer(r.ID-1, r.ID-1, r.NextStream)
 	return &DBError{utils.StatusNoData, answer}
@@ -641,7 +636,7 @@ func (db *Mongodb) getNextRecord(request Request) ([]byte, error) {
 	}
 
 	if err == nil {
-		err_update := db.InsertToInprocessIfNeeded(request.DbName, inprocess_collection_name_prefix+request.Stream+"_"+request.GroupId, nextInd, request.ExtraParam)
+		err_update := db.InsertToInprocessIfNeeded(request.DbName(), inprocess_collection_name_prefix+request.Stream+"_"+request.GroupId, nextInd, request.ExtraParam)
 		if err_update != nil {
 			return nil, err_update
 		}
@@ -666,10 +661,10 @@ func (db *Mongodb) getLastRecordInGroup(request Request) ([]byte, error) {
 	var res map[string]interface{}
 	err = db.changeField(request, fieldChangeRequest{
 		collectionName: last_message_collection_name,
-		fieldName: last_message_field_name,
-		op:        field_op_set,
-		max_ind:   max_ind,
-		val:       max_ind,
+		fieldName:      last_message_field_name,
+		op:             field_op_set,
+		max_ind:        max_ind,
+		val:            max_ind,
 	}, &res)
 	if err != nil {
 		return nil, err
@@ -689,7 +684,7 @@ func getSizeFilter(request Request) bson.M {
 }
 
 func (db *Mongodb) getSize(request Request) ([]byte, error) {
-	c := db.client.Database(request.DbName).Collection(data_collection_name_prefix + request.Stream)
+	c := db.client.Database(request.DbName()).Collection(data_collection_name_prefix + request.Stream)
 
 	filter := getSizeFilter(request)
 	size, err := c.CountDocuments(context.TODO(), filter, options.Count())
@@ -716,7 +711,7 @@ func (db *Mongodb) resetCounter(request Request) ([]byte, error) {
 		return []byte(""), err
 	}
 
-	c := db.client.Database(request.DbName).Collection(inprocess_collection_name_prefix + request.Stream + "_" + request.GroupId)
+	c := db.client.Database(request.DbName()).Collection(inprocess_collection_name_prefix + request.Stream + "_" + request.GroupId)
 	_, err_del := c.DeleteMany(context.Background(), bson.M{"_id": bson.M{"$gte": id}})
 	if err_del != nil {
 		return nil, &DBError{utils.StatusWrongInput, err_del.Error()}
@@ -743,40 +738,35 @@ func (db *Mongodb) getMeta(request Request) ([]byte, error) {
 	}
 	q := bson.M{"_id": id}
 	var res map[string]interface{}
-	c := db.client.Database(request.DbName).Collection(meta_collection_name)
+	c := db.client.Database(request.DbName()).Collection(meta_collection_name)
 	err = c.FindOne(context.TODO(), q, options.FindOne()).Decode(&res)
 	if err != nil {
-		log_str := "error getting meta for " + id + " in " + request.DbName + " : " + err.Error()
-		logger.Debug(log_str)
+		request.Logger().WithFields(map[string]interface{}{"id": id, "cause": err.Error()}).Debug("error getting meta")
 		return nil, &DBError{utils.StatusNoData, err.Error()}
 	}
 	userMeta, ok := res["meta"]
 	if !ok {
-		log_str := "error getting meta for " + id + " in " + request.DbName + " : cannot parse database response"
-		logger.Error(log_str)
-		return nil, errors.New(log_str)
+		request.Logger().WithFields(map[string]interface{}{"id": id, "cause": "cannot parse database response"}).Debug("error getting meta")
+		return nil, errors.New("cannot get metadata")
 	}
-	log_str := "got metadata for " + id + " in " + request.DbName
-	logger.Debug(log_str)
+	request.Logger().WithFields(map[string]interface{}{"id": id}).Error("got metadata")
 	return utils.MapToJson(&userMeta)
 }
 
-func (db *Mongodb) processQueryError(query, dbname string, err error) ([]byte, error) {
-	log_str := "error processing query: " + query + " for " + dbname + " : " + err.Error()
-	logger.Debug(log_str)
+func (db *Mongodb) processQueryError(query, dbname string, err error, rlog log.Logger) ([]byte, error) {
+	rlog.WithFields(map[string]interface{}{"query": query, "cause": err.Error()}).Debug("error processing query")
 	return nil, &DBError{utils.StatusNoData, err.Error()}
 }
 
 func (db *Mongodb) queryMessages(request Request) ([]byte, error) {
 	var res []map[string]interface{}
-	q, sort, err := db.BSONFromSQL(request.DbName, request.ExtraParam)
+	q, sort, err := db.BSONFromSQL(request.DbName(), request.ExtraParam)
 	if err != nil {
-		log_str := "error parsing query: " + request.ExtraParam + " for " + request.DbName + " : " + err.Error()
-		logger.Debug(log_str)
+		request.Logger().WithFields(map[string]interface{}{"query": request.ExtraParam, "cause": err.Error()}).Debug("error parsing query")
 		return nil, &DBError{utils.StatusWrongInput, err.Error()}
 	}
 
-	c := db.client.Database(request.DbName).Collection(data_collection_name_prefix + request.Stream)
+	c := db.client.Database(request.DbName()).Collection(data_collection_name_prefix + request.Stream)
 	opts := options.Find()
 
 	if len(sort) > 0 {
@@ -786,15 +776,15 @@ func (db *Mongodb) queryMessages(request Request) ([]byte, error) {
 
 	cursor, err := c.Find(context.TODO(), q, opts)
 	if err != nil {
-		return db.processQueryError(request.ExtraParam, request.DbName, err)
+		return db.processQueryError(request.ExtraParam, request.DbName(), err, request.Logger())
 	}
 	err = cursor.All(context.TODO(), &res)
 	if err != nil {
-		return db.processQueryError(request.ExtraParam, request.DbName, err)
+		return db.processQueryError(request.ExtraParam, request.DbName(), err, request.Logger())
 	}
 
-	log_str := "processed query " + request.ExtraParam + " for " + request.DbName + " ,found" + strconv.Itoa(len(res)) + " records"
-	logger.Debug(log_str)
+	request.Logger().WithFields(map[string]interface{}{"query": request.ExtraParam, "recordsFound": len(res)}).Debug("processed query")
+
 	if res != nil {
 		return utils.MapToJson(&res)
 	} else {
@@ -880,11 +870,11 @@ func (db *Mongodb) nacks(request Request) ([]byte, error) {
 }
 
 func (db *Mongodb) deleteCollection(request Request, name string) error {
-	return db.client.Database(request.DbName).Collection(name).Drop(context.Background())
+	return db.client.Database(request.DbName()).Collection(name).Drop(context.Background())
 }
 
 func (db *Mongodb) collectionExist(request Request, name string) (bool, error) {
-	result, err := db.client.Database(request.DbName).ListCollectionNames(context.TODO(), bson.M{"name": name})
+	result, err := db.client.Database(request.DbName()).ListCollectionNames(context.TODO(), bson.M{"name": name})
 	if err != nil {
 		return false, err
 	}
@@ -910,7 +900,7 @@ func (db *Mongodb) deleteDataCollection(errorOnNotexist bool, request Request) e
 
 func (db *Mongodb) deleteDocumentsInCollection(request Request, collection string, field string, pattern string) error {
 	filter := bson.M{field: bson.D{{"$regex", primitive.Regex{Pattern: pattern, Options: "i"}}}}
-	_, err := db.client.Database(request.DbName).Collection(collection).DeleteMany(context.TODO(), filter)
+	_, err := db.client.Database(request.DbName()).Collection(collection).DeleteMany(context.TODO(), filter)
 	return err
 }
 
@@ -923,7 +913,7 @@ func escapeQuery(query string) (res string) {
 }
 
 func (db *Mongodb) deleteCollectionsWithPrefix(request Request, prefix string) error {
-	cols, err := db.client.Database(request.DbName).ListCollectionNames(context.TODO(), bson.M{"name": bson.D{
+	cols, err := db.client.Database(request.DbName()).ListCollectionNames(context.TODO(), bson.M{"name": bson.D{
 		{"$regex", primitive.Regex{Pattern: "^" + escapeQuery(prefix), Options: "i"}}}})
 	if err != nil {
 		return err
@@ -966,7 +956,7 @@ func (db *Mongodb) deleteStream(request Request) ([]byte, error) {
 		return nil, &DBError{utils.StatusWrongInput, "wrong params: " + request.ExtraParam}
 	}
 	if !*params.DeleteMeta {
-		logger.Debug("skipping delete stream meta for " + request.Stream + " in " + request.DbName)
+		request.Logger().Debug("skipping delete stream meta")
 		return nil, nil
 	}
 
@@ -980,7 +970,7 @@ func (db *Mongodb) deleteStream(request Request) ([]byte, error) {
 }
 
 func (db *Mongodb) lastAck(request Request) ([]byte, error) {
-	c := db.client.Database(request.DbName).Collection(acks_collection_name_prefix + request.Stream + "_" + request.GroupId)
+	c := db.client.Database(request.DbName()).Collection(acks_collection_name_prefix + request.Stream + "_" + request.GroupId)
 	opts := options.FindOne().SetSort(bson.M{"_id": -1}).SetReturnKey(true)
 	result := LastAck{0}
 	var q bson.M = nil
@@ -1047,7 +1037,7 @@ func extractNacsFromCursor(err error, cursor *mongo.Cursor) ([]int, error) {
 }
 
 func (db *Mongodb) getNacks(request Request, min_index, max_index int) ([]int, error) {
-	c := db.client.Database(request.DbName).Collection(acks_collection_name_prefix + request.Stream + "_" + request.GroupId)
+	c := db.client.Database(request.DbName()).Collection(acks_collection_name_prefix + request.Stream + "_" + request.GroupId)
 
 	if res, err, ok := db.canAvoidDbRequest(min_index, max_index, c); ok {
 		return res, err
@@ -1062,7 +1052,7 @@ func (db *Mongodb) getNacks(request Request, min_index, max_index int) ([]int, e
 func (db *Mongodb) getStreams(request Request) ([]byte, error) {
 	rec, err := streams.getStreams(db, request)
 	if err != nil {
-		return db.processQueryError("get streams", request.DbName, err)
+		return db.processQueryError("get streams", request.DbName(), err, request.Logger())
 	}
 	return json.Marshal(&rec)
 }
diff --git a/broker/src/asapo_broker/database/mongodb_streams.go b/broker/src/asapo_broker/database/mongodb_streams.go
index a182f5080409c00116af1958d2b65dcc49983e75..b57f9973ddfa997af4706f75ece08e6d5969fa61 100644
--- a/broker/src/asapo_broker/database/mongodb_streams.go
+++ b/broker/src/asapo_broker/database/mongodb_streams.go
@@ -36,7 +36,7 @@ var streams = Streams{lastSynced: make(map[string]time.Time, 0),lastUpdated: mak
 var streamsLock sync.Mutex
 
 func (ss *Streams) tryGetFromCache(db_name string, updatePeriodMs int) (StreamsRecord, error) {
-	if time.Now().Sub(ss.lastUpdated[db_name]).Milliseconds() > int64(updatePeriodMs) {
+	if time.Now().Sub(ss.lastUpdated[db_name]).Milliseconds() >= int64(updatePeriodMs) {
 		return StreamsRecord{}, errors.New("cache expired")
 	}
 	rec, ok := ss.records[db_name]
@@ -265,9 +265,9 @@ func (ss *Streams) getStreams(db *Mongodb, request Request) (StreamsRecord, erro
 	}
 
 	streamsLock.Lock()
-	rec, err := ss.tryGetFromCache(request.DbName, db.settings.UpdateStreamCachePeriodMs)
+	rec, err := ss.tryGetFromCache(request.DbName(), db.settings.UpdateStreamCachePeriodMs)
 	if err != nil {
-		rec, err = ss.updateFromDb(db, request.DbName)
+		rec, err = ss.updateFromDb(db, request.DbName())
 	}
 	streamsLock.Unlock()
 	if err != nil {
diff --git a/broker/src/asapo_broker/database/mongodb_test.go b/broker/src/asapo_broker/database/mongodb_test.go
index 09bf8ab1cb9f16605f5261ba17e29929b08b6e08..d7f38ad4745aac84d3a3da6734a171fb63c2b646 100644
--- a/broker/src/asapo_broker/database/mongodb_test.go
+++ b/broker/src/asapo_broker/database/mongodb_test.go
@@ -36,7 +36,9 @@ type TestDataset struct {
 
 var db Mongodb
 
-const dbname = "12345"
+const beamtime = "bt"
+const datasource = "12345"
+const dbname = "bt_12345"
 const collection = "stream"
 const collection2 = "stream2"
 const dbaddress = "127.0.0.1:27017"
@@ -100,17 +102,17 @@ func TestMongoDBConnectOK(t *testing.T) {
 }
 
 func TestMongoDBGetNextErrorWhenNotConnected(t *testing.T) {
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
 	assert.Equal(t, utils.StatusServiceUnavailable, err.(*DBError).Code)
 }
 
 func TestMongoDBGetMetaErrorWhenNotConnected(t *testing.T) {
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "meta", ExtraParam: "0"})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, Op: "meta", ExtraParam: "0"})
 	assert.Equal(t, utils.StatusServiceUnavailable, err.(*DBError).Code)
 }
 
 func TestMongoDBQueryMessagesErrorWhenNotConnected(t *testing.T) {
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "querymessages", ExtraParam: "0"})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, Op: "querymessages", ExtraParam: "0"})
 	assert.Equal(t, utils.StatusServiceUnavailable, err.(*DBError).Code)
 }
 
@@ -124,7 +126,7 @@ func TestMongoDBGetNextErrorWhenWrongDatabasename(t *testing.T) {
 func TestMongoDBGetNextErrorWhenNonExistingDatacollectionname(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: "bla", GroupId: groupId, Op: "next"})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: "bla", GroupId: groupId, Op: "next"})
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":0,\"id_max\":0,\"next_stream\":\"\"}", err.Error())
 }
@@ -132,7 +134,7 @@ func TestMongoDBGetNextErrorWhenNonExistingDatacollectionname(t *testing.T) {
 func TestMongoDBGetLastErrorWhenNonExistingDatacollectionname(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: "bla", GroupId: groupId, Op: "last"})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: "bla", GroupId: groupId, Op: "last"})
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":0,\"id_max\":0,\"next_stream\":\"\"}", err.Error())
 }
@@ -140,7 +142,7 @@ func TestMongoDBGetLastErrorWhenNonExistingDatacollectionname(t *testing.T) {
 func TestMongoDBGetByIdErrorWhenNoData(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "id", ExtraParam: "2"})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "id", ExtraParam: "2"})
 
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":2,\"id_max\":0,\"next_stream\":\"\"}", err.Error())
@@ -150,7 +152,7 @@ func TestMongoDBGetNextErrorWhenRecordNotThereYet(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec2)
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":2,\"next_stream\":\"\"}", err.Error())
 }
@@ -159,7 +161,7 @@ func TestMongoDBGetNextOK(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec1_expect), string(res))
 }
@@ -170,8 +172,8 @@ func TestMongoDBGetNextErrorOnFinishedStream(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec_finished)
 
-	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
 
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1,\"next_stream\":\"next1\"}", err.(*DBError).Message)
@@ -183,9 +185,9 @@ func TestMongoDBGetNextErrorOnFinishedStreamAlways(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec_finished)
 
-	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
-	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
+	db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
 
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1,\"next_stream\":\"next1\"}", err.(*DBError).Message)
@@ -199,7 +201,7 @@ func TestMongoDBGetByIdErrorOnFinishedStream(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec_finished)
 
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "id", ExtraParam: "2"})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "id", ExtraParam: "2"})
 
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1,\"next_stream\":\"next1\"}", err.(*DBError).Message)
@@ -211,7 +213,7 @@ func TestMongoDBGetLastErrorOnFinishedStream(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec_finished)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "last"})
 	fmt.Println(string(res))
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1,\"next_stream\":\"next1\"}", err.(*DBError).Message)
@@ -221,8 +223,8 @@ func TestMongoDBGetNextErrorOnNoMoreData(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
-	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
 
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1,\"next_stream\":\"\"}", err.(*DBError).Message)
@@ -233,8 +235,8 @@ func TestMongoDBGetNextCorrectOrder(t *testing.T) {
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec2)
 	db.insertRecord(dbname, collection, &rec1)
-	res1, _ := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
-	res2, _ := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	res1, _ := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
+	res2, _ := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
 	assert.Equal(t, string(rec1_expect), string(res1))
 	assert.Equal(t, string(rec2_expect), string(res2))
 }
@@ -271,7 +273,7 @@ func getRecords(n int, resend bool) []int {
 	for i := 0; i < n; i++ {
 		go func() {
 			defer wg.Done()
-			res_bin, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: extra_param})
+			res_bin, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: extra_param})
 			if err != nil {
 				fmt.Println("error at read ", i)
 			}
@@ -316,13 +318,13 @@ func TestMongoDBGetLastAfterErasingDatabase(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	insertRecords(10)
-	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
 	db.dropDatabase(dbname)
 
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last", ExtraParam: "0"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "last", ExtraParam: "0"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec2_expect), string(res))
 }
@@ -331,7 +333,7 @@ func TestMongoDBGetNextAfterErasingDatabase(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	insertRecords(200)
-	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
 	db.dropDatabase(dbname)
 
 	n := 100
@@ -344,10 +346,10 @@ func TestMongoDBGetNextEmptyAfterErasingDatabase(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	insertRecords(10)
-	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
 	db.dropDatabase(dbname)
 
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":0,\"id_max\":0,\"next_stream\":\"\"}", err.Error())
 }
@@ -357,7 +359,7 @@ func TestMongoDBgetRecordByID(t *testing.T) {
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "id", ExtraParam: "1"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "id", ExtraParam: "1"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec1_expect), string(res))
 }
@@ -366,7 +368,7 @@ func TestMongoDBgetRecordByIDFails(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "id", ExtraParam: "2"})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "id", ExtraParam: "2"})
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":2,\"id_max\":1,\"next_stream\":\"\"}", err.Error())
 }
@@ -375,7 +377,7 @@ func TestMongoDBGetRecordNext(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec1_expect), string(res))
 }
@@ -386,8 +388,8 @@ func TestMongoDBGetRecordNextMultipleCollections(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection2, &rec_dataset1)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
-	res_string, err2 := db.ProcessRequest(Request{DbName: dbname, Stream: collection2, GroupId: groupId, Op: "next", DatasetOp: true})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
+	res_string, err2 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection2, GroupId: groupId, Op: "next", DatasetOp: true})
 	var res_ds TestDataset
 	json.Unmarshal(res_string, &res_ds)
 
@@ -403,7 +405,7 @@ func TestMongoDBGetRecordID(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "id", ExtraParam: "1"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "id", ExtraParam: "1"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec1_expect), string(res))
 }
@@ -412,7 +414,7 @@ func TestMongoDBWrongOp(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "bla"})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "bla"})
 	assert.NotNil(t, err)
 }
 
@@ -422,7 +424,7 @@ func TestMongoDBGetRecordLast(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last", ExtraParam: "0"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "last", ExtraParam: "0"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec2_expect), string(res))
 }
@@ -433,13 +435,13 @@ func TestMongoDBGetNextAfterGetLastCorrect(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last", ExtraParam: "0"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "last", ExtraParam: "0"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec2_expect), string(res))
 
 	db.insertRecord(dbname, collection, &rec3)
 
-	res, err = db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	res, err = db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec1_expect), string(res))
 
@@ -449,14 +451,14 @@ func TestMongoDBGetGetLastInGroupCorrect(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
-	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"}) // to check it does not influence groupedlast
+	db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"}) // to check it does not influence groupedlast
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "groupedlast", ExtraParam: ""})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "groupedlast", ExtraParam: ""})
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec1_expect), string(res))
 
 // first record - ok, then error
-	res, err = db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "groupedlast", ExtraParam: ""})
+	res, err = db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "groupedlast", ExtraParam: ""})
 	assert.NotNil(t, err)
 	if err != nil {
 		assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
@@ -464,15 +466,15 @@ func TestMongoDBGetGetLastInGroupCorrect(t *testing.T) {
 	}
 // second record - ok, then error
 	db.insertRecord(dbname, collection, &rec2)
-	res, err = db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "groupedlast", ExtraParam: ""})
+	res, err = db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "groupedlast", ExtraParam: ""})
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec2_expect), string(res))
-	res, err = db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "groupedlast", ExtraParam: ""})
+	res, err = db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "groupedlast", ExtraParam: ""})
 	assert.NotNil(t, err)
 
 // stream finished - immediately error
 	db.insertRecord(dbname, collection, &rec_finished3)
-	res, err = db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "groupedlast", ExtraParam: ""})
+	res, err = db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "groupedlast", ExtraParam: ""})
 	assert.NotNil(t, err)
 	if err != nil {
 		assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
@@ -487,9 +489,9 @@ func TestMongoDBGetGetLastInGroupImmediateErrorOnFinishStream(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
 	db.insertRecord(dbname, collection, &rec_finished3)
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "groupedlast", ExtraParam: ""})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "groupedlast", ExtraParam: ""})
 	assert.NotNil(t, err)
-	_, err = db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "groupedlast", ExtraParam: ""})
+	_, err = db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "groupedlast", ExtraParam: ""})
 	assert.NotNil(t, err)
 	if err != nil {
 		assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
@@ -506,7 +508,7 @@ func TestMongoDBGetSize(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec2)
 	db.insertRecord(dbname, collection, &rec3)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "size"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, Op: "size"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(recs1_expect), string(res))
 }
@@ -517,7 +519,7 @@ func TestMongoDBGetSizeWithFinishedStream(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec_finished)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "size"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, Op: "size"})
 	assert.Nil(t, err)
 	var rec_expect, _ = json.Marshal(&SizeRecord{1})
 	assert.Equal(t, string(rec_expect), string(res))
@@ -528,10 +530,10 @@ func TestMongoDBGetSizeForDatasets(t *testing.T) {
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
 
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "size", ExtraParam: "false"})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, Op: "size", ExtraParam: "false"})
 	assert.Equal(t, utils.StatusWrongInput, err.(*DBError).Code)
 
-	_, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "size", ExtraParam: "true"})
+	_, err1 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, Op: "size", ExtraParam: "true"})
 	assert.Equal(t, utils.StatusWrongInput, err1.(*DBError).Code)
 }
 
@@ -541,7 +543,7 @@ func TestMongoDBGetSizeForDatasetsWithFinishedStream(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
 	db.insertRecord(dbname, collection, &rec_finished)
 
-	res, _ := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "size", ExtraParam: "true"})
+	res, _ := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, Op: "size", ExtraParam: "true"})
 
 	var rec_expect, _ = json.Marshal(&SizeRecord{1})
 	assert.Equal(t, string(rec_expect), string(res))
@@ -556,7 +558,7 @@ func TestMongoDBGetSizeDataset(t *testing.T) {
 
 	size2_expect, _ := json.Marshal(SizeRecord{2})
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "size", ExtraParam: "true"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, Op: "size", ExtraParam: "true"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(size2_expect), string(res))
 }
@@ -565,7 +567,7 @@ func TestMongoDBGetSizeNoRecords(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "size"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, Op: "size"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(recs2_expect), string(res))
 }
@@ -583,7 +585,7 @@ func TestMongoPingNotConected(t *testing.T) {
 }
 
 func TestMongoDBgetRecordByIDNotConnected(t *testing.T) {
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "id", ExtraParam: "1"})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "id", ExtraParam: "1"})
 	assert.Equal(t, utils.StatusServiceUnavailable, err.(*DBError).Code)
 }
 
@@ -593,15 +595,15 @@ func TestMongoDBResetCounter(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
 
-	res1, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	res1, err1 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
 
 	assert.Nil(t, err1)
 	assert.Equal(t, string(rec1_expect), string(res1))
 
-	_, err_reset := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "resetcounter", ExtraParam: "1"})
+	_, err_reset := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "resetcounter", ExtraParam: "1"})
 	assert.Nil(t, err_reset)
 
-	res2, err2 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	res2, err2 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
 
 	assert.Nil(t, err2)
 	assert.Equal(t, string(rec2_expect), string(res2))
@@ -613,7 +615,7 @@ func TestMongoDBGetMetaBtOK(t *testing.T) {
 	rec_expect, _ := json.Marshal(recbt.Meta)
 	db.insertMeta(dbname, &recbt)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: "whatever", Op: "meta", ExtraParam: "0"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: "whatever", Op: "meta", ExtraParam: "0"})
 
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec_expect), string(res))
@@ -625,7 +627,7 @@ func TestMongoDBGetMetaStOK(t *testing.T) {
 	rec_expect, _ := json.Marshal(recst.Meta)
 	db.insertMeta(dbname, &recst)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "meta", ExtraParam: "1"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, Op: "meta", ExtraParam: "1"})
 
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec_expect), string(res))
@@ -635,7 +637,7 @@ func TestMongoDBGetMetaErr(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "meta", ExtraParam: "1"})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, Op: "meta", ExtraParam: "1"})
 	assert.NotNil(t, err)
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 }
@@ -711,7 +713,7 @@ func TestMongoDBQueryMessagesOK(t *testing.T) {
 		//			continue
 		//		}
 
-		res_string, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "querymessages", ExtraParam: test.query})
+		res_string, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, Op: "querymessages", ExtraParam: test.query})
 		var res []TestRecordMeta
 		json.Unmarshal(res_string, &res)
 		//		fmt.Println(string(res_string))
@@ -730,7 +732,7 @@ func TestMongoDBQueryMessagesOnEmptyDatabase(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	for _, test := range tests {
-		res_string, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "querymessages", ExtraParam: test.query})
+		res_string, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, Op: "querymessages", ExtraParam: test.query})
 		var res []TestRecordMeta
 		json.Unmarshal(res_string, &res)
 		assert.Equal(t, 0, len(res))
@@ -756,7 +758,7 @@ func TestMongoDBGetDataset(t *testing.T) {
 
 	db.insertRecord(dbname, collection, &rec_dataset1)
 
-	res_string, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", DatasetOp: true})
+	res_string, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", DatasetOp: true})
 
 	assert.Nil(t, err)
 
@@ -772,7 +774,7 @@ func TestMongoDBNoDataOnNotCompletedFirstDataset(t *testing.T) {
 
 	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
 
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", DatasetOp: true})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", DatasetOp: true})
 
 	assert.Equal(t, utils.StatusPartialData, err.(*DBError).Code)
 	var res TestDataset
@@ -787,8 +789,8 @@ func TestMongoDBNoDataOnNotCompletedNextDataset(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
 	db.insertRecord(dbname, collection, &rec_dataset2_incomplete)
 
-	_, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", DatasetOp: true})
-	_, err2 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", DatasetOp: true})
+	_, err1 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", DatasetOp: true})
+	_, err2 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", DatasetOp: true})
 
 	assert.Equal(t, utils.StatusPartialData, err1.(*DBError).Code)
 	assert.Equal(t, utils.StatusPartialData, err2.(*DBError).Code)
@@ -804,7 +806,7 @@ func TestMongoDBGetRecordLastDataSetSkipsIncompleteSets(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec_dataset1)
 	db.insertRecord(dbname, collection, &rec_dataset2)
 
-	res_string, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last", DatasetOp: true, ExtraParam: "0"})
+	res_string, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "last", DatasetOp: true, ExtraParam: "0"})
 
 	assert.Nil(t, err)
 
@@ -821,7 +823,7 @@ func TestMongoDBGetRecordLastDataSetReturnsIncompleteSets(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec_dataset1)
 	db.insertRecord(dbname, collection, &rec_dataset2)
 
-	res_string, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last",
+	res_string, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "last",
 		DatasetOp: true, MinDatasetSize: 3, ExtraParam: "0"})
 
 	assert.Nil(t, err)
@@ -839,7 +841,7 @@ func TestMongoDBGetRecordLastDataSetSkipsIncompleteSetsWithMinSize(t *testing.T)
 	db.insertRecord(dbname, collection, &rec_dataset1)
 	db.insertRecord(dbname, collection, &rec_dataset2_incomplete3)
 
-	res_string, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last",
+	res_string, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "last",
 		DatasetOp: true, MinDatasetSize: 3, ExtraParam: "0"})
 
 	assert.Nil(t, err)
@@ -856,7 +858,7 @@ func TestMongoDBGetRecordLastDataSetWithFinishedStream(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec_dataset1)
 	db.insertRecord(dbname, collection, &rec_finished)
 
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last",
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "last",
 		DatasetOp: true, ExtraParam: "0"})
 
 	assert.NotNil(t, err)
@@ -873,7 +875,7 @@ func TestMongoDBGetRecordLastDataSetWithIncompleteDatasetsAndFinishedStreamRetur
 	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
 	db.insertRecord(dbname, collection, &rec_finished)
 
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last",
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "last",
 		DatasetOp: true, MinDatasetSize: 2, ExtraParam: "0"})
 
 	assert.NotNil(t, err)
@@ -890,7 +892,7 @@ func TestMongoDBGetRecordLastDataSetOK(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec_dataset1)
 	db.insertRecord(dbname, collection, &rec_dataset3)
 
-	res_string, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last", DatasetOp: true, ExtraParam: "0"})
+	res_string, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "last", DatasetOp: true, ExtraParam: "0"})
 
 	assert.Nil(t, err)
 
@@ -905,7 +907,7 @@ func TestMongoDBGetDatasetID(t *testing.T) {
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec_dataset1)
 
-	res_string, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "id", DatasetOp: true, ExtraParam: "1"})
+	res_string, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "id", DatasetOp: true, ExtraParam: "1"})
 
 	assert.Nil(t, err)
 
@@ -921,7 +923,7 @@ func TestMongoDBErrorOnIncompleteDatasetID(t *testing.T) {
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
 
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "id", DatasetOp: true, ExtraParam: "1"})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "id", DatasetOp: true, ExtraParam: "1"})
 
 	assert.Equal(t, utils.StatusPartialData, err.(*DBError).Code)
 
@@ -937,7 +939,7 @@ func TestMongoDBOkOnIncompleteDatasetID(t *testing.T) {
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
 
-	res_string, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "id", DatasetOp: true, MinDatasetSize: 3, ExtraParam: "1"})
+	res_string, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "id", DatasetOp: true, MinDatasetSize: 3, ExtraParam: "1"})
 
 	assert.Nil(t, err)
 
@@ -984,7 +986,7 @@ func TestMongoDBListStreams(t *testing.T) {
 		}
 		var rec_streams_expect, _ = json.Marshal(test.expectedStreams)
 
-		res, err := db.ProcessRequest(Request{DbName: dbname, Stream: "0", Op: "streams", ExtraParam: utils.EncodeTwoStrings(test.from,"")})
+		res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: "0", Op: "streams", ExtraParam: utils.EncodeTwoStrings(test.from,"")})
 		if test.ok {
 			assert.Nil(t, err, test.test)
 			assert.Equal(t, string(rec_streams_expect), string(res), test.test)
@@ -1004,7 +1006,7 @@ func TestMongoDBAckMessage(t *testing.T) {
 
 	query_str := "{\"Id\":1,\"Op\":\"ackmessage\"}"
 
-	request := Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str}
+	request := Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str}
 	res, err := db.ProcessRequest(request)
 	nacks, _ := db.getNacks(request, 0, 0)
 	assert.Nil(t, err)
@@ -1041,12 +1043,12 @@ func TestMongoDBNacks(t *testing.T) {
 			db.insertRecord(dbname, collection, &rec_finished11)
 		}
 		if test.ackRecords {
-			db.ackRecord(Request{DbName: dbname, Stream: collection, GroupId: groupId, ExtraParam: "{\"Id\":2,\"Op\":\"ackmessage\"}"})
-			db.ackRecord(Request{DbName: dbname, Stream: collection, GroupId: groupId, ExtraParam: "{\"Id\":3,\"Op\":\"ackmessage\"}"})
-			db.ackRecord(Request{DbName: dbname, Stream: collection, GroupId: groupId, ExtraParam: "{\"Id\":4,\"Op\":\"ackmessage\"}"})
+			db.ackRecord(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, ExtraParam: "{\"Id\":2,\"Op\":\"ackmessage\"}"})
+			db.ackRecord(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, ExtraParam: "{\"Id\":3,\"Op\":\"ackmessage\"}"})
+			db.ackRecord(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, ExtraParam: "{\"Id\":4,\"Op\":\"ackmessage\"}"})
 		}
 
-		res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "nacks", ExtraParam: test.rangeString})
+		res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "nacks", ExtraParam: test.rangeString})
 		if test.ok {
 			assert.Nil(t, err, test.test)
 			assert.Equal(t, test.resString, string(res), test.test)
@@ -1076,12 +1078,12 @@ func TestMongoDBLastAcks(t *testing.T) {
 			db.insertRecord(dbname, collection, &rec_finished11)
 		}
 		if test.ackRecords {
-			db.ackRecord(Request{DbName: dbname, Stream: collection, GroupId: groupId, ExtraParam: "{\"Id\":2,\"Op\":\"ackmessage\"}"})
-			db.ackRecord(Request{DbName: dbname, Stream: collection, GroupId: groupId, ExtraParam: "{\"Id\":3,\"Op\":\"ackmessage\"}"})
-			db.ackRecord(Request{DbName: dbname, Stream: collection, GroupId: groupId, ExtraParam: "{\"Id\":4,\"Op\":\"ackmessage\"}"})
+			db.ackRecord(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, ExtraParam: "{\"Id\":2,\"Op\":\"ackmessage\"}"})
+			db.ackRecord(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, ExtraParam: "{\"Id\":3,\"Op\":\"ackmessage\"}"})
+			db.ackRecord(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, ExtraParam: "{\"Id\":4,\"Op\":\"ackmessage\"}"})
 		}
 
-		res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "lastack"})
+		res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "lastack"})
 		assert.Nil(t, err, test.test)
 		assert.Equal(t, test.resString, string(res), test.test)
 		cleanup()
@@ -1095,8 +1097,8 @@ func TestMongoDBGetNextUsesInprocessedImmedeatly(t *testing.T) {
 	err := db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
-	res1, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
+	res1, err1 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
 
 	assert.Nil(t, err)
 	assert.Nil(t, err1)
@@ -1109,9 +1111,9 @@ func TestMongoDBGetNextUsesInprocessedNumRetry(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	err := db.insertRecord(dbname, collection, &rec1)
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
-	res1, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
-	_, err2 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
+	res1, err1 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
+	_, err2 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
 
 	assert.Nil(t, err)
 	assert.Nil(t, err1)
@@ -1129,10 +1131,10 @@ func TestMongoDBGetNextUsesInprocessedAfterTimeout(t *testing.T) {
 	defer cleanup()
 	err := db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
-	res1, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
+	res1, err1 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
 	time.Sleep(time.Second)
-	res2, err2 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
+	res2, err2 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
 	assert.Nil(t, err)
 	assert.Nil(t, err1)
 	assert.Nil(t, err2)
@@ -1148,10 +1150,10 @@ func TestMongoDBGetNextReturnsToNormalAfterUsesInprocessed(t *testing.T) {
 	err := db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
 	db.insertRecord(dbname, collection, &rec_finished3)
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
 	time.Sleep(time.Second)
-	res1, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
-	res2, err2 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
+	res1, err1 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
+	res2, err2 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
 	assert.Nil(t, err)
 	assert.Nil(t, err1)
 	assert.Nil(t, err2)
@@ -1166,8 +1168,8 @@ func TestMongoDBGetNextUsesInprocessedImmedeatlyIfFinishedStream(t *testing.T) {
 	defer cleanup()
 	err := db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec_finished)
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
-	res1, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
+	res1, err1 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
 	assert.Nil(t, err)
 	assert.Nil(t, err1)
 	assert.Equal(t, string(rec1_expect), string(res))
@@ -1180,9 +1182,9 @@ func TestMongoDBGetNextUsesInprocessedImmedeatlyIfEndofStream(t *testing.T) {
 	defer cleanup()
 	err := db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
-	res1, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
-	res2, err2 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
+	res1, err1 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
+	res2, err2 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
 	assert.Nil(t, err)
 	assert.Nil(t, err1)
 	assert.Nil(t, err2)
@@ -1196,11 +1198,11 @@ func TestMongoDBAckDeletesInprocessed(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
-	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
+	db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
 	query_str := "{\"Id\":1,\"Op\":\"ackmessage\"}"
 
-	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str})
-	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
+	db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str})
+	_, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
 	assert.NotNil(t, err)
 	if err != nil {
 		assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
@@ -1214,8 +1216,8 @@ func TestMongoDBAckTwiceErrors(t *testing.T) {
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
 	query_str := "{\"Id\":1,\"Op\":\"ackmessage\"}"
-	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str})
-	_,err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str})
+	db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str})
+	_,err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str})
 	assert.Equal(t, utils.StatusWrongInput, err.(*DBError).Code)
 }
 
@@ -1234,14 +1236,14 @@ func TestMongoDBNegAck(t *testing.T) {
 	inputParams.Params.DelayMs = 0
 
 	db.insertRecord(dbname, collection, &rec1)
-	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})
 	bparam, _ := json.Marshal(&inputParams)
 
-	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "negackmessage", ExtraParam: string(bparam)})
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"}) // first time message from negack
-	_, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})  // second time nothing
-	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "negackmessage", ExtraParam: string(bparam)})
-	_, err2 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"}) // second time nothing
+	db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "negackmessage", ExtraParam: string(bparam)})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"}) // first time message from negack
+	_, err1 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"})  // second time nothing
+	db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "negackmessage", ExtraParam: string(bparam)})
+	_, err2 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next"}) // second time nothing
 
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec1_expect), string(res))
@@ -1260,12 +1262,12 @@ func TestMongoDBGetNextClearsInprocessAfterReset(t *testing.T) {
 	defer cleanup()
 	err := db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
-	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
-	res1, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
+	res, err := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
+	res1, err1 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
 
-	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "resetcounter", ExtraParam: "0"})
-	res2, err2 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
-	res3, err3 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
+	db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "resetcounter", ExtraParam: "0"})
+	res2, err2 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
+	res3, err3 := db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
 
 	assert.Nil(t, err)
 	assert.Nil(t, err1)
@@ -1295,16 +1297,16 @@ func TestDeleteStreams(t *testing.T) {
 	for _, test := range testsDeleteStream {
 		db.Connect(dbaddress)
 		db.insertRecord(dbname, encodeStringForColName(test.stream), &rec1)
-		db.ProcessRequest(Request{DbName: dbname, Stream: test.stream, GroupId: "123", Op: "next"})
+		db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: test.stream, GroupId: "123", Op: "next"})
 		query_str := "{\"Id\":1,\"Op\":\"ackmessage\"}"
-		request := Request{DbName: dbname, Stream: test.stream, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str}
+		request := Request{Beamtime:beamtime, DataSource:datasource, Stream: test.stream, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str}
 		_, err := db.ProcessRequest(request)
 		assert.Nil(t, err, test.message)
-		_, err = db.ProcessRequest(Request{DbName: dbname, Stream: test.stream, GroupId: "", Op: "delete_stream", ExtraParam: test.params})
+		_, err = db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: test.stream, GroupId: "", Op: "delete_stream", ExtraParam: test.params})
 		if test.ok {
-			rec, err := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
-			acks_exist,_:= db.collectionExist(Request{DbName: dbname, ExtraParam: ""},acks_collection_name_prefix+test.stream)
-			inprocess_exist,_:= db.collectionExist(Request{DbName: dbname, ExtraParam: ""},inprocess_collection_name_prefix+test.stream)
+			rec, err := streams.getStreams(&db, Request{Beamtime:beamtime, DataSource:datasource, ExtraParam: ""})
+			acks_exist,_:= db.collectionExist(Request{Beamtime:beamtime, DataSource:datasource, ExtraParam: ""},acks_collection_name_prefix+test.stream)
+			inprocess_exist,_:= db.collectionExist(Request{Beamtime:beamtime, DataSource:datasource, ExtraParam: ""},inprocess_collection_name_prefix+test.stream)
 			assert.Equal(t,0,len(rec.Streams),test.message)
 			assert.Equal(t,false,acks_exist,test.message)
 			assert.Equal(t,false,inprocess_exist,test.message)
@@ -1312,7 +1314,7 @@ func TestDeleteStreams(t *testing.T) {
 		} else {
 			assert.NotNil(t, err, test.message)
 		}
-		_, err = db.ProcessRequest(Request{DbName: dbname, Stream: test.stream, GroupId: "", Op: "delete_stream", ExtraParam: test.params})
+		_, err = db.ProcessRequest(Request{Beamtime:beamtime, DataSource:datasource, Stream: test.stream, GroupId: "", Op: "delete_stream", ExtraParam: test.params})
 		if test.ok2 {
 			assert.Nil(t, err, test.message+" 2")
 		} else {
@@ -1323,7 +1325,8 @@ func TestDeleteStreams(t *testing.T) {
 
 
 var testsEncodings = []struct {
-	dbname          string
+	beamtime          string
+	datasource          string
 	collection      string
 	group			string
 	dbname_indb          string
@@ -1332,10 +1335,10 @@ var testsEncodings = []struct {
 	message string
 	ok              bool
 }{
-	{"dbname", "col", "group", "dbname","col","group", "no encoding",true},
-	{"dbname"+badSymbolsDb, "col", "group", "dbname"+badSymbolsDbEncoded,"col","group", "symbols in db",true},
-	{"dbname", "col"+badSymbolsCol, "group"+badSymbolsCol, "dbname","col"+badSymbolsColEncoded,"group"+badSymbolsColEncoded, "symbols in col",true},
-	{"dbname"+badSymbolsDb, "col"+badSymbolsCol, "group"+badSymbolsCol, "dbname"+badSymbolsDbEncoded,"col"+badSymbolsColEncoded,"group"+badSymbolsColEncoded, "symbols in col and db",true},
+	{"bt","dbname", "col", "group", "bt_dbname","col","group", "no encoding",true},
+	{"bt","dbname"+badSymbolsDb, "col", "group", "bt_dbname"+badSymbolsDbEncoded,"col","group", "symbols in db",true},
+	{"bt","dbname", "col"+badSymbolsCol, "group"+badSymbolsCol, "bt_dbname","col"+badSymbolsColEncoded,"group"+badSymbolsColEncoded, "symbols in col",true},
+	{"bt","dbname"+badSymbolsDb, "col"+badSymbolsCol, "group"+badSymbolsCol, "bt_dbname"+badSymbolsDbEncoded,"col"+badSymbolsColEncoded,"group"+badSymbolsColEncoded, "symbols in col and db",true},
 
 }
 
@@ -1343,7 +1346,7 @@ func TestMongoDBEncodingOK(t *testing.T) {
 	for _, test := range testsEncodings {
 		db.Connect(dbaddress)
 		db.insertRecord(test.dbname_indb, test.collection_indb, &rec1)
-		res, err := db.ProcessRequest(Request{DbName: test.dbname, Stream: test.collection, GroupId: test.group, Op: "next"})
+		res, err := db.ProcessRequest(Request{Beamtime:test.beamtime,DataSource: test.datasource, Stream: test.collection, GroupId: test.group, Op: "next"})
 		if test.ok {
 			assert.Nil(t, err, test.message)
 			assert.Equal(t, string(rec1_expect), string(res), test.message)
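
The test changes above replace the precomposed DbName field of Request with separate Beamtime and DataSource fields, and the encoding table now expects the stored database for beamtime "bt" and data source "dbname" to be called "bt_dbname", i.e. the two parts joined with an underscore (the same composition that processRequest previously did by hand). The sketch below only illustrates that naming convention; the Request subset and the dbName helper are illustrative assumptions, not the broker's actual implementation.

```go
package main

import "fmt"

// Request mirrors the subset of fields used in the tests above (illustrative only).
type Request struct {
	Beamtime   string
	DataSource string
	Stream     string
	GroupId    string
	Op         string
	ExtraParam string
}

// dbName is a hypothetical helper: the encoding tests expect the database for
// beamtime "bt" and data source "dbname" to be named "bt_dbname".
func (r Request) dbName() string {
	return r.Beamtime + "_" + r.DataSource
}

func main() {
	req := Request{Beamtime: "bt", DataSource: "dbname", Stream: "col", GroupId: "group", Op: "next"}
	fmt.Println(req.dbName()) // bt_dbname
}
```
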
diff --git a/broker/src/asapo_broker/database/streams_test.go b/broker/src/asapo_broker/database/streams_test.go
index 4ba11e0b3986ff93ea26289054a11f573d670e5c..2bb15c0b575fc8d156d5e186ed6d3f1d4e663daf 100644
--- a/broker/src/asapo_broker/database/streams_test.go
+++ b/broker/src/asapo_broker/database/streams_test.go
@@ -28,16 +28,16 @@ func TestStreamsTestSuite(t *testing.T) {
 }
 
 func (suite *StreamsTestSuite) TestStreamsEmpty() {
-	rec, err := streams.getStreams(&db, Request{DbName: "test", ExtraParam: ""})
+	rec, err := streams.getStreams(&db, Request{Beamtime:"test",DataSource:datasource, ExtraParam: ""})
 	suite.Nil(err)
 	suite.Empty(rec.Streams, 0)
 }
 
 func (suite *StreamsTestSuite) TestStreamsNotUsesCacheWhenEmpty() {
 	db.settings.UpdateStreamCachePeriodMs = 1000
-	streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
+	streams.getStreams(&db, Request{Beamtime:beamtime, DataSource:datasource, ExtraParam: ""})
 	db.insertRecord(dbname, collection, &rec1)
-	rec, err := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
+	rec, err := streams.getStreams(&db, Request{Beamtime:beamtime, DataSource:datasource, ExtraParam: ""})
 	suite.Nil(err)
 	suite.Equal(1, len(rec.Streams))
 }
@@ -45,9 +45,9 @@ func (suite *StreamsTestSuite) TestStreamsNotUsesCacheWhenEmpty() {
 func (suite *StreamsTestSuite) TestStreamsUsesCache() {
 	db.settings.UpdateStreamCachePeriodMs = 1000
 	db.insertRecord(dbname, collection, &rec2)
-	streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
+	streams.getStreams(&db, Request{Beamtime:beamtime, DataSource:datasource, ExtraParam: ""})
 	db.insertRecord(dbname, collection, &rec1)
-	rec, err := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
+	rec, err := streams.getStreams(&db, Request{Beamtime:beamtime, DataSource:datasource, ExtraParam: ""})
 	suite.Nil(err)
 	suite.Equal(int64(1), rec.Streams[0].Timestamp)
 	suite.Equal(false, rec.Streams[0].Finished)
@@ -60,15 +60,15 @@ func (suite *StreamsTestSuite) TestStreamsCacheexpires() {
 	var res1 StreamsRecord
 	go func() {
 		db.insertRecord(dbname, collection, &rec1)
-		streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
+		streams.getStreams(&db, Request{Beamtime:beamtime, DataSource:datasource, ExtraParam: ""})
 		db.insertRecord(dbname, collection, &rec_finished)
-		res1,_ = streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
+		res1,_ = streams.getStreams(&db, Request{Beamtime:beamtime, DataSource:datasource, ExtraParam: ""})
 	}()
 	db.insertRecord(dbname, collection+"1", &rec1_later)
-	res2,_ := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
+	res2,_ := streams.getStreams(&db, Request{Beamtime:beamtime, DataSource:datasource, ExtraParam: ""})
 	db.insertRecord(dbname, collection+"1", &rec_finished)
 	time.Sleep(time.Second)
-	res3, err := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
+	res3, err := streams.getStreams(&db, Request{Beamtime:beamtime, DataSource:datasource, ExtraParam: ""})
 	suite.Nil(err)
 	suite.Equal(true, res3.Streams[0].Finished)
 	fmt.Println(res1,res2)
@@ -80,7 +80,7 @@ func (suite *StreamsTestSuite) TestStreamsGetFinishedInfo() {
 	db.settings.UpdateStreamCachePeriodMs = 1000
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec_finished)
-	rec, err := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
+	rec, err := streams.getStreams(&db, Request{Beamtime:beamtime, DataSource:datasource, ExtraParam: ""})
 	suite.Nil(err)
 	suite.Equal(int64(0), rec.Streams[0].Timestamp)
 	suite.Equal(true, rec.Streams[0].Finished)
@@ -92,7 +92,7 @@ func (suite *StreamsTestSuite) TestStreamsDataSetsGetFinishedInfo() {
 	db.settings.UpdateStreamCachePeriodMs = 1000
 	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
 	db.insertRecord(dbname, collection, &rec_finished)
-	rec, err := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
+	rec, err := streams.getStreams(&db, Request{Beamtime:beamtime, DataSource:datasource, ExtraParam: ""})
 	suite.Nil(err)
 	suite.Equal(int64(1), rec.Streams[0].Timestamp)
 	suite.Equal(int64(2), rec.Streams[0].TimestampLast)
@@ -106,8 +106,8 @@ func (suite *StreamsTestSuite) TestStreamsMultipleRequests() {
 	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
 	db.insertRecord(dbname, collection, &rec_finished)
 	db.insertRecord(dbname, collection2, &rec_dataset1_incomplete)
-	rec, err := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: "0/unfinished"})
-	rec2, err2 := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: "0/finished"})
+	rec, err := streams.getStreams(&db, Request{Beamtime:beamtime, DataSource:datasource, ExtraParam: "0/unfinished"})
+	rec2, err2 := streams.getStreams(&db, Request{Beamtime:beamtime, DataSource:datasource, ExtraParam: "0/finished"})
 	suite.Nil(err)
 	suite.Equal(collection2, rec.Streams[0].Name)
 	suite.Equal(1, len(rec.Streams))
@@ -119,10 +119,10 @@ func (suite *StreamsTestSuite) TestStreamsMultipleRequests() {
 func (suite *StreamsTestSuite) TestStreamsNotUsesCacheWhenExpired() {
 	db.settings.UpdateStreamCachePeriodMs = 10
 	db.insertRecord(dbname, collection, &rec2)
-	streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
+	streams.getStreams(&db, Request{Beamtime:beamtime,DataSource:datasource, ExtraParam: ""})
 	db.insertRecord(dbname, collection, &rec1)
 	time.Sleep(time.Millisecond * 100)
-	rec, err := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
+	rec, err := streams.getStreams(&db, Request{Beamtime:beamtime,DataSource:datasource, ExtraParam: ""})
 	suite.Nil(err)
 	suite.Equal(int64(1), rec.Streams[0].Timestamp)
 }
@@ -130,9 +130,9 @@ func (suite *StreamsTestSuite) TestStreamsNotUsesCacheWhenExpired() {
 func (suite *StreamsTestSuite) TestStreamRemovesDatabase() {
 	db.settings.UpdateStreamCachePeriodMs = 0
 	db.insertRecord(dbname, collection, &rec1)
-	streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
+	streams.getStreams(&db, Request{Beamtime:beamtime,DataSource:datasource, ExtraParam: ""})
 	db.dropDatabase(dbname)
-	rec, err := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
+	rec, err := streams.getStreams(&db, Request{Beamtime:beamtime,DataSource:datasource, ExtraParam: ""})
 	suite.Nil(err)
 	suite.Empty(rec.Streams, 0)
 }
@@ -143,18 +143,18 @@ var streamFilterTests=[]struct{
 	streams []string
 	message string
 }{
-	{request: Request{DbName:dbname, ExtraParam:""},error: false,streams: []string{collection,collection2},message: "default all streams"},
-	{request: Request{DbName:dbname, ExtraParam:"0/"},error: false,streams: []string{collection,collection2},message: "default 0/ all streams"},
-	{request: Request{DbName:dbname, ExtraParam:utils.EncodeTwoStrings(collection,"")},error: false,streams: []string{collection,collection2},message: "first parameter only -  all streams"},
-	{request: Request{DbName:dbname, ExtraParam:"0/all"},error: false,streams: []string{collection,collection2},message: "second parameter only -  all streams"},
-	{request: Request{DbName:dbname, ExtraParam:"0/finished"},error: false,streams: []string{collection2},message: "second parameter only -  finished streams"},
-	{request: Request{DbName:dbname, ExtraParam:"0/unfinished"},error: false,streams: []string{collection},message: "second parameter only -  unfinished streams"},
-	{request: Request{DbName:dbname, ExtraParam:utils.EncodeTwoStrings(collection2,"all")},error: false,streams: []string{collection2},message: "from stream2"},
-	{request: Request{DbName:dbname, ExtraParam:utils.EncodeTwoStrings(collection2,"unfinished")},error: false,streams: []string{},message: "from stream2 and filter"},
-	{request: Request{DbName:dbname, ExtraParam:utils.EncodeTwoStrings(collection2,"bla")},error: true,streams: []string{},message: "wrong filter"},
-	{request: Request{DbName:dbname, ExtraParam:utils.EncodeTwoStrings(collection2,"all_aaa")},error: true,streams: []string{},message: "wrong filter2"},
-	{request: Request{DbName:dbname, ExtraParam:utils.EncodeTwoStrings("blabla","")},error: false,streams: []string{},message: "from unknown stream returns nothing"},
-	{request: Request{DbName:dbname, ExtraParam:utils.EncodeTwoStrings(collection2,"")},error: false,streams: []string{collection2},message: "from stream2, first parameter only"},
+	{request: Request{Beamtime:beamtime,DataSource:datasource,ExtraParam:""},error: false,streams: []string{collection,collection2},message: "default all streams"},
+	{request: Request{Beamtime:beamtime,DataSource:datasource, ExtraParam:"0/"},error: false,streams: []string{collection,collection2},message: "default 0/ all streams"},
+	{request: Request{Beamtime:beamtime,DataSource:datasource, ExtraParam:utils.EncodeTwoStrings(collection,"")},error: false,streams: []string{collection,collection2},message: "first parameter only -  all streams"},
+	{request: Request{Beamtime:beamtime,DataSource:datasource, ExtraParam:"0/all"},error: false,streams: []string{collection,collection2},message: "second parameter only -  all streams"},
+	{request: Request{Beamtime:beamtime,DataSource:datasource, ExtraParam:"0/finished"},error: false,streams: []string{collection2},message: "second parameter only -  finished streams"},
+	{request: Request{Beamtime:beamtime,DataSource:datasource, ExtraParam:"0/unfinished"},error: false,streams: []string{collection},message: "second parameter only -  unfinished streams"},
+	{request: Request{Beamtime:beamtime,DataSource:datasource, ExtraParam:utils.EncodeTwoStrings(collection2,"all")},error: false,streams: []string{collection2},message: "from stream2"},
+	{request: Request{Beamtime:beamtime,DataSource:datasource, ExtraParam:utils.EncodeTwoStrings(collection2,"unfinished")},error: false,streams: []string{},message: "from stream2 and filter"},
+	{request: Request{Beamtime:beamtime,DataSource:datasource, ExtraParam:utils.EncodeTwoStrings(collection2,"bla")},error: true,streams: []string{},message: "wrong filter"},
+	{request: Request{Beamtime:beamtime,DataSource:datasource, ExtraParam:utils.EncodeTwoStrings(collection2,"all_aaa")},error: true,streams: []string{},message: "wrong filter2"},
+	{request: Request{Beamtime:beamtime,DataSource:datasource, ExtraParam:utils.EncodeTwoStrings("blabla","")},error: false,streams: []string{},message: "from unknown stream returns nothing"},
+	{request: Request{Beamtime:beamtime,DataSource:datasource, ExtraParam:utils.EncodeTwoStrings(collection2,"")},error: false,streams: []string{collection2},message: "from stream2, first parameter only"},
 }
 
 func (suite *StreamsTestSuite) TestStreamFilters() {
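
The filter table above drives the "streams" operation through its ExtraParam, which carries two pieces of information: the stream to start listing from and a filter that is "all", "finished" or "unfinished" (the tests build the same value either as a plain "<from>/<filter>" string or via utils.EncodeTwoStrings, whose exact encoding is not shown in this diff). A minimal sketch of constructing such requests, under those assumptions, is shown below.

```go
package main

import "fmt"

// Request mirrors the subset of fields the stream-listing tests use (illustrative only).
type Request struct {
	Beamtime   string
	DataSource string
	Stream     string
	Op         string
	ExtraParam string
}

func main() {
	// "0" means "from the beginning"; the second part selects which streams to report.
	for _, filter := range []string{"0/all", "0/finished", "0/unfinished"} {
		req := Request{Beamtime: "bt", DataSource: "source", Stream: "0", Op: "streams", ExtraParam: filter}
		fmt.Printf("streams request with filter %q: %+v\n", filter, req)
	}
}
```
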
diff --git a/broker/src/asapo_broker/server/authorizer_test.go b/broker/src/asapo_broker/server/authorizer_test.go
index a58681460c80b6a412091f6982afca7c6eecdc14..f854b224667b1fc004a9e36ef229ad20fa15b5bd 100644
--- a/broker/src/asapo_broker/server/authorizer_test.go
+++ b/broker/src/asapo_broker/server/authorizer_test.go
@@ -47,7 +47,7 @@ func responseOk() (*http.Response, error) {
 }
 
 func responseUnauth() (*http.Response, error) {
-	r := ioutil.NopCloser(bytes.NewReader([]byte("wrong JWT token")))
+	r := ioutil.NopCloser(bytes.NewReader([]byte("wrong or expired JWT token")))
 	return &http.Response{
 		StatusCode: http.StatusUnauthorized,
 		Body:       r,
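
The mocked unauthorized response now says "wrong or expired JWT token", reflecting that token expiry is treated as an authorization failure. The actual verification lives in the authorizer's JWT utilities and is not part of this hunk; the sketch below is only an illustration of where the "expired" condition comes from, decoding the standard "exp" claim with the standard library and without verifying the signature.

```go
package main

import (
	"encoding/base64"
	"encoding/json"
	"errors"
	"fmt"
	"strings"
	"time"
)

// tokenExpired decodes a JWT payload (signature verification is deliberately
// omitted here) and reports whether the standard "exp" claim lies in the past.
// Illustrative only; not the project's code.
func tokenExpired(token string) (bool, error) {
	parts := strings.Split(token, ".")
	if len(parts) != 3 {
		return false, errors.New("not a JWT")
	}
	payload, err := base64.RawURLEncoding.DecodeString(parts[1])
	if err != nil {
		return false, err
	}
	var claims struct {
		Exp int64 `json:"exp"`
	}
	if err := json.Unmarshal(payload, &claims); err != nil {
		return false, err
	}
	return claims.Exp > 0 && time.Unix(claims.Exp, 0).Before(time.Now()), nil
}

func main() {
	// Payload {"exp":1} is long in the past, so this reports true.
	expired, err := tokenExpired("eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjF9.sig")
	fmt.Println(expired, err)
}
```
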
diff --git a/broker/src/asapo_broker/server/get_commands_test.go b/broker/src/asapo_broker/server/get_commands_test.go
index 17f3aea5a32a1308f3ec4e7e49f5d894cd6c2aea..0c5b4a91868570757dc9181e1b93aa25771fa8a2 100644
--- a/broker/src/asapo_broker/server/get_commands_test.go
+++ b/broker/src/asapo_broker/server/get_commands_test.go
@@ -60,8 +60,11 @@ var testsGetCommand = []struct {
 
 func (suite *GetCommandsTestSuite) TestGetCommandsCallsCorrectRoutine() {
 	for _, test := range testsGetCommand {
-		suite.mock_db.On("ProcessRequest", database.Request{DbName: expectedDBName, Stream: test.stream, GroupId: test.groupid, Op: test.command, ExtraParam: test.externalParam}).Return([]byte("Hello"), nil)
-		logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request "+test.command)))
+		suite.mock_db.On("ProcessRequest", database.Request{Beamtime: expectedBeamtimeId, DataSource: test.source, Stream: test.stream, GroupId: test.groupid, Op: test.command, ExtraParam: test.externalParam}).Return([]byte("Hello"), nil)
+		logger.MockLog.On("WithFields", mock.MatchedBy(containsMatcherMap(test.command)))
+		logger.MockLog.On("Debug", mock.Anything)
+
+
 		w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + test.source + "/" + test.reqString+correctTokenSuffix+test.queryParams)
 		suite.Equal(http.StatusOK, w.Code, test.command+ " OK")
 		suite.Equal("Hello", string(w.Body.Bytes()), test.command+" sends data")
@@ -83,9 +86,9 @@ func (suite *GetCommandsTestSuite) TestGetCommandsCorrectlyProcessedEncoding() {
 		test.reqString = strings.Replace(test.reqString,test.groupid,encodedGroup,1)
 		test.reqString = strings.Replace(test.reqString,test.source,encodedSource,1)
 		test.reqString = strings.Replace(test.reqString,test.stream,encodedStream,1)
-		dbname := expectedBeamtimeId + "_" + newsource
-		suite.mock_db.On("ProcessRequest", database.Request{DbName: dbname, Stream: newstream, GroupId: newgroup, Op: test.command, ExtraParam: test.externalParam}).Return([]byte("Hello"), nil)
-		logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request "+test.command)))
+		suite.mock_db.On("ProcessRequest", database.Request{Beamtime: expectedBeamtimeId,DataSource: newsource, Stream: newstream, GroupId: newgroup, Op: test.command, ExtraParam: test.externalParam}).Return([]byte("Hello"), nil)
+		logger.MockLog.On("WithFields", mock.MatchedBy(containsMatcherMap(test.command)))
+		logger.MockLog.On("Debug", mock.MatchedBy(containsMatcherStr("got request")))
 		w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + encodedSource + "/" + test.reqString+correctTokenSuffix+test.queryParams)
 		suite.Equal(http.StatusOK, w.Code, test.command+ " OK")
 		suite.Equal("Hello", string(w.Body.Bytes()), test.command+" sends data")
diff --git a/broker/src/asapo_broker/server/get_meta_test.go b/broker/src/asapo_broker/server/get_meta_test.go
index b54a72865f02d4358b4cfc8abf4f2a0bb6678acf..75367d998ca893f0533fe8f57732606b7ef3750b 100644
--- a/broker/src/asapo_broker/server/get_meta_test.go
+++ b/broker/src/asapo_broker/server/get_meta_test.go
@@ -33,8 +33,10 @@ func TestGetMetaTestSuite(t *testing.T) {
 }
 
 func (suite *GetMetaTestSuite) TestGetMetaOK() {
-	suite.mock_db.On("ProcessRequest", database.Request{DbName: expectedDBName, Stream: expectedStream, Op: "meta", ExtraParam: "0"}).Return([]byte(""), nil)
-	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request meta")))
+	suite.mock_db.On("ProcessRequest", database.Request{Beamtime: expectedBeamtimeId,DataSource: expectedSource, Stream: expectedStream, Op: "meta", ExtraParam: "0"}).Return([]byte(""), nil)
+	logger.MockLog.On("WithFields", mock.MatchedBy(containsMatcherMap("meta")))
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcherStr("got request")))
+
 	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/0/meta"  + "/0" + correctTokenSuffix,"GET")
 	suite.Equal(http.StatusOK, w.Code, "meta OK")
 }
diff --git a/broker/src/asapo_broker/server/post_create_group.go b/broker/src/asapo_broker/server/post_create_group.go
index 008e72f14d4bf36022a094c923ab301d7ed2bf36..ba1ae49c478b885c8bdd8339b0707010f5fecd46 100644
--- a/broker/src/asapo_broker/server/post_create_group.go
+++ b/broker/src/asapo_broker/server/post_create_group.go
@@ -14,6 +14,6 @@ func routeCreateGroupID(w http.ResponseWriter, r *http.Request) {
 
 	guid := xid.New()
 	w.Write([]byte(guid.String()))
-	logger.Debug("generated new group: " + guid.String())
+	logger.WithFields(map[string]interface{}{"guid":guid.String()}).Debug("generated new group")
 	statistics.IncreaseCounter()
 }
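
The change in routeCreateGroupID is the same pattern applied throughout this patch: instead of concatenating values into the log message, the variable parts travel as structured fields and the message text stays constant. The broker has its own logger package; the sketch below uses logrus purely as a stand-in (an assumption) to contrast the two styles.

```go
package main

import log "github.com/sirupsen/logrus"

func main() {
	log.SetLevel(log.DebugLevel)
	guid := "example-group-id" // placeholder value, the handler uses xid.New()

	// Old style: message and parameter fused into one string, hard to filter or index.
	log.Debug("generated new group: " + guid)

	// New style, as in routeCreateGroupID above: constant message, structured field.
	log.WithFields(log.Fields{"guid": guid}).Debug("generated new group")
}
```
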
diff --git a/broker/src/asapo_broker/server/post_create_group_test.go b/broker/src/asapo_broker/server/post_create_group_test.go
index dcef0d009e109426d8cb95e9fc5dabd31a0b7692..3189bb46f0cb815f4644bcc28cb31252f02d1a73 100644
--- a/broker/src/asapo_broker/server/post_create_group_test.go
+++ b/broker/src/asapo_broker/server/post_create_group_test.go
@@ -18,7 +18,9 @@ func GetObjectID(t *testing.T) (xid.ID, error) {
 func TestGetNewGroup(t *testing.T) {
 	statistics.Reset()
 	logger.SetMockLog()
-	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("generated new group")))
+	logger.MockLog.On("WithFields", mock.MatchedBy(containsMatcherMap("guid")))
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcherStr("generated new group")))
+
 
 	id1, err := GetObjectID(t)
 	assert.Nil(t, err, "first is ObjectID")
diff --git a/broker/src/asapo_broker/server/post_op_image_test.go b/broker/src/asapo_broker/server/post_op_image_test.go
index 2cc3159ee6a3469490ed3ec082947270e8e49db4..facf1922c6dec06c2231de978e96d98da5ed162e 100644
--- a/broker/src/asapo_broker/server/post_op_image_test.go
+++ b/broker/src/asapo_broker/server/post_op_image_test.go
@@ -34,8 +34,9 @@ func TestMessageOpTestSuite(t *testing.T) {
 
 func (suite *MessageOpTestSuite) TestAckMessageOpOK() {
 	query_str := "{\"Id\":1,\"Op\":\"ackmessage\"}"
-	suite.mock_db.On("ProcessRequest", database.Request{DbName: expectedDBName, Stream: expectedStream, GroupId: expectedGroupID, Op: "ackmessage", ExtraParam: query_str}).Return([]byte(""), nil)
-	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request ackmessage")))
+	suite.mock_db.On("ProcessRequest", database.Request{Beamtime: expectedBeamtimeId,DataSource: expectedSource, Stream: expectedStream, GroupId: expectedGroupID, Op: "ackmessage", ExtraParam: query_str}).Return([]byte(""), nil)
+	logger.MockLog.On("WithFields", mock.MatchedBy(containsMatcherMap("ackmessage")))
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcherStr("got request")))
 	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/1" + correctTokenSuffix,"POST",query_str)
 	suite.Equal(http.StatusOK, w.Code, "ackmessage OK")
 }
diff --git a/broker/src/asapo_broker/server/post_query_images_test.go b/broker/src/asapo_broker/server/post_query_images_test.go
index d51d2490ab3b0063ff078b92430b08cfeb28db1f..28809d4aa9eee1837890ba49b4117a4cc322f505 100644
--- a/broker/src/asapo_broker/server/post_query_images_test.go
+++ b/broker/src/asapo_broker/server/post_query_images_test.go
@@ -35,8 +35,10 @@ func TestQueryTestSuite(t *testing.T) {
 func (suite *QueryTestSuite) TestQueryOK() {
 	query_str := "aaaa"
 
-	suite.mock_db.On("ProcessRequest", database.Request{DbName: expectedDBName, Stream: expectedStream,Op: "querymessages", ExtraParam: query_str}).Return([]byte("{}"), nil)
-	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request querymessages")))
+	suite.mock_db.On("ProcessRequest", database.Request{Beamtime: expectedBeamtimeId,DataSource: expectedSource, Stream: expectedStream,Op: "querymessages", ExtraParam: query_str}).Return([]byte("{}"), nil)
+	logger.MockLog.On("WithFields", mock.MatchedBy(containsMatcherMap("querymessages")))
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcherStr("got request")))
+
 
 	w := doRequest("/beamtime/"+expectedBeamtimeId+"/"+expectedSource+"/"+expectedStream+"/0/querymessages"+correctTokenSuffix, "POST", query_str)
 	suite.Equal(http.StatusOK, w.Code, "Query OK")
diff --git a/broker/src/asapo_broker/server/post_reset_counter_test.go b/broker/src/asapo_broker/server/post_reset_counter_test.go
index 64291bee21024ef2b5dbee377c2ff2b3aec3aeaf..84ace072d9cac0872b8f262dbccb72c918af2280 100644
--- a/broker/src/asapo_broker/server/post_reset_counter_test.go
+++ b/broker/src/asapo_broker/server/post_reset_counter_test.go
@@ -33,10 +33,11 @@ func TestResetCounterTestSuite(t *testing.T) {
 }
 
 func (suite *ResetCounterTestSuite) TestResetCounterOK() {
-	expectedRequest := database.Request{DbName: expectedDBName, Stream: expectedStream, GroupId:expectedGroupID, Op: "resetcounter", ExtraParam: "10"}
+	expectedRequest := database.Request{Beamtime: expectedBeamtimeId,DataSource: expectedSource, Stream: expectedStream, GroupId:expectedGroupID, Op: "resetcounter", ExtraParam: "10"}
 	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte(""), nil)
 
-	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request resetcounter")))
+	logger.MockLog.On("WithFields", mock.MatchedBy(containsMatcherMap("resetcounter")))
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcherStr("got request")))
 
 	w := doRequest("/beamtime/"+expectedBeamtimeId+"/"+expectedSource+"/"+expectedStream+"/"+expectedGroupID+"/resetcounter"+correctTokenSuffix+"&value=10", "POST")
 	suite.Equal(http.StatusOK, w.Code, "ResetCounter OK")
diff --git a/broker/src/asapo_broker/server/process_request.go b/broker/src/asapo_broker/server/process_request.go
index 41b6564b7a91f5d2902febfbfbd3f58f8d207ac9..8a0065fc2806ada24caa414e825e3e35b4c97778 100644
--- a/broker/src/asapo_broker/server/process_request.go
+++ b/broker/src/asapo_broker/server/process_request.go
@@ -63,19 +63,21 @@ func processRequest(w http.ResponseWriter, r *http.Request, op string, extra_par
 
 
 	w.Header().Set("Access-Control-Allow-Origin", "*")
-	db_name, datasource, stream, group_id, ok := extractRequestParameters(r, needGroupID)
+	beamtime, datasource, stream, group_id, ok := extractRequestParameters(r, needGroupID)
 	if !ok {
+		log.WithFields(map[string]interface{}{"request":r.RequestURI}).Error("cannot extract request parameters")
 		w.WriteHeader(http.StatusBadRequest)
 		return
 	}
 
-	if err := authorize(r, db_name, needWriteAccess(op)); err != nil {
-		writeAuthAnswer(w, "get "+op, db_name, err)
+	if err := authorize(r, beamtime, needWriteAccess(op)); err != nil {
+		writeAuthAnswer(w, "get "+op, beamtime, err)
 		return
 	}
 
 	request := database.Request{}
-	request.DbName = db_name+"_"+datasource
+	request.Beamtime = beamtime
+	request.DataSource = datasource
 	request.Op = op
 	request.ExtraParam = extra_param
 	request.Stream = stream
@@ -85,17 +87,19 @@ func processRequest(w http.ResponseWriter, r *http.Request, op string, extra_par
 		request.MinDatasetSize = minSize
 	}
 
-	answer, code := processRequestInDb(request)
+	rlog:=request.Logger()
+	rlog.Debug("got request")
+	answer, code := processRequestInDb(request,rlog)
 	w.WriteHeader(code)
 	w.Write(answer)
 }
 
-func returnError(err error, log_str string) (answer []byte, code int) {
+func returnError(err error, rlog logger.Logger) (answer []byte, code int) {
 	code = database.GetStatusCodeFromError(err)
 	if code != utils.StatusNoData && code != utils.StatusPartialData{
-		logger.Error(log_str + " - " + err.Error())
+		rlog.WithFields(map[string]interface{}{"cause":err.Error()}).Error("cannot process request")
 	} else {
-		logger.Debug(log_str + " - " + err.Error())
+		rlog.WithFields(map[string]interface{}{"cause":err.Error()}).Debug("no data or partial data")
 	}
 	return []byte(err.Error()), code
 }
@@ -107,20 +111,20 @@ func reconnectIfNeeded(db_error error) {
 	}
 
 	if err := ReconnectDb(); err != nil {
-		log.Error("cannot reconnect to database at : " + settings.GetDatabaseServer() + " " + err.Error())
+		log.WithFields(map[string]interface{}{"address":settings.GetDatabaseServer(),"cause": err.Error()}).Error("cannot reconnect to database")
 	} else {
-		log.Debug("reconnected to database" + settings.GetDatabaseServer())
+		log.WithFields(map[string]interface{}{"address":settings.GetDatabaseServer()}).Debug("reconnected to database")
 	}
 }
 
-func processRequestInDb(request database.Request) (answer []byte, code int) {
+
+
+func processRequestInDb(request database.Request,rlog logger.Logger) (answer []byte, code int) {
 	statistics.IncreaseCounter()
 	answer, err := db.ProcessRequest(request)
-	log_str := "processing request " + request.Op + " in " + request.DbName + " at " + settings.GetDatabaseServer()
 	if err != nil {
 		go reconnectIfNeeded(err)
-		return returnError(err, log_str)
+		return returnError(err, rlog)
 	}
-	logger.Debug(log_str)
 	return answer, utils.StatusOK
 }
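
The request.Logger() helper used in this hunk is defined outside the lines shown here. A minimal sketch of such a field-scoped logger, assuming it simply wraps the logger.WithFields API introduced later in this patch; the field names used below are illustrative, not confirmed by the diff:

package database

import "asapo_common/logger"

// Request mirrors the fields the broker handlers fill in above.
type Request struct {
	Beamtime       string
	DataSource     string
	Stream         string
	GroupId        string
	Op             string
	DatasetOp      bool
	MinDatasetSize int
	ExtraParam     string
}

// Logger returns a logger pre-loaded with the request coordinates, so a
// handler can emit "got request", "cannot process request", etc. without
// repeating them on every call. The exact field names are an assumption.
func (request Request) Logger() logger.Logger {
	return logger.WithFields(map[string]interface{}{
		"beamtime":   request.Beamtime,
		"dataSource": request.DataSource,
		"stream":     request.Stream,
		"groupId":    request.GroupId,
		"operation":  request.Op,
	})
}
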
diff --git a/broker/src/asapo_broker/server/process_request_test.go b/broker/src/asapo_broker/server/process_request_test.go
index cf0d41626723cd026791b9c9c9ac9471d78c0ce2..a9c5f53f36bd0e760ee7bcb1d9bf624b0d50d9a0 100644
--- a/broker/src/asapo_broker/server/process_request_test.go
+++ b/broker/src/asapo_broker/server/process_request_test.go
@@ -45,7 +45,7 @@ func (a *MockAuthServer) AuthorizeToken(tokenJWT string) (token Token, err error
 		}, nil
 	}
 
-	return Token{}, AuthorizationError{errors.New("wrong JWT token"),http.StatusUnauthorized}
+	return Token{}, &AuthorizationError{errors.New("wrong or expired JWT token"), http.StatusUnauthorized}
 }
 
 func prepareTestAuth() {
@@ -66,7 +66,19 @@ type request struct {
 	message string
 }
 
-func containsMatcher(substrings ...string) func(str string) bool {
+func containsMatcherMap(substrings ...string) func(map[string]interface{}) bool {
+	return func(vals map[string]interface{}) bool {
+		res, _ := utils.MapToJson(vals)
+		for _, substr := range substrings {
+			if !strings.Contains(string(res), substr) {
+				return false
+			}
+		}
+		return true
+	}
+}
+
+func containsMatcherStr(substrings ...string) func(str string) bool {
 	return func(str string) bool {
 		for _, substr := range substrings {
 			if !strings.Contains(str, substr) {
@@ -77,6 +89,7 @@ func containsMatcher(substrings ...string) func(str string) bool {
 	}
 }
 
+
 func doRequest(path string, extra_params ...string) *httptest.ResponseRecorder {
 	m := "GET"
 	if len(extra_params) > 0 {
@@ -134,7 +147,9 @@ func TestProcessRequestTestSuite(t *testing.T) {
 }
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestWithWrongToken() {
-	logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("wrong JWT token")))
+
+	logger.MockLog.On("WithFields", mock.MatchedBy(containsMatcherMap("wrong or expired JWT token")))
+	logger.MockLog.On("Error", mock.MatchedBy(containsMatcherStr("cannot authorize request")))
 
 	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + suffixWithWrongToken)
 
@@ -142,7 +157,8 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestWithWrongToken() {
 }
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestWithNoToken() {
-	logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("cannot extract")))
+	logger.MockLog.On("WithFields", mock.MatchedBy(containsMatcherMap("cannot extract")))
+	logger.MockLog.On("Error", mock.MatchedBy(containsMatcherStr("cannot authorize request")))
 
 	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + wrongTokenSuffix)
 
@@ -151,12 +167,15 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestWithNoToken() {
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestWithWrongDatabaseName() {
 
-	expectedRequest := database.Request{DbName: expectedDBName, Stream: expectedStream, GroupId: expectedGroupID, Op: "next"}
+	expectedRequest := database.Request{Beamtime: expectedBeamtimeId, DataSource: expectedSource, Stream: expectedStream, GroupId: expectedGroupID, Op: "next"}
 
 	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte(""),
 		&database.DBError{utils.StatusNoData, ""})
 
-	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request next")))
+	logger.MockLog.On("WithFields", mock.Anything)
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcherStr("got request")))
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcherStr("no data or partial data")))
+
 
 	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + correctTokenSuffix)
 
@@ -165,14 +184,16 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestWithWrongDatabaseName()
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestWithConnectionError() {
 
-	expectedRequest := database.Request{DbName: expectedDBName, Stream: expectedStream, GroupId: expectedGroupID, Op: "next"}
+	expectedRequest := database.Request{Beamtime: expectedBeamtimeId, DataSource: expectedSource, Stream: expectedStream, GroupId: expectedGroupID, Op: "next"}
 
 	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte(""),
 		&database.DBError{utils.StatusServiceUnavailable, ""})
 
-	logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("processing request next")))
+	logger.MockLog.On("WithFields", mock.Anything)
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcherStr("got request")))
+	logger.MockLog.On("Error", mock.MatchedBy(containsMatcherStr("cannot process request")))
 	ExpectReconnect(suite.mock_db)
-	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("reconnected")))
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcherStr("reconnected")))
 
 	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + correctTokenSuffix)
 	time.Sleep(time.Second)
@@ -181,11 +202,14 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestWithConnectionError() {
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestWithInternalDBError() {
 
-	expectedRequest := database.Request{DbName: expectedDBName, Stream: expectedStream, GroupId: expectedGroupID, Op: "next"}
+	expectedRequest := database.Request{Beamtime: expectedBeamtimeId, DataSource: expectedSource, Stream: expectedStream, GroupId: expectedGroupID, Op: "next"}
 
 	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte(""), errors.New(""))
-	logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("processing request next")))
-	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("reconnected")))
+
+	logger.MockLog.On("WithFields", mock.Anything)
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcherStr("got request")))
+	logger.MockLog.On("Error", mock.MatchedBy(containsMatcherStr("cannot process request")))
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcherStr("reconnected")))
 
 	ExpectReconnect(suite.mock_db)
 	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + correctTokenSuffix)
@@ -196,10 +220,11 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestWithInternalDBError() {
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestAddsCounter() {
 
-	expectedRequest := database.Request{DbName: expectedDBName, Stream: expectedStream, GroupId: expectedGroupID, Op: "next"}
+	expectedRequest := database.Request{Beamtime: expectedBeamtimeId, DataSource: expectedSource, Stream: expectedStream, GroupId: expectedGroupID, Op: "next"}
 	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte("Hello"), nil)
 
-	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request next in "+expectedDBName)))
+	logger.MockLog.On("WithFields", mock.Anything)
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcherStr("got request")))
 
 	doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + correctTokenSuffix)
 	suite.Equal(1, statistics.GetCounter(), "ProcessRequest increases counter")
@@ -207,10 +232,11 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestAddsCounter() {
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestAddsDataset() {
 
-	expectedRequest := database.Request{DbName: expectedDBName, Stream: expectedStream, GroupId: expectedGroupID, DatasetOp: true, Op: "next"}
+	expectedRequest := database.Request{Beamtime: expectedBeamtimeId, DataSource: expectedSource, Stream: expectedStream, GroupId: expectedGroupID, DatasetOp: true, Op: "next"}
 	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte("Hello"), nil)
 
-	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request next in "+expectedDBName)))
+	logger.MockLog.On("WithFields", mock.Anything)
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcherStr("got request")))
 
 	doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + correctTokenSuffix + "&dataset=true")
 }
@@ -222,7 +248,9 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestErrorOnWrongProtocol() {
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestDeleteStreamReadToken() {
 	query_str := "query_string"
-	logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("wrong token access")))
+	logger.MockLog.On("WithFields", mock.MatchedBy(containsMatcherMap("wrong token access")))
+	logger.MockLog.On("Error", mock.MatchedBy(containsMatcherStr("cannot authorize request")))
+
 	w := doRequest("/beamtime/"+expectedBeamtimeId+"/"+expectedSource+"/"+expectedStream+"/delete"+correctTokenSuffix, "POST", query_str)
 	suite.Equal(http.StatusUnauthorized, w.Code, "wrong token type")
 
@@ -231,9 +259,11 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestDeleteStreamReadToken()
 func (suite *ProcessRequestTestSuite) TestProcessRequestDeleteStreamWriteToken() {
 	query_str := "query_string"
 
-	expectedRequest := database.Request{DbName: expectedDBName, Stream: expectedStream, GroupId: "", Op: "delete_stream", ExtraParam: query_str}
+	expectedRequest := database.Request{Beamtime: expectedBeamtimeId, DataSource: expectedSource, Stream: expectedStream, GroupId: "", Op: "delete_stream", ExtraParam: query_str}
 	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte("Hello"), nil)
 
-	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request delete_stream in "+expectedDBName)))
+	logger.MockLog.On("WithFields", mock.MatchedBy(containsMatcherMap("delete_stream")))
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcherStr("got request")))
+
 	doRequest("/beamtime/"+expectedBeamtimeId+"/"+expectedSource+"/"+expectedStream+"/delete"+correctTokenSuffixWrite, "POST", query_str)
 }
diff --git a/broker/src/asapo_broker/server/request_common.go b/broker/src/asapo_broker/server/request_common.go
index 1a0d5e875034ed369f85128929ef3baae1c17c1f..cda3a09951c417368a5db5320f3823cc2ae2ff4b 100644
--- a/broker/src/asapo_broker/server/request_common.go
+++ b/broker/src/asapo_broker/server/request_common.go
@@ -8,12 +8,10 @@ import (
 	"strconv"
 )
 
-func writeAuthAnswer(w http.ResponseWriter, requestName string, db_name string, err error) {
-	log_str := "processing " + requestName + " request in " + db_name + " at " + settings.GetDatabaseServer()
-	logger.Error(log_str + " - " + err.Error())
-
+func writeAuthAnswer(w http.ResponseWriter, requestOp string, db_name string, err error) {
+	logger.WithFields(map[string]interface{}{"operation": requestOp, "cause": err.Error()}).Error("cannot authorize request")
 	switch er := err.(type) {
-	case AuthorizationError:
+	case *AuthorizationError:
 		w.WriteHeader(er.statusCode)
 	default:
 		w.WriteHeader(http.StatusServiceUnavailable)
@@ -54,7 +52,7 @@ func authorize(r *http.Request, beamtime_id string, needWriteAccess bool) error
 	tokenJWT := r.URL.Query().Get("token")
 
 	if len(tokenJWT) == 0 {
-		return AuthorizationError{errors.New("cannot extract token from request"),http.StatusBadRequest}
+		return &AuthorizationError{errors.New("cannot extract token from request"), http.StatusBadRequest}
 	}
 
 	token, err := auth.AuthorizeToken(tokenJWT)
@@ -67,23 +65,23 @@ func authorize(r *http.Request, beamtime_id string, needWriteAccess bool) error
 		return err
 	}
 
-	return checkAccessType(token.AccessTypes,needWriteAccess)
+	return checkAccessType(token.AccessTypes, needWriteAccess)
 }
 
 func checkSubject(subject string, beamtime_id string) error {
 	if subject != utils.SubjectFromBeamtime(beamtime_id) {
-		return AuthorizationError{errors.New("wrong token subject"),http.StatusUnauthorized}
+		return &AuthorizationError{errors.New("wrong token subject"), http.StatusUnauthorized}
 	}
 	return nil
 }
 
 func checkAccessType(accessTypes []string, needWriteAccess bool) error {
-	if needWriteAccess && !utils.StringInSlice("write",accessTypes) {
-		return AuthorizationError{errors.New("wrong token access type"),http.StatusUnauthorized}
+	if needWriteAccess && !utils.StringInSlice("write", accessTypes) {
+		return &AuthorizationError{errors.New("wrong token access type"), http.StatusUnauthorized}
 	}
 
-	if !utils.StringInSlice("read",accessTypes) {
-		return AuthorizationError{errors.New("wrong token access type"),http.StatusUnauthorized}
+	if !utils.StringInSlice("read", accessTypes) {
+		return &AuthorizationError{errors.New("wrong token access type"), http.StatusUnauthorized}
 	}
 	return nil
 }
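
The switch from AuthorizationError values to *AuthorizationError pointers in this file is what keeps the "case *AuthorizationError:" branch in writeAuthAnswer working: the dynamic type stored in the error interface must match the case exactly, and a pointer never matches a value case (or vice versa). A self-contained sketch of the idea; the struct and field names mirror the usage above, and the Error() method is assumed:

package main

import (
	"errors"
	"fmt"
	"net/http"
)

// Mirrors the broker's AuthorizationError: an underlying error plus the
// HTTP status code to report back to the client.
type AuthorizationError struct {
	err        error
	statusCode int
}

func (e *AuthorizationError) Error() string { return e.err.Error() }

// statusFor chooses the response code the same way writeAuthAnswer does:
// only a *AuthorizationError carries its own status, everything else maps
// to 503 Service Unavailable.
func statusFor(err error) int {
	switch er := err.(type) {
	case *AuthorizationError: // a plain AuthorizationError case would never match a pointer
		return er.statusCode
	default:
		return http.StatusServiceUnavailable
	}
}

func main() {
	err := &AuthorizationError{errors.New("wrong token subject"), http.StatusUnauthorized}
	fmt.Println(statusFor(err)) // prints 401
}
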
diff --git a/common/cpp/include/asapo/common/error.h b/common/cpp/include/asapo/common/error.h
index 510ea0866dc7c3e1659e3b3eb9680d487fe8fee1..4c38ae3ca1445289b4bd7435141f5b63b08ea412 100644
--- a/common/cpp/include/asapo/common/error.h
+++ b/common/cpp/include/asapo/common/error.h
@@ -21,7 +21,8 @@ class ErrorInterface {
     virtual std::string Explain() const noexcept = 0;
     virtual std::string ExplainPretty(uint8_t shift = 0) const noexcept = 0;
     virtual std::string ExplainInJSON() const noexcept = 0;
-    virtual ErrorInterface* AddContext(std::string key, std::string value) noexcept = 0;
+    virtual ErrorInterface* AddDetails(std::string key, std::string value) noexcept = 0;
+    virtual ErrorInterface* AddDetails(std::string key, uint64_t value) noexcept = 0;
     virtual ErrorInterface* SetCause(Error cause_err) noexcept = 0;
     virtual const Error& GetCause() const noexcept = 0;
     virtual CustomErrorData* GetCustomData() noexcept = 0;
@@ -50,7 +51,7 @@ class ServiceError : public ErrorInterface {
     ServiceErrorType error_type_;
     std::string error_name_;
     std::string error_message_;
-    std::map<std::string, std::string> context_;
+    std::map<std::string, std::string> details_;
     Error cause_err_;
     std::unique_ptr<CustomErrorData> custom_data_;
   public:
@@ -58,7 +59,8 @@ class ServiceError : public ErrorInterface {
     ServiceErrorType GetServiceErrorType() const noexcept;
     CustomErrorData* GetCustomData() noexcept override;
     void SetCustomData(std::unique_ptr<CustomErrorData> data) noexcept override;
-    ErrorInterface* AddContext(std::string key, std::string value) noexcept override;
+    ErrorInterface* AddDetails(std::string key, std::string value) noexcept override;
+    ErrorInterface* AddDetails(std::string key, uint64_t value) noexcept override;
     ErrorInterface* SetCause(Error cause_err) noexcept override;
     const Error& GetCause() const noexcept override;
     std::string Explain() const noexcept override;
diff --git a/common/cpp/include/asapo/common/error.tpp b/common/cpp/include/asapo/common/error.tpp
index 95f6eed6606794d1596731640ce58b58614841c2..931abd205bd37b67dc2c2f017cc5ee9e37721224 100644
--- a/common/cpp/include/asapo/common/error.tpp
+++ b/common/cpp/include/asapo/common/error.tpp
@@ -1,6 +1,7 @@
-
 #include "error.h"
 
+#include "asapo/common/utils.h"
+
 namespace asapo {
 
 template<typename ServiceErrorType>
@@ -37,10 +38,10 @@ std::string ServiceError<ServiceErrorType>::ExplainPretty(uint8_t shift) const n
     if (!error_message_.empty()) {
         err += "\n" + base_shift + shift_s + "message: " + error_message_;
     }
-    if (!context_.empty()) {
-        err += "\n" + base_shift + shift_s + "context: ";
+    if (!details_.empty()) {
+        err += "\n" + base_shift + shift_s + "details: ";
         auto i = 0;
-        for (const auto &kv : context_) {
+        for (const auto &kv : details_) {
             err += (i > 0 ? ", " : "") + kv.first + ":" + kv.second;
             i++;
         }
@@ -58,10 +59,10 @@ std::string ServiceError<ServiceErrorType>::Explain() const noexcept {
     if (!error_message_.empty()) {
         err += ", message: " + error_message_;
     }
-    if (!context_.empty()) {
-        err += ", context: ";
+    if (!details_.empty()) {
+        err += ", details: ";
         auto i = 0;
-        for (const auto &kv : context_) {
+        for (const auto &kv : details_) {
             err += (i > 0 ? ", " : "") + kv.first + ":" + kv.second;
             i++;
         }
@@ -73,8 +74,8 @@ std::string ServiceError<ServiceErrorType>::Explain() const noexcept {
 }
 
 template<typename ServiceErrorType>
-ErrorInterface *ServiceError<ServiceErrorType>::AddContext(std::string key, std::string value) noexcept {
-    context_[std::move(key)] = std::move(value);
+ErrorInterface *ServiceError<ServiceErrorType>::AddDetails(std::string key, std::string value) noexcept {
+    details_[std::move(key)] = std::move(value);
     return this;
 }
 template<typename ServiceErrorType>
@@ -91,13 +92,13 @@ template<typename ServiceErrorType>
 std::string ServiceError<ServiceErrorType>::ExplainInJSON() const noexcept {
     std::string err = WrapInQuotes("error") + ":" + WrapInQuotes(error_name_);
     if (!error_message_.empty()) {
-        err += "," + WrapInQuotes("message") + ":" + WrapInQuotes(error_message_);
+        err += "," + WrapInQuotes("message") + ":" + WrapInQuotes(EscapeJson(error_message_));
     }
-    if (!context_.empty()) {
-        err += "," + WrapInQuotes("context") + ":{";
+    if (!details_.empty()) {
+        err += "," + WrapInQuotes("details") + ":{";
         auto i = 0;
-        for (const auto &kv : context_) {
-            err += (i > 0 ? ", " : "") + WrapInQuotes(kv.first) + ":" + WrapInQuotes(kv.second);
+        for (const auto &kv : details_) {
+            err += (i > 0 ? ", " : "") + WrapInQuotes(kv.first) + ":" + WrapInQuotes(EscapeJson(kv.second));
             i++;
         }
         err += "}";
@@ -112,6 +113,11 @@ const Error &ServiceError<ServiceErrorType>::GetCause() const noexcept {
     return cause_err_;
 }
 
+template<typename ServiceErrorType>
+ErrorInterface *ServiceError<ServiceErrorType>::AddDetails(std::string key, uint64_t value) noexcept {
+    return AddDetails(key,std::to_string(value));
+}
+
 template<typename ServiceErrorType>
 Error ServiceErrorTemplate<ServiceErrorType>::Generate() const noexcept {
     return Generate("");
diff --git a/common/cpp/include/asapo/common/io_error.h b/common/cpp/include/asapo/common/io_error.h
index ef15e630836fe854ea5f51f069f800413e52c5e6..52245430bc6d610dd2de2009b2dabd38d74d973e 100644
--- a/common/cpp/include/asapo/common/io_error.h
+++ b/common/cpp/include/asapo/common/io_error.h
@@ -36,39 +36,39 @@ using IOErrorTemplate = ServiceErrorTemplate<IOErrorType>;
 
 namespace IOErrorTemplates {
 auto const kUnknownIOError = IOErrorTemplate {
-    "Unknown Error", IOErrorType::kUnknownIOError
+    "unknown error", IOErrorType::kUnknownIOError
 };
 
 auto const kFileNotFound = IOErrorTemplate {
-    "No such file or directory", IOErrorType::kFileNotFound
+    "no such file or directory", IOErrorType::kFileNotFound
 };
 auto const kReadError = IOErrorTemplate {
-    "Read error", IOErrorType::kReadError
+    "read error", IOErrorType::kReadError
 };
 auto const kBadFileNumber = IOErrorTemplate {
-    "Bad file number", IOErrorType::kBadFileNumber
+    "bad file number", IOErrorType::kBadFileNumber
 };
 auto const kResourceTemporarilyUnavailable = IOErrorTemplate {
-    "Resource temporarily unavailable", IOErrorType::kResourceTemporarilyUnavailable
+    "resource temporarily unavailable", IOErrorType::kResourceTemporarilyUnavailable
 };
 
 auto const kPermissionDenied = IOErrorTemplate {
-    "Permission denied", IOErrorType::kPermissionDenied
+    "permission denied", IOErrorType::kPermissionDenied
 };
 auto const kUnsupportedAddressFamily = IOErrorTemplate {
-    "Unsupported address family", IOErrorType::kUnsupportedAddressFamily
+    "unsupported address family", IOErrorType::kUnsupportedAddressFamily
 };
 auto const kInvalidAddressFormat = IOErrorTemplate {
-    "Invalid address format", IOErrorType::kInvalidAddressFormat
+    "invalid address format", IOErrorType::kInvalidAddressFormat
 };
 auto const kAddressAlreadyInUse = IOErrorTemplate {
-    "Address already in use", IOErrorType::kAddressAlreadyInUse
+    "address already in use", IOErrorType::kAddressAlreadyInUse
 };
 auto const kConnectionRefused = IOErrorTemplate {
-    "Connection refused", IOErrorType::kConnectionRefused
+    "connection refused", IOErrorType::kConnectionRefused
 };
 auto const kNotConnected = IOErrorTemplate {
-    "Not connected", IOErrorType::kNotConnected
+    "not connected", IOErrorType::kNotConnected
 };
 
 auto const kConnectionResetByPeer = IOErrorTemplate {
@@ -101,11 +101,11 @@ auto const kSocketOperationValueOutOfBound =  IOErrorTemplate {
 };
 
 auto const kAddressNotValid =  IOErrorTemplate {
-    "Address not valid", IOErrorType::kAddressNotValid
+    "address not valid", IOErrorType::kAddressNotValid
 };
 
 auto const kBrokenPipe =  IOErrorTemplate {
-    "Broken pipe/connection", IOErrorType::kBrokenPipe
+    "broken pipe/connection", IOErrorType::kBrokenPipe
 };
 
 
diff --git a/common/cpp/include/asapo/common/utils.h b/common/cpp/include/asapo/common/utils.h
new file mode 100644
index 0000000000000000000000000000000000000000..103fb8f0a658cbc8d8802e92a39d419b437a650a
--- /dev/null
+++ b/common/cpp/include/asapo/common/utils.h
@@ -0,0 +1,49 @@
+#ifndef ASAPO_COMMON_CPP_INCLUDE_ASAPO_COMMON_UTILS_H_
+#define ASAPO_COMMON_CPP_INCLUDE_ASAPO_COMMON_UTILS_H_
+
+#include <iomanip>
+#include <sstream>
+
+
+namespace asapo {
+
+inline std::string EscapeJson(const std::string& s) {
+    std::ostringstream o;
+    for (auto c = s.cbegin(); c != s.cend(); c++) {
+        switch (*c) {
+            case '"':
+                o << "\\\"";
+                break;
+            case '\\':
+                o << "\\\\";
+                break;
+            case '\b':
+                o << "\\b";
+                break;
+            case '\f':
+                o << "\\f";
+                break;
+            case '\n':
+                o << "\\n";
+                break;
+            case '\r':
+                o << "\\r";
+                break;
+            case '\t':
+                o << "\\t";
+                break;
+            default:
+                if ('\x00' <= *c && *c <= '\x1f') {
+                    o << "\\u"
+                      << std::hex << std::setw(4) << std::setfill('0') << (int)*c;
+                } else {
+                    o << *c;
+                }
+        }
+    }
+    return o.str();
+}
+
+}
+
+#endif //ASAPO_COMMON_CPP_INCLUDE_ASAPO_COMMON_UTILS_H_
diff --git a/common/cpp/include/asapo/json_parser/json_parser.h b/common/cpp/include/asapo/json_parser/json_parser.h
index b8bd14f69bca16c77058354b965c6a5b4b4c84cc..a2baa383dc52f4b65afadae2029b5a70cae9f255 100644
--- a/common/cpp/include/asapo/json_parser/json_parser.h
+++ b/common/cpp/include/asapo/json_parser/json_parser.h
@@ -49,7 +49,6 @@ class JsonFileParser : public JsonParser {
     JsonFileParser(const std::string& json, const std::unique_ptr<IO>* io = nullptr): JsonParser(json, io) {};
 };
 
-
 }
 
 
diff --git a/common/cpp/include/asapo/logger/logger.h b/common/cpp/include/asapo/logger/logger.h
index e4dc868959f8b5347b95a7a51b19623f166a6e73..4bd212dc240a3f6d8cdaf1f1012cc156d51c100f 100644
--- a/common/cpp/include/asapo/logger/logger.h
+++ b/common/cpp/include/asapo/logger/logger.h
@@ -27,10 +27,11 @@ class LogMessageWithFields {
     LogMessageWithFields& Append(std::string key, uint64_t val);
     LogMessageWithFields& Append(std::string key, double val, int precision);
     LogMessageWithFields& Append(const LogMessageWithFields& log_msg);
+    LogMessageWithFields& Append(std::string key, const LogMessageWithFields& log_msg);
     LogMessageWithFields& Append(std::string key, std::string val);
     std::string LogString() const;
   private:
-    inline std::string QuoteIFNeeded();
+    inline std::string CommaIfNeeded();
     std::string log_string_;
 };
 
diff --git a/common/cpp/src/database/mongodb_client.cpp b/common/cpp/src/database/mongodb_client.cpp
index 50db4ae16cccf1f3dacd8701e708cb7e2c3ba968..bfb0d8ffbb6cf083740edbe149962206840251b9 100644
--- a/common/cpp/src/database/mongodb_client.cpp
+++ b/common/cpp/src/database/mongodb_client.cpp
@@ -46,7 +46,7 @@ Error MongoDBClient::Ping() {
     bson_destroy(&reply);
     bson_destroy(command);
 
-    return !retval ? DBErrorTemplates::kConnectionError.Generate() : nullptr;
+    return !retval ? DBErrorTemplates::kConnectionError.Generate("cannot ping database") : nullptr;
 
 }
 MongoDBClient::MongoDBClient() {
@@ -58,7 +58,7 @@ Error MongoDBClient::InitializeClient(const std::string& address) {
     client_ = mongoc_client_new(uri_str.c_str());
 
     if (client_ == nullptr) {
-        return DBErrorTemplates::kBadAddress.Generate();
+        return DBErrorTemplates::kBadAddress.Generate("cannot initialize database");
     }
 
     write_concern_ = mongoc_write_concern_new();
@@ -163,7 +163,7 @@ bson_p PrepareUpdateDocument(const uint8_t* json, Error* err) {
     std::string json_flat;
     auto parser_err = parser.GetFlattenedString("meta", ".", &json_flat);
     if (parser_err) {
-        *err = DBErrorTemplates::kJsonParseError.Generate("cannof flatten meta " + parser_err->Explain());
+        *err = DBErrorTemplates::kJsonParseError.Generate("cannot flatten meta", std::move(parser_err));
         return nullptr;
     }
     bson_error_t mongo_err;
@@ -832,7 +832,7 @@ Error MongoDBClient::GetMetaFromDb(const std::string& collection, const std::str
     err = parser.Embedded("meta").GetRawString(res);
     if (err) {
         return DBErrorTemplates::kJsonParseError.Generate(
-                   "GetMetaFromDb: cannot parse database response: " + err->Explain());
+                   "GetMetaFromDb: cannot parse database response",std::move(err));
     }
     return nullptr;
 }
diff --git a/common/cpp/src/logger/logger.cpp b/common/cpp/src/logger/logger.cpp
index 546112130bee461f1dc39e9160e0ecddbe865454..e277c2da577b8f2d429b77bdb29627b4c4e3a58d 100644
--- a/common/cpp/src/logger/logger.cpp
+++ b/common/cpp/src/logger/logger.cpp
@@ -6,7 +6,7 @@
 
 namespace asapo {
 
-Logger CreateLogger(std::string name, bool console, bool centralized_log, const std::string& endpoint_uri) {
+Logger CreateLogger(std::string name, bool console, bool centralized_log, const std::string &endpoint_uri) {
     auto logger = new SpdLogger{name, endpoint_uri};
     logger->SetLogLevel(LogLevel::Info);
     if (console) {
@@ -19,15 +19,15 @@ Logger CreateLogger(std::string name, bool console, bool centralized_log, const
     return Logger{logger};
 }
 
-Logger CreateDefaultLoggerBin(const std::string& name) {
+Logger CreateDefaultLoggerBin(const std::string &name) {
     return CreateLogger(name, true, false, "");
 }
 
-Logger CreateDefaultLoggerApi(const std::string& name, const std::string& endpoint_uri) {
+Logger CreateDefaultLoggerApi(const std::string &name, const std::string &endpoint_uri) {
     return CreateLogger(name, false, true, endpoint_uri);
 }
 
-LogLevel StringToLogLevel(const std::string& name, Error* err) {
+LogLevel StringToLogLevel(const std::string &name, Error *err) {
     *err = nullptr;
     if (name == "debug") return LogLevel::Debug;
     if (name == "info") return LogLevel::Info;
@@ -40,7 +40,7 @@ LogLevel StringToLogLevel(const std::string& name, Error* err) {
 }
 
 template<typename ... Args>
-std::string string_format(const std::string& format, Args ... args) {
+std::string string_format(const std::string &format, Args ... args) {
     size_t size = static_cast<size_t>(snprintf(nullptr, 0, format.c_str(), args ...) + 1);
     std::unique_ptr<char[]> buf(new char[size]);
     snprintf(buf.get(), size, format.c_str(), args ...);
@@ -52,39 +52,40 @@ std::string EncloseQuotes(std::string str) {
 }
 
 LogMessageWithFields::LogMessageWithFields(std::string key, uint64_t val) {
-    log_string_ = EncloseQuotes(key) + ":" + std::to_string(val);
+    log_string_ = EncloseQuotes(std::move(key)) + ":" + std::to_string(val);
 }
 
 LogMessageWithFields::LogMessageWithFields(std::string key, double val, int precision) {
-    log_string_ = EncloseQuotes(key) + ":" + string_format("%." + std::to_string(precision) + "f", val);
+    log_string_ = EncloseQuotes(std::move(key)) + ":" + string_format("%." + std::to_string(precision) + "f", val);
 }
 
 LogMessageWithFields::LogMessageWithFields(std::string val) {
     if (!val.empty()) {
-        log_string_ = EncloseQuotes("message") + ":" + EncloseQuotes(escape_json(val));
+        log_string_ = EncloseQuotes("message") + ":" + EncloseQuotes(EscapeJson(val));
     }
 }
 
 LogMessageWithFields::LogMessageWithFields(std::string key, std::string val) {
-    log_string_ = EncloseQuotes(key) + ":" + EncloseQuotes(escape_json(val));
+    log_string_ = EncloseQuotes(std::move(key)) + ":" + EncloseQuotes(EscapeJson(val));
 }
 
-inline std::string LogMessageWithFields::QuoteIFNeeded() {
+inline std::string LogMessageWithFields::CommaIfNeeded() {
     return log_string_.empty() ? "" : ",";
 }
 
-LogMessageWithFields& LogMessageWithFields::Append(std::string key, uint64_t val) {
-    log_string_ += QuoteIFNeeded() + EncloseQuotes(key) + ":" + std::to_string(val);
+LogMessageWithFields &LogMessageWithFields::Append(std::string key, uint64_t val) {
+    log_string_ += CommaIfNeeded() + EncloseQuotes(std::move(key)) + ":" + std::to_string(val);
     return *this;
 }
 
-LogMessageWithFields& LogMessageWithFields::Append(std::string key, double val, int precision) {
-    log_string_ += QuoteIFNeeded() + EncloseQuotes(key) + ":" + string_format("%." + std::to_string(precision) + "f", val);
+LogMessageWithFields &LogMessageWithFields::Append(std::string key, double val, int precision) {
+    log_string_ += CommaIfNeeded() + EncloseQuotes(std::move(key)) + ":"
+        + string_format("%." + std::to_string(precision) + "f", val);
     return *this;
 }
 
-LogMessageWithFields& LogMessageWithFields::Append(std::string key, std::string val) {
-    log_string_ += QuoteIFNeeded() + EncloseQuotes(key) + ":" + EncloseQuotes(escape_json(val));
+LogMessageWithFields &LogMessageWithFields::Append(std::string key, std::string val) {
+    log_string_ += CommaIfNeeded() + EncloseQuotes(std::move(key)) + ":" + EncloseQuotes(EscapeJson(val));
     return *this;
 }
 
@@ -92,11 +93,17 @@ std::string LogMessageWithFields::LogString() const {
     return log_string_;
 }
 
-LogMessageWithFields::LogMessageWithFields(const Error& error) {
+LogMessageWithFields::LogMessageWithFields(const Error &error) {
     log_string_ = error->ExplainInJSON();
 }
-LogMessageWithFields& LogMessageWithFields::Append(const LogMessageWithFields& log_msg) {
-    log_string_ += QuoteIFNeeded() + log_msg.LogString();
+
+LogMessageWithFields &LogMessageWithFields::Append(const LogMessageWithFields &log_msg) {
+    log_string_ += CommaIfNeeded() + log_msg.LogString();
+    return *this;
+}
+
+LogMessageWithFields &LogMessageWithFields::Append(std::string key, const LogMessageWithFields &log_msg) {
+    log_string_ += CommaIfNeeded() + EncloseQuotes(std::move(key)) + ":{" + log_msg.LogString() + "}";
     return *this;
 }
 
diff --git a/common/cpp/src/logger/spd_logger.cpp b/common/cpp/src/logger/spd_logger.cpp
index 21f9f6ec381949f793b39b5cac63d05ab3ceb1e5..cf66d44ff91ffb9b0323d53d701c020a40aa0f59 100644
--- a/common/cpp/src/logger/spd_logger.cpp
+++ b/common/cpp/src/logger/spd_logger.cpp
@@ -3,8 +3,8 @@
 #include "fluentd_sink.h"
 
 #include <sstream>
-#include <iomanip>
 
+#include "asapo/common/utils.h"
 
 namespace asapo {
 
@@ -30,46 +30,9 @@ void SpdLogger::SetLogLevel(LogLevel level) {
     }
 }
 
-std::string escape_json(const std::string& s) {
-    std::ostringstream o;
-    for (auto c = s.cbegin(); c != s.cend(); c++) {
-        switch (*c) {
-        case '"':
-            o << "\\\"";
-            break;
-        case '\\':
-            o << "\\\\";
-            break;
-        case '\b':
-            o << "\\b";
-            break;
-        case '\f':
-            o << "\\f";
-            break;
-        case '\n':
-            o << "\\n";
-            break;
-        case '\r':
-            o << "\\r";
-            break;
-        case '\t':
-            o << "\\t";
-            break;
-        default:
-            if ('\x00' <= *c && *c <= '\x1f') {
-                o << "\\u"
-                  << std::hex << std::setw(4) << std::setfill('0') << (int)*c;
-            } else {
-                o << *c;
-            }
-        }
-    }
-    return o.str();
-}
-
 std::string EncloseMsg(std::string msg) {
     if (msg.find("\"") != 0) {
-        return std::string(R"("message":")") + escape_json(msg) + "\"";
+        return std::string(R"("message":")") + EscapeJson(msg) + "\"";
     } else {
         return msg;
     }
diff --git a/common/cpp/src/logger/spd_logger.h b/common/cpp/src/logger/spd_logger.h
index 1c2485f95f8520012223e06b4609a28aeffd89fd..4b4ea94a2942b18b560575adf0eadae903a372cb 100644
--- a/common/cpp/src/logger/spd_logger.h
+++ b/common/cpp/src/logger/spd_logger.h
@@ -38,7 +38,7 @@ class SpdLogger : public AbstractLogger {
 };
 
 std::string EncloseMsg(std::string msg);
-std::string escape_json(const std::string& s);
+std::string EscapeJson(const std::string& s);
 
 }
 
diff --git a/common/cpp/src/system_io/system_io.cpp b/common/cpp/src/system_io/system_io.cpp
index 3165a6a3d26c5c4654dbcb09f5f6dfd57fd015ed..d19f0f46fada88b333e15763c3ea027937e79e6c 100644
--- a/common/cpp/src/system_io/system_io.cpp
+++ b/common/cpp/src/system_io/system_io.cpp
@@ -116,7 +116,7 @@ MessageData SystemIO::GetDataFromFile(const std::string& fname, uint64_t* fsize,
 
     Read(fd, data_array, (size_t)*fsize, err);
     if (*err != nullptr) {
-        (*err)->AddContext("name", fname)->AddContext("expected size", std::to_string(*fsize));
+        (*err)->AddDetails("name", fname)->AddDetails("expected size", std::to_string(*fsize));
         Close(fd, nullptr);
         return nullptr;
     }
@@ -167,7 +167,8 @@ FileDescriptor SystemIO::OpenWithCreateFolders(const std::string& root_folder, c
     if (*err == IOErrorTemplates::kFileNotFound && create_directories)  {
         size_t pos = fname.rfind(kPathSeparator);
         if (pos == std::string::npos) {
-            *err = IOErrorTemplates::kFileNotFound.Generate(full_name);
+            *err = IOErrorTemplates::kFileNotFound.Generate();
+            (*err)->AddDetails("name",fname);
             return -1;
         }
         *err = CreateDirectoryWithParents(root_folder, fname.substr(0, pos));
@@ -191,7 +192,7 @@ Error SystemIO::WriteDataToFile(const std::string& root_folder, const std::strin
 
     Write(fd, data, length, &err);
     if (err) {
-        err->AddContext("name", fname);
+        err->AddDetails("name", fname);
         return err;
     }
 
@@ -402,7 +403,7 @@ asapo::FileDescriptor asapo::SystemIO::Open(const std::string& filename,
     FileDescriptor fd = _open(filename.c_str(), flags);
     if (fd == -1) {
         *err = GetLastError();
-        (*err)->AddContext("name", filename);
+        (*err)->AddDetails("name", filename);
     } else {
         *err = nullptr;
     }
@@ -616,7 +617,7 @@ Error SystemIO::CreateDirectoryWithParents(const std::string& root_path, const s
         Error err;
         CreateNewDirectory(new_path, &err);
         if (err && err != IOErrorTemplates::kFileAlreadyExists) {
-            err->AddContext("name", new_path);
+            err->AddDetails("name", new_path);
             return err;
         }
         if (iter != path.end()) {
diff --git a/common/cpp/src/system_io/system_io_linux.cpp b/common/cpp/src/system_io/system_io_linux.cpp
index f4256bddba5fdecb3e9bb9869943d03c9c642d81..18c4f1afe81635c58728981800b5a27c5cf9a3d6 100644
--- a/common/cpp/src/system_io/system_io_linux.cpp
+++ b/common/cpp/src/system_io/system_io_linux.cpp
@@ -28,7 +28,7 @@ Error SystemIO::AddToEpool(SocketDescriptor sd) const {
     event.data.fd = sd;
     if((epoll_ctl(epoll_fd_, EPOLL_CTL_ADD, sd, &event) == -1) && (errno != EEXIST)) {
         auto err =  GetLastError();
-        err->AddContext("where", "add to epoll");
+        err->AddDetails("where", "add to epoll");
         close(epoll_fd_);
         return err;
     }
@@ -43,7 +43,7 @@ Error SystemIO::CreateEpoolIfNeeded(SocketDescriptor master_socket) const {
     epoll_fd_ = epoll_create1(0);
     if(epoll_fd_ == kDisconnectedSocketDescriptor) {
         auto err = GetLastError();
-        err->AddContext("where", "create epoll");
+        err->AddDetails("where", "create epoll");
         return err;
     }
     return AddToEpool(master_socket);
@@ -84,7 +84,7 @@ ListSocketDescriptors SystemIO::WaitSocketsActivity(SocketDescriptor master_sock
         }
         if (event_count < 0) {
             *err = GetLastError();
-            (*err)->AddContext("where", "epoll wait");
+            (*err)->AddDetails("where", "epoll wait");
             return {};
         }
 
diff --git a/common/cpp/src/system_io/system_io_linux_mac.cpp b/common/cpp/src/system_io/system_io_linux_mac.cpp
index 371c12e4d3464ac2478bab25c8733bf068f5989a..8aba5f76925436a59239ea434a42f1384dacb780 100644
--- a/common/cpp/src/system_io/system_io_linux_mac.cpp
+++ b/common/cpp/src/system_io/system_io_linux_mac.cpp
@@ -68,7 +68,7 @@ Error GetLastErrorFromErrno() {
         return IOErrorTemplates::kBrokenPipe.Generate();
     default:
         Error err = IOErrorTemplates::kUnknownIOError.Generate();
-        (*err).AddContext("Unknown error code: ", std::to_string(errno));
+        (*err).AddDetails("Unknown error code: ", std::to_string(errno));
         return err;
     }
 }
@@ -122,7 +122,7 @@ MessageMeta GetMessageMeta(const string& name, Error* err) {
 
     auto t_stat = FileStat(name, err);
     if (*err != nullptr) {
-        (*err)->AddContext("name", name);
+        (*err)->AddDetails("name", name);
         return MessageMeta{};
     }
 
@@ -157,7 +157,7 @@ void SystemIO::GetSubDirectoriesRecursively(const std::string& path, SubDirList*
     auto dir = opendir((path).c_str());
     if (dir == nullptr) {
         *err = GetLastError();
-        (*err)->AddContext("name", path);
+        (*err)->AddDetails("name", path);
         return;
     }
 
@@ -183,7 +183,7 @@ void SystemIO::CollectMessageMetarmationRecursively(const std::string& path,
     auto dir = opendir((path).c_str());
     if (dir == nullptr) {
         *err = GetLastError();
-        (*err)->AddContext("name", path);
+        (*err)->AddDetails("name", path);
         return;
     }
 
diff --git a/common/cpp/src/system_io/system_io_windows.cpp b/common/cpp/src/system_io/system_io_windows.cpp
index b847d747b70f0a9c4204ce375518453fbc505475..d8df3328f521effb03467346b5f8b8cef581d189 100644
--- a/common/cpp/src/system_io/system_io_windows.cpp
+++ b/common/cpp/src/system_io/system_io_windows.cpp
@@ -66,7 +66,7 @@ Error IOErrorFromGetLastError() {
     default:
         std::cout << "[IOErrorFromGetLastError] Unknown error code: " << last_error << std::endl;
         Error err = IOErrorTemplates::kUnknownIOError.Generate();
-        (*err).AddContext("Unknown error code", std::to_string(last_error));
+        (*err).AddDetails("Unknown error code", std::to_string(last_error));
         return err;
     }
 }
@@ -151,7 +151,7 @@ MessageMeta SystemIO::GetMessageMeta(const std::string& name, Error* err) const
     auto hFind = FindFirstFile(name.c_str(), &f);
     if (hFind == INVALID_HANDLE_VALUE) {
         *err = IOErrorFromGetLastError();
-        (*err)->AddContext("name", name);
+        (*err)->AddDetails("name", name);
         return {};
     }
     FindClose(hFind);
@@ -179,7 +179,7 @@ void SystemIO::GetSubDirectoriesRecursively(const std::string& path, SubDirList*
     HANDLE handle = FindFirstFile((path + "\\*.*").c_str(), &find_data);
     if (handle == INVALID_HANDLE_VALUE) {
         *err = IOErrorFromGetLastError();
-        (*err)->AddContext("name", path);
+        (*err)->AddDetails("name", path);
         return;
     }
 
@@ -208,7 +208,7 @@ void SystemIO::CollectMessageMetarmationRecursively(const std::string& path,
     HANDLE handle = FindFirstFile((path + "\\*.*").c_str(), &find_data);
     if (handle == INVALID_HANDLE_VALUE) {
         *err = IOErrorFromGetLastError();
-        (*err)->AddContext("name", path);
+        (*err)->AddDetails("name", path);
         return;
     }
 
diff --git a/common/cpp/unittests/common/test_error.cpp b/common/cpp/unittests/common/test_error.cpp
index c588ea0526be16f5977367ee9e5fdb36cdaf3cee..4d459ba1a8b9fcfda124f432f13bc2d1702e32d0 100644
--- a/common/cpp/unittests/common/test_error.cpp
+++ b/common/cpp/unittests/common/test_error.cpp
@@ -27,13 +27,13 @@ TEST(ErrorTemplate, Explain) {
     ASSERT_THAT(error->Explain(), HasSubstr("test"));
 }
 
-TEST(ErrorTemplate, Context) {
+TEST(ErrorTemplate, Details) {
     Error error = asapo::GeneralErrorTemplates::kEndOfFile.Generate("test");
-    error->AddContext("key", "value");
-    error->AddContext("key2", "value2");
+    error->AddDetails("key", "value");
+    error->AddDetails("key2", "value2");
 
     ASSERT_THAT(error->Explain(), AllOf(HasSubstr("test"),
-                                        HasSubstr("context"),
+                                        HasSubstr("details"),
                                         HasSubstr("key:value"),
                                         HasSubstr("key2:value2")
                                        ));
@@ -43,8 +43,8 @@ TEST(ErrorTemplate, Cause) {
     Error error = asapo::GeneralErrorTemplates::kEndOfFile.Generate("test");
     Error error_c = asapo::GeneralErrorTemplates::kMemoryAllocationError.Generate("cause_test");
     Error error_c1 = asapo::GeneralErrorTemplates::kSimpleError.Generate("simple error");
-    error->AddContext("key", "value");
-    error_c->AddContext("key2", "value2");
+    error->AddDetails("key", "value");
+    error_c->AddDetails("key2", "value2");
     error_c->SetCause(std::move(error_c1));
     error->SetCause(std::move(error_c));
     ASSERT_THAT(error->Explain(), AllOf(HasSubstr("test"),
@@ -64,10 +64,10 @@ TEST(ErrorTemplate, Cause) {
 TEST(ErrorTemplate, Json) {
     Error error = asapo::GeneralErrorTemplates::kEndOfFile.Generate("test");
     Error error_c = asapo::GeneralErrorTemplates::kMemoryAllocationError.Generate("cause_test");
-    error->AddContext("key", "value");
+    error->AddDetails("key", "value");
     error->SetCause(std::move(error_c));
     auto expected_string =
-        R"("error":"end of file","message":"test","context":{"key":"value"},"cause":{"error":"memory allocation","message":"cause_test"})";
+        R"("error":"end of file","message":"test","details":{"key":"value"},"cause":{"error":"memory allocation","message":"cause_test"})";
     ASSERT_THAT(error->ExplainInJSON(),  Eq(expected_string));
 }
 
diff --git a/common/cpp/unittests/json_parser/test_json_parser.cpp b/common/cpp/unittests/json_parser/test_json_parser.cpp
index dfc5f3c6c08e634e230ca637a6314f52c60cea9e..d3017cac350dbbdd9ad9182472df99423baad8c0 100644
--- a/common/cpp/unittests/json_parser/test_json_parser.cpp
+++ b/common/cpp/unittests/json_parser/test_json_parser.cpp
@@ -281,4 +281,17 @@ TEST_F(ParseFileTests, Flatten) {
 }
 
 
+TEST(ParseString, RawString) {
+    std::string json = R"({"top":"top","embedded":{"ar":[2,2,3],"str":"text"}})";
+    std::string json_row = R"({"ar":[2,2,3],"str":"text"})";
+    JsonStringParser parser{json};
+
+    std::string res;
+    auto err = parser.Embedded("embedded").GetRawString(&res);
+    ASSERT_THAT(err, Eq(nullptr));
+    ASSERT_THAT(res, Eq(json_row));
+
+}
+
+
 }
diff --git a/common/go/src/asapo_common/logger/logger.go b/common/go/src/asapo_common/logger/logger.go
index 0f026aa18b9e777a1b72089ca32d3685b97cf6f0..6a87f810ce77a1b9549f127d426dee7070413a04 100644
--- a/common/go/src/asapo_common/logger/logger.go
+++ b/common/go/src/asapo_common/logger/logger.go
@@ -17,6 +17,7 @@ const (
 )
 
 type Logger interface {
+	WithFields(args map[string]interface{}) Logger
 	Info(args ...interface{})
 	Debug(args ...interface{})
 	Fatal(args ...interface{})
@@ -28,6 +29,10 @@ type Logger interface {
 
 var my_logger Logger = &logRusLogger{}
 
+func WithFields(args map[string]interface{}) Logger {
+	return my_logger.WithFields(args)
+}
+
 func Info(args ...interface{}) {
 	my_logger.Info(args...)
 }
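
Since WithFields returns a Logger, callers can build a field set once and extend it per entry. A short usage sketch against the interface above; the field names and values are examples only:

package main

import "asapo_common/logger"

func main() {
	// Attach the fields that stay constant for the whole request once.
	rlog := logger.WithFields(map[string]interface{}{
		"beamtime": "11012171",
		"stream":   "default",
	})
	rlog.Debug("got request") // emitted with beamtime and stream attached

	// WithFields returns a Logger, so extra fields can be chained in for a
	// single entry without touching the base logger.
	rlog.WithFields(map[string]interface{}{"cause": "no data"}).Error("cannot process request")
}
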
diff --git a/common/go/src/asapo_common/logger/logrus_logger.go b/common/go/src/asapo_common/logger/logrus_logger.go
index 4625f27492f47efe5d14ed1a1032b4810c418572..88b41114303b39f073050cb86753c101f6122af9 100644
--- a/common/go/src/asapo_common/logger/logrus_logger.go
+++ b/common/go/src/asapo_common/logger/logrus_logger.go
@@ -13,11 +13,22 @@ func (l *logRusLogger) SetSource(source string) {
 	l.source = source
 }
 
+
+func (l *logRusLogger) WithFields(args map[string]interface{}) Logger {
+	new_log := &logRusLogger{
+		logger_entry: l.entry().WithFields(args),
+		source:       l.source,
+	}
+	return new_log
+}
+
+
 func (l *logRusLogger) entry() *log.Entry {
 	if l.logger_entry != nil {
 		return l.logger_entry
 	}
 
+
 	formatter := &log.JSONFormatter{
 		FieldMap: log.FieldMap{
 			log.FieldKeyMsg: "message",
diff --git a/common/go/src/asapo_common/logger/logrus_test.go b/common/go/src/asapo_common/logger/logrus_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..d6997f88845eb14da5ebb17cd1111f2d50647e45
--- /dev/null
+++ b/common/go/src/asapo_common/logger/logrus_test.go
@@ -0,0 +1,28 @@
+package logger
+
+import (
+	"github.com/sirupsen/logrus/hooks/test"
+	"github.com/stretchr/testify/assert"
+	"testing"
+)
+
+func logStr(hook *test.Hook) string {
+	s := ""
+	for _, entry := range hook.AllEntries() {
+		ss, _ := entry.String()
+		s += ss
+	}
+	return s
+}
+
+func TestLog(t *testing.T) {
+	l := &logRusLogger{}
+	hook := test.NewLocal(l.entry().Logger)
+	l.WithFields(map[string]interface{}{"testmap1": 1}).Info("aaa")
+	assert.Contains(t, logStr(hook), "testmap1")
+
+	hook.Reset()
+	l.WithFields(map[string]interface{}{"testmap2": 1}).Info("bbb")
+	assert.NotContains(t, logStr(hook), "testmap1")
+
+}
diff --git a/common/go/src/asapo_common/logger/mock_logger.go b/common/go/src/asapo_common/logger/mock_logger.go
index 484b86cb0175db0e801cc3e11cf42592ecef123a..58c1df744f4b17e4c402290bb22f932347fd7512 100644
--- a/common/go/src/asapo_common/logger/mock_logger.go
+++ b/common/go/src/asapo_common/logger/mock_logger.go
@@ -16,6 +16,11 @@ func SetMockLog() {
 	my_logger = &MockLog
 }
 
+func (l *MockLogger) WithFields(args map[string]interface{}) Logger {
+	l.Called(args)
+	return l
+}
+
 func UnsetMockLog() {
 	my_logger = &logRusLogger{}
 }
diff --git a/common/go/src/asapo_common/utils/authorization.go b/common/go/src/asapo_common/utils/authorization.go
index d707819b9d11758a87f5f3538b204e8d76ed5ee3..8a1b11bb253e16da88b47834816a6235cd4e72f5 100644
--- a/common/go/src/asapo_common/utils/authorization.go
+++ b/common/go/src/asapo_common/utils/authorization.go
@@ -151,7 +151,7 @@ func (a *JWTAuth) CheckAndGetContent(token string, extraClaims interface{}, payl
 	// payload ignored
 	c, ok := CheckJWTToken(token,a.Key)
 	if !ok {
-		return nil,errors.New("wrong JWT token")
+		return nil,errors.New("wrong or expired JWT token")
 	}
 	claim,ok  := c.(*CustomClaims)
 	if !ok {
diff --git a/common/go/src/asapo_common/utils/authorization_test.go b/common/go/src/asapo_common/utils/authorization_test.go
index cda7f43b091bfba5dc5f8228199e6ae125f01933..b0add6be7436fdb17708243a900dddd5846a013d 100644
--- a/common/go/src/asapo_common/utils/authorization_test.go
+++ b/common/go/src/asapo_common/utils/authorization_test.go
@@ -1,11 +1,11 @@
 package utils
 
 import (
+	"github.com/stretchr/testify/assert"
 	"net/http"
-	"testing"
 	"net/http/httptest"
+	"testing"
 	"time"
-	"github.com/stretchr/testify/assert"
 )
 
 type authorizationResponse struct {
diff --git a/consumer/api/cpp/src/consumer.cpp b/consumer/api/cpp/src/consumer.cpp
index ea1fa64a91c5ec917fd05bf6b41c55baaa57523d..df44db24b879b5a268dd738e35f1c1556c69e52c 100644
--- a/consumer/api/cpp/src/consumer.cpp
+++ b/consumer/api/cpp/src/consumer.cpp
@@ -10,7 +10,7 @@ std::unique_ptr<Consumer> Create(const std::string& source_name,
                                  Error* error,
                                  Args&& ... args) noexcept {
     if (source_name.empty()) {
-        *error = ConsumerErrorTemplates::kWrongInput.Generate("Empty Data Source");
+        *error = ConsumerErrorTemplates::kWrongInput.Generate("empty data source");
         return nullptr;
     }
 
diff --git a/consumer/api/cpp/src/consumer_impl.cpp b/consumer/api/cpp/src/consumer_impl.cpp
index 0fda0f43709ff1e1d72915856043e11244415f49..7f623e67b2f9cf6dd09470f127770a6e78eeaa44 100644
--- a/consumer/api/cpp/src/consumer_impl.cpp
+++ b/consumer/api/cpp/src/consumer_impl.cpp
@@ -48,7 +48,9 @@ Error ConsumerErrorFromPartialDataResponse(const std::string& response) {
     PartialErrorData data;
     auto parse_error = GetPartialDataResponseFromJson(response, &data);
     if (parse_error) {
-        return ConsumerErrorTemplates::kInterruptedTransaction.Generate("malformed response - " + response);
+        auto err = ConsumerErrorTemplates::kInterruptedTransaction.Generate("malformed response" );
+        err->AddDetails("response",response);
+        return err;
     }
     auto err = ConsumerErrorTemplates::kPartialData.Generate();
     PartialErrorData* error_data = new PartialErrorData{data};
@@ -124,7 +126,7 @@ Error ProcessRequestResponce(const RequestInfo& request,
     }
 
     if (err != nullptr) {
-        err->AddContext("host", request.host)->AddContext("api", "request.api");
+        err->AddDetails("host", request.host)->AddDetails("api", request.api);
     }
     return err;
 
@@ -219,9 +221,9 @@ Error ConsumerImpl::ProcessDiscoverServiceResult(Error err, std::string* uri_to_
         if (err == ConsumerErrorTemplates::kUnsupportedClient) {
             return err;
         }
-        return ConsumerErrorTemplates::kUnavailableService.Generate(" on " + endpoint_
-                + (err != nullptr ? ": " + err->Explain()
-                   : ""));
+        auto ret_err = ConsumerErrorTemplates::kUnavailableService.Generate(std::move(err));
+        ret_err->AddDetails("destination",endpoint_);
+        return ret_err;
     }
     return nullptr;
 }
@@ -244,7 +246,8 @@ bool ConsumerImpl::SwitchToGetByIdIfPartialData(Error* err,
     if (*err == ConsumerErrorTemplates::kPartialData) {
         auto error_data = static_cast<const PartialErrorData*>((*err)->GetCustomData());
         if (error_data == nullptr) {
-            *err = ConsumerErrorTemplates::kInterruptedTransaction.Generate("malformed response - " + response);
+            *err = ConsumerErrorTemplates::kInterruptedTransaction.Generate("malformed response");
+            (*err)->AddDetails("response",response);
             return false;
         }
         *redirect_uri = std::to_string(error_data->id);
@@ -428,7 +431,7 @@ Error ConsumerImpl::GetDataFromFile(MessageMeta* info, MessageData* data) {
                                             (system_clock::now() - start).count());
     }
     if (err != nullptr) {
-        return ConsumerErrorTemplates::kLocalIOError.Generate(err->Explain());
+        return ConsumerErrorTemplates::kLocalIOError.Generate(std::move(err));
     }
     return nullptr;
 }
@@ -654,7 +657,8 @@ std::string ConsumerImpl::GetStreamMeta(const std::string& stream, Error* err) {
 DataSet DecodeDatasetFromResponse(std::string response, Error* err) {
     DataSet res;
     if (!res.SetFromJson(std::move(response))) {
-        *err = ConsumerErrorTemplates::kInterruptedTransaction.Generate("malformed response:" + response);
+        *err = ConsumerErrorTemplates::kInterruptedTransaction.Generate("malformed response");
+        (*err)->AddDetails("response",response);
         return {0, 0, MessageMetas{}};
     } else {
         return res;
diff --git a/deploy/asapo_services/run_maxwell.sh b/deploy/asapo_services/run_maxwell.sh
index d2a74ed1f22c7cd92adf9e18ce672383dc5f7bd4..13bd97f0a38617cd4092b1c84528df2d44f8459f 100755
--- a/deploy/asapo_services/run_maxwell.sh
+++ b/deploy/asapo_services/run_maxwell.sh
@@ -45,7 +45,7 @@ ASAPO_LIGHTWEIGHT_SERVICE_NODES=`scontrol show hostnames $SLURM_JOB_NODELIST | h
 mkdir -p $NOMAD_ALLOC_HOST_SHARED $SERVICE_DATA_CLUSTER_SHARED $DATA_GLOBAL_SHARED $MONGO_DIR
 chmod 777 $NOMAD_ALLOC_HOST_SHARED $SERVICE_DATA_CLUSTER_SHARED $DATA_GLOBAL_SHARED $MONGO_DIR
 cd $SERVICE_DATA_CLUSTER_SHARED
-mkdir esdatadir fluentd grafana influxdb mongodb
+mkdir esdatadir fluentd grafana influxdb mongodb prometheus alertmanager
 chmod 777 *
 
 #todo: elastic search check
diff --git a/deploy/asapo_services/scripts/asapo-monitoring.nmd.tpl b/deploy/asapo_services/scripts/asapo-monitoring.nmd.tpl
index 677e5413fa0000c45754de72151909eb135b79d4..0f1a154934e3e227e6e9cb3245c494c2bdcb5344 100644
--- a/deploy/asapo_services/scripts/asapo-monitoring.nmd.tpl
+++ b/deploy/asapo_services/scripts/asapo-monitoring.nmd.tpl
@@ -27,6 +27,8 @@ job "asapo-monitoring" {
       driver = "docker"
       user = "${asapo_user}"
       config {
+        security_opt = ["no-new-privileges"]
+        userns_mode = "host"
         image = "prom/alertmanager:${alertmanager_version}"
         args = [
           "--web.route-prefix=/alertmanager/",
@@ -87,6 +89,8 @@ job "asapo-monitoring" {
       driver = "docker"
       user = "${asapo_user}"
       config {
+        security_opt = ["no-new-privileges"]
+        userns_mode = "host"
         image = "prom/prometheus:${prometheus_version}"
         args = [
           "--web.external-url=/prometheus/",
diff --git a/discovery/src/asapo_discovery/server/get_version.go b/discovery/src/asapo_discovery/server/get_version.go
index 7c1127df3bd049016b2d1be5e4ecb132963a6786..655b7052b17b1a934d018a4ca4b5dbbc8c2ff36c 100644
--- a/discovery/src/asapo_discovery/server/get_version.go
+++ b/discovery/src/asapo_discovery/server/get_version.go
@@ -27,10 +27,11 @@ func extractProtocol(r *http.Request) (string, error) {
 }
 
 func routeGetVersion(w http.ResponseWriter, r *http.Request) {
-	log_str := "processing get version"
+	log_str := "processing get version request"
 	logger.Debug(log_str)
 
 	if ok := checkDiscoveryApiVersion(w, r); !ok {
+		logger.Debug("checkDiscoveryApiVersion failed")
 		return
 	}
 	keys := r.URL.Query()
diff --git a/discovery/src/asapo_discovery/server/routes.go b/discovery/src/asapo_discovery/server/routes.go
index 2e0b31972864702c63af499ab9dc7335b13bae25..9cbc5b920a84dd4e2c51d02257d9263ea40b98a2 100644
--- a/discovery/src/asapo_discovery/server/routes.go
+++ b/discovery/src/asapo_discovery/server/routes.go
@@ -15,12 +15,15 @@ func getService(service string) (answer []byte, code int) {
 		answer, err = requestHandler.GetSingleService(service)
 
 	}
-	log_str := "processing get " + service
+	log_str := "processing get " + service + " request"
 	if err != nil {
 		logger.Error(log_str + " - " + err.Error())
 		return []byte(err.Error()), http.StatusInternalServerError
 	}
-	logger.Debug(log_str + " -  got " + string(answer))
+	logger.WithFields(map[string]interface{}{
+		"service": service,
+		"answer":  string(answer),
+	}).Debug("processing get service request")
 	return answer, http.StatusOK
 }
 
@@ -39,7 +42,6 @@ func validateProtocol(w http.ResponseWriter, r *http.Request, client string) boo
 		logger.Error(log_str + " - " + hint)
 		return false
 	}
-	logger.Debug(log_str + " - ok")
 	return true
 }
 
diff --git a/discovery/src/asapo_discovery/server/routes_test.go b/discovery/src/asapo_discovery/server/routes_test.go
index 394a2625047932bd968d8f4b5020c0249369a305..98db36704f24217350427b12e3584f83da491e26 100644
--- a/discovery/src/asapo_discovery/server/routes_test.go
+++ b/discovery/src/asapo_discovery/server/routes_test.go
@@ -1,17 +1,17 @@
 package server
 
 import (
+	"asapo_common/logger"
+	"asapo_common/utils"
 	"asapo_common/version"
+	"asapo_discovery/common"
+	"asapo_discovery/request_handler"
 	"github.com/stretchr/testify/mock"
 	"github.com/stretchr/testify/suite"
-	"asapo_common/logger"
-	"asapo_common/utils"
 	"net/http"
 	"net/http/httptest"
 	"strings"
 	"testing"
-	"asapo_discovery/request_handler"
-	"asapo_discovery/common"
 )
 
 func containsMatcher(substr string) func(str string) bool {
@@ -74,8 +74,8 @@ var receiverTests = []requestTest {
 func (suite *GetServicesTestSuite) TestGetReceivers() {
 	for _,test:= range receiverTests {
 		if test.code == http.StatusOK {
-			logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("validating producer")))
-			logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing get "+common.NameReceiverService)))
+			logger.MockLog.On("WithFields", mock.Anything)
+			logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("request")))
 		} else {
 			logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("validating producer")))
 		}
@@ -99,8 +99,8 @@ var brokerTests = []requestTest {
 func (suite *GetServicesTestSuite) TestGetBroker() {
 	for _,test:= range brokerTests {
 		if test.code == http.StatusOK {
-			logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("validating consumer")))
-			logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing get "+common.NameBrokerService)))
+			logger.MockLog.On("WithFields", mock.Anything)
+			logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("request")))
 		} else {
 			logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("validating consumer")))
 		}
@@ -117,7 +117,8 @@ func (suite *GetServicesTestSuite) TestGetBroker() {
 
 
 func (suite *GetServicesTestSuite) TestGetMongo() {
-	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing get "+common.NameMongoService)))
+	logger.MockLog.On("WithFields", mock.Anything)
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("request")))
 
 	w := doRequest("/asapo-mongodb")
 
@@ -127,8 +128,8 @@ func (suite *GetServicesTestSuite) TestGetMongo() {
 }
 
 func (suite *GetServicesTestSuite) TestGetFts() {
-	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing get "+common.NameFtsService)))
-	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("validating")))
+	logger.MockLog.On("WithFields", mock.Anything)
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("request")))
 
 	w := doRequest("/" + version.GetDiscoveryApiVersion()+"/asapo-file-transfer?protocol=v0.1")
 
@@ -138,7 +139,7 @@ func (suite *GetServicesTestSuite) TestGetFts() {
 }
 
 func (suite *GetServicesTestSuite) TestGetVersions() {
-	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing get version")))
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("request")))
 
 	w := doRequest("/" + version.GetDiscoveryApiVersion() + "/version")
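
Because the output is now structured, the tests only register a catch-all WithFields expectation and match a substring of the final message. A sketch of that pattern with a hypothetical testify-mocked logger follows; the real logger.MockLog type may differ.

// Sketch of the relaxed expectations used above, with a hypothetical mocked
// logger; only the testify calls (On, MatchedBy, Anything) are real API.
package server_test

import (
	"strings"
	"testing"

	"github.com/stretchr/testify/mock"
)

type mockLogger struct{ mock.Mock }

func (m *mockLogger) WithFields(f map[string]interface{}) *mockLogger {
	m.Called(f)
	return m
}

func (m *mockLogger) Debug(msg string) { m.Called(msg) }

func containsMatcher(substr string) func(string) bool {
	return func(s string) bool { return strings.Contains(s, substr) }
}

func TestStructuredLogExpectations(t *testing.T) {
	l := new(mockLogger)
	l.On("WithFields", mock.Anything)
	l.On("Debug", mock.MatchedBy(containsMatcher("request")))

	// the code under test would do something like:
	l.WithFields(map[string]interface{}{"service": "asapo-mongodb"}).
		Debug("processing get service request")

	l.AssertExpectations(t)
}
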
 
diff --git a/docs/site/examples/start_asapo_socket.sh b/docs/site/examples/start_asapo_socket.sh
index a50a87ff98087e3a338bb7c9d7e7726d897cd51e..0150a44a8898ad3cc282decdb4f99f52ab82641e 100644
--- a/docs/site/examples/start_asapo_socket.sh
+++ b/docs/site/examples/start_asapo_socket.sh
@@ -16,7 +16,7 @@ mkdir -p $NOMAD_ALLOC_HOST_SHARED $SERVICE_DATA_CLUSTER_SHARED $DATA_GLOBAL_SHAR
 chmod 777 $NOMAD_ALLOC_HOST_SHARED $SERVICE_DATA_CLUSTER_SHARED $DATA_GLOBAL_SHARED $DATA_GLOBAL_SHARED_ONLINE
 
 cd $SERVICE_DATA_CLUSTER_SHARED
-mkdir -p fluentd grafana influxdb influxdb2 mongodb
+mkdir -p fluentd grafana influxdb influxdb2 mongodb prometheus alertmanager
 chmod 777 *
 
 docker run --privileged --rm -v /var/run/docker.sock:/var/run/docker.sock \
diff --git a/docs/site/examples/start_asapo_tcp.sh b/docs/site/examples/start_asapo_tcp.sh
index 007595fe59d1df7a3bcc8c0c0772faaa1b536ff9..4debb8e82089a4dc0d4bdb9fa744c41afa2b67f4 100644
--- a/docs/site/examples/start_asapo_tcp.sh
+++ b/docs/site/examples/start_asapo_tcp.sh
@@ -22,7 +22,7 @@ mkdir -p $NOMAD_ALLOC_HOST_SHARED $SERVICE_DATA_CLUSTER_SHARED $DATA_GLOBAL_SHAR
 chmod 777 $NOMAD_ALLOC_HOST_SHARED $SERVICE_DATA_CLUSTER_SHARED $DATA_GLOBAL_SHARED $DATA_GLOBAL_SHARED_ONLINE
 
 cd $SERVICE_DATA_CLUSTER_SHARED
-mkdir -p fluentd grafana influxdb2 mongodb
+mkdir -p fluentd grafana influxdb influxdb2 mongodb prometheus alertmanager
 chmod 777 *
 
 docker run --privileged --userns=host --security-opt no-new-privileges --rm \
diff --git a/file_transfer/src/asapo_file_transfer/server/transfer.go b/file_transfer/src/asapo_file_transfer/server/transfer.go
index 2e0cfb92755eae706c617ecb038d11664ad45c6d..9dffe4b1339814018318f456350db6125fcc44ba 100644
--- a/file_transfer/src/asapo_file_transfer/server/transfer.go
+++ b/file_transfer/src/asapo_file_transfer/server/transfer.go
@@ -11,10 +11,8 @@ import (
 	"os"
 	"path"
 	"path/filepath"
-	"strconv"
 )
 
-
 type fileTransferRequest struct {
 	Folder   string
 	FileName string
@@ -23,109 +21,113 @@ type fileTransferRequest struct {
 func Exists(name string) bool {
 	f, err := os.Open(name)
 	defer f.Close()
-	return err==nil
+	return err == nil
 }
 
-func checkClaim(r *http.Request,ver utils.VersionNum,request* fileTransferRequest) (int,error) {
+func checkClaim(r *http.Request, ver utils.VersionNum, request *fileTransferRequest) (int, error) {
 	var extraClaim structs.FolderTokenTokenExtraClaim
 	if err := utils.JobClaimFromContext(r, nil, &extraClaim); err != nil {
-		return http.StatusInternalServerError,err
+		return http.StatusInternalServerError, err
 	}
 	if ver.Id > 1 {
 		request.Folder = extraClaim.RootFolder
-		return http.StatusOK,nil
+		return http.StatusOK, nil
 	}
 
-	if extraClaim.RootFolder!=request.Folder {
-		err_txt := "access forbidden for folder "+request.Folder
-		log.Error("cannot transfer file: "+err_txt)
+	if extraClaim.RootFolder != request.Folder {
+		err_txt := "access forbidden for folder " + request.Folder
+		log.Error("cannot transfer file: " + err_txt)
 		return http.StatusUnauthorized, errors.New(err_txt)
 	}
-	return http.StatusOK,nil
+	return http.StatusOK, nil
 }
 
-func checkFileExists(r *http.Request,name string) (int,error) {
+func checkFileExists(r *http.Request, name string) (int, error) {
 	if !Exists(name) {
-		err_txt := "file "+name+" does not exist or cannot be read"
-		log.Error("cannot transfer file: "+err_txt)
-		return http.StatusNotFound,errors.New(err_txt)
+		err_txt := "file " + name + " does not exist or cannot be read"
+		log.Error("cannot transfer file: " + err_txt)
+		return http.StatusNotFound, errors.New(err_txt)
 	}
-	return http.StatusOK,nil
+	return http.StatusOK, nil
 
 }
 
-func checkRequest(r *http.Request, ver utils.VersionNum) (string,int,error) {
+func checkRequest(r *http.Request, ver utils.VersionNum) (string, int, error) {
 	var request fileTransferRequest
-	err := utils.ExtractRequest(r,&request)
+	err := utils.ExtractRequest(r, &request)
 	if err != nil {
-		return "",http.StatusBadRequest,err
+		return "", http.StatusBadRequest, err
 	}
 
-	if status,err := checkClaim(r,ver, &request); err != nil {
-		return "",status,err
+	if status, err := checkClaim(r, ver, &request); err != nil {
+		return "", status, err
 	}
 	var fullName string
 	if ver.Id == 1 { // protocol v0.1
-		fullName = filepath.Clean(request.Folder+string(os.PathSeparator)+request.FileName)
+		fullName = filepath.Clean(request.Folder + string(os.PathSeparator) + request.FileName)
 	} else {
-		fullName = filepath.Clean(request.Folder+string(os.PathSeparator)+request.FileName)
+		fullName = filepath.Clean(request.Folder + string(os.PathSeparator) + request.FileName)
 	}
 
-	if status,err := checkFileExists(r,fullName); err != nil {
-		return "",status,err
+	if status, err := checkFileExists(r, fullName); err != nil {
+		return "", status, err
 	}
-	return fullName,http.StatusOK,nil
+	return fullName, http.StatusOK, nil
 }
 
 func serveFile(w http.ResponseWriter, r *http.Request, fullName string) {
 	_, file := path.Split(fullName)
 	w.Header().Set("Content-Disposition", "attachment; filename=\""+file+"\"")
-	log.Debug("Transferring file " + fullName)
-	http.ServeFile(w,r, fullName)
+
+	log.WithFields(map[string]interface{}{
+		"name": fullName,
+	}).Debug("transferring file")
+
+	http.ServeFile(w, r, fullName)
 }
 
 func serveFileSize(w http.ResponseWriter, r *http.Request, fullName string) {
 	var fsize struct {
-		FileSize int64  `json:"file_size"`
+		FileSize int64 `json:"file_size"`
 	}
 
 	fi, err := os.Stat(fullName)
 	if err != nil {
-		utils.WriteServerError(w,err,http.StatusBadRequest)
-		log.Error("Error getting file size for " + fullName+": "+err.Error())
+		utils.WriteServerError(w, err, http.StatusBadRequest)
+		log.Error("error getting file size for " + fullName + ": " + err.Error())
+		return // fi is not valid when Stat fails; avoid dereferencing it below
 	}
-	log.Debug("Sending file size "+strconv.FormatInt(fi.Size(),10)+" for " + fullName)
+
+	log.WithFields(map[string]interface{}{
+		"name": fullName,
+		"size": fi.Size(),
+	}).Debug("sending file size")
 
 	fsize.FileSize = fi.Size()
-	b,_ := json.Marshal(&fsize)
+	b, _ := json.Marshal(&fsize)
 	w.Write(b)
 }
 
-
-func checkFtsApiVersion(w http.ResponseWriter, r *http.Request) (utils.VersionNum,bool) {
+func checkFtsApiVersion(w http.ResponseWriter, r *http.Request) (utils.VersionNum, bool) {
 	return utils.PrecheckApiVersion(w, r, version.GetFtsApiVersion())
 }
 
 func routeFileTransfer(w http.ResponseWriter, r *http.Request) {
-	ver, ok := checkFtsApiVersion(w, r);
+	ver, ok := checkFtsApiVersion(w, r)
 	if !ok {
 		return
 	}
 
-	fullName, status,err := checkRequest(r,ver);
+	fullName, status, err := checkRequest(r, ver)
 	if err != nil {
-		utils.WriteServerError(w,err,status)
+		utils.WriteServerError(w, err, status)
 		return
 	}
 
 	sizeonly := r.URL.Query().Get("sizeonly")
- 	if (sizeonly != "true") {
-		serveFile(w,r,fullName)
+	if sizeonly != "true" {
+		serveFile(w, r, fullName)
 	} else {
-		serveFileSize(w,r,fullName)
+		serveFileSize(w, r, fullName)
 	}
 
-
-
-
 }
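
routeFileTransfer either streams the file or, when sizeonly=true, answers with a small JSON body containing file_size. A client-side sketch follows; the endpoint path, port and Authorization header are illustrative assumptions, not the service's actual contract.

// Client-side sketch of the sizeonly branch; the URL, port and token header
// below are assumptions for illustration, not the real protocol.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	body, _ := json.Marshal(map[string]string{
		"Folder":   "/beamline/p21.2/current",
		"FileName": "raw/1.tif",
	})

	req, _ := http.NewRequest(http.MethodPost,
		"http://localhost:5008/v0.2/transfer?sizeonly=true", bytes.NewReader(body))
	req.Header.Set("Authorization", "Bearer <folder token>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var answer struct {
		FileSize int64 `json:"file_size"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&answer); err != nil {
		panic(err)
	}
	fmt.Println("file size:", answer.FileSize)
}
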
diff --git a/producer/api/cpp/src/request_handler_tcp.cpp b/producer/api/cpp/src/request_handler_tcp.cpp
index 9595e3146b790beae689cb043978678cea7285db..defaa31a51e326d0c4329bf9fba0c6f6212b19e3 100644
--- a/producer/api/cpp/src/request_handler_tcp.cpp
+++ b/producer/api/cpp/src/request_handler_tcp.cpp
@@ -93,22 +93,22 @@ Error RequestHandlerTcp::ReceiveResponse(std::string* response) {
     switch (sendDataResponse.error_code) {
     case kNetAuthorizationError : {
         auto res_err = ProducerErrorTemplates::kWrongInput.Generate();
-        res_err->AddContext("response", sendDataResponse.message);
+        res_err->AddDetails("response", sendDataResponse.message);
         return res_err;
     }
     case kNetErrorNotSupported : {
         auto res_err = ProducerErrorTemplates::kUnsupportedClient.Generate();
-        res_err->AddContext("response", sendDataResponse.message);
+        res_err->AddDetails("response", sendDataResponse.message);
         return res_err;
     }
     case kNetErrorWrongRequest : {
         auto res_err = ProducerErrorTemplates::kWrongInput.Generate();
-        res_err->AddContext("response", sendDataResponse.message);
+        res_err->AddDetails("response", sendDataResponse.message);
         return res_err;
     }
     case kNetErrorWarning: {
         auto res_err = ProducerErrorTemplates::kServerWarning.Generate();
-        res_err->AddContext("response", sendDataResponse.message);
+        res_err->AddDetails("response", sendDataResponse.message);
         return res_err;
     }
     case kNetErrorReauthorize: {
@@ -122,7 +122,7 @@ Error RequestHandlerTcp::ReceiveResponse(std::string* response) {
         return nullptr;
     default:
         auto res_err = ProducerErrorTemplates::kInternalServerError.Generate();
-        res_err->AddContext("response", sendDataResponse.message);
+        res_err->AddDetails("response", sendDataResponse.message);
         return res_err;
     }
 }
diff --git a/receiver/src/receiver_data_server/net_server/rds_fabric_server.cpp b/receiver/src/receiver_data_server/net_server/rds_fabric_server.cpp
index a31ab18747093992b53c9665bbfaa5f6906f53ea..4df46716de5beb55056628bdbe6784685b255b9a 100644
--- a/receiver/src/receiver_data_server/net_server/rds_fabric_server.cpp
+++ b/receiver/src/receiver_data_server/net_server/rds_fabric_server.cpp
@@ -30,7 +30,7 @@ Error RdsFabricServer::Initialize() {
         return err;
     }
 
-    log__->Info("Started Fabric ReceiverDataServer at '" + server__->GetAddress() + "'");
+    log__->Info(LogMessageWithFields("started fabric data server").Append("address",server__->GetAddress()));
 
     return err;
 }
diff --git a/receiver/src/receiver_data_server/net_server/rds_tcp_server.cpp b/receiver/src/receiver_data_server/net_server/rds_tcp_server.cpp
index fc8efd3fee6b67a12a13681e71832a3be0ba7319..6f24fc62a6555b94540efb41a01d1c11b0794cc2 100644
--- a/receiver/src/receiver_data_server/net_server/rds_tcp_server.cpp
+++ b/receiver/src/receiver_data_server/net_server/rds_tcp_server.cpp
@@ -1,34 +1,38 @@
 #include "rds_tcp_server.h"
 #include "../receiver_data_server_logger.h"
+#include "../receiver_data_server_error.h"
 
 #include "asapo/io/io_factory.h"
 #include "asapo/common/networking.h"
 
 namespace asapo {
 
-RdsTcpServer::RdsTcpServer(std::string address, const AbstractLogger* logger) : io__{GenerateDefaultIO()}, log__{logger},
-    address_{std::move(address)} {}
+RdsTcpServer::RdsTcpServer(std::string address, const AbstractLogger *logger) : io__{GenerateDefaultIO()},
+                                                                                log__{logger},
+                                                                                address_{std::move(address)} {}
 
 Error RdsTcpServer::Initialize() {
-    Error err;
-    if (master_socket_ == kDisconnectedSocketDescriptor) {
-        master_socket_ = io__->CreateAndBindIPTCPSocketListener(address_, kMaxPendingConnections, &err);
-        if (!err) {
-            log__->Info("Started TCP ReceiverDataServer at '" + address_ + "'");
-        } else {
-            log__->Error("TCP ReceiverDataServer cannot listen on " + address_ + ": " + err->Explain());
-        }
+    if (master_socket_ != kDisconnectedSocketDescriptor) {
+        return GeneralErrorTemplates::kSimpleError.Generate("server was already initialized");
+    }
+    Error io_err;
+    master_socket_ = io__->CreateAndBindIPTCPSocketListener(address_, kMaxPendingConnections, &io_err);
+    if (!io_err) {
+        log__->Info(LogMessageWithFields("started TCP data server").Append("address", address_));
     } else {
-        err = GeneralErrorTemplates::kSimpleError.Generate("Server was already initialized");
+        auto err =
+            ReceiverDataServerErrorTemplates::kServerError.Generate("cannot start TCP data server", std::move(io_err));
+        err->AddDetails("address", address_);
+        return err;
     }
-    return err;
+    return nullptr;
 }
 
-ListSocketDescriptors RdsTcpServer::GetActiveSockets(Error* err) {
+ListSocketDescriptors RdsTcpServer::GetActiveSockets(Error *err) {
     std::vector<std::string> new_connections;
     auto sockets = io__->WaitSocketsActivity(master_socket_, &sockets_to_listen_, &new_connections, err);
-    for (auto& connection : new_connections) {
-        log__->Debug("new connection from " + connection);
+    for (auto &connection: new_connections) {
+        log__->Debug(LogMessageWithFields("new connection").Append("origin", connection));
     }
     return sockets;
 }
@@ -36,42 +40,45 @@ ListSocketDescriptors RdsTcpServer::GetActiveSockets(Error* err) {
 void RdsTcpServer::CloseSocket(SocketDescriptor socket) {
     sockets_to_listen_.erase(std::remove(sockets_to_listen_.begin(), sockets_to_listen_.end(), socket),
                              sockets_to_listen_.end());
-    log__->Debug("connection " + io__->AddressFromSocket(socket) + " closed");
+    log__->Debug(LogMessageWithFields("connection closed").Append("origin", io__->AddressFromSocket(socket)));
     io__->CloseSocket(socket, nullptr);
 }
 
-ReceiverDataServerRequestPtr RdsTcpServer::ReadRequest(SocketDescriptor socket, Error* err) {
+ReceiverDataServerRequestPtr RdsTcpServer::ReadRequest(SocketDescriptor socket, Error *err) {
     GenericRequestHeader header;
+    Error io_err;
+    *err = nullptr;
     io__->Receive(socket, &header,
-                  sizeof(GenericRequestHeader), err);
-    if (*err == GeneralErrorTemplates::kEndOfFile) {
+                  sizeof(GenericRequestHeader), &io_err);
+    if (io_err == GeneralErrorTemplates::kEndOfFile) {
+        *err = std::move(io_err);
         CloseSocket(socket);
         return nullptr;
-    } else if (*err) {
-        log__->Error("error getting next request from " + io__->AddressFromSocket(socket) + ": " + (*err)->
-                     Explain()
-                    );
+    } else if (io_err) {
+        *err = ReceiverDataServerErrorTemplates::kServerError.Generate("error getting next request",std::move(io_err));
+        (*err)->AddDetails("origin",io__->AddressFromSocket(socket));
         return nullptr;
     }
     return ReceiverDataServerRequestPtr{new ReceiverDataServerRequest{header, (uint64_t) socket}};
 }
 
-GenericRequests RdsTcpServer::ReadRequests(const ListSocketDescriptors& sockets) {
+GenericRequests RdsTcpServer::ReadRequests(const ListSocketDescriptors &sockets) {
     GenericRequests requests;
-    for (auto client : sockets) {
+    for (auto client: sockets) {
         Error err;
         auto request = ReadRequest(client, &err);
         if (err) {
             continue;
         }
-        log__->Debug("received request opcode: " + std::to_string(request->header.op_code) + " id: " + std::to_string(
-                         request->header.data_id));
+        log__->Debug(LogMessageWithFields("received request").
+            Append("operation", OpcodeToString(request->header.op_code)).
+            Append("id", request->header.data_id));
         requests.emplace_back(std::move(request));
     }
     return requests;
 }
 
-GenericRequests RdsTcpServer::GetNewRequests(Error* err) {
+GenericRequests RdsTcpServer::GetNewRequests(Error *err) {
     auto sockets = GetActiveSockets(err);
     if (*err) {
         return {};
@@ -82,7 +89,7 @@ GenericRequests RdsTcpServer::GetNewRequests(Error* err) {
 
 RdsTcpServer::~RdsTcpServer() {
     if (!io__) return; // need for test that override io__ to run
-    for (auto client : sockets_to_listen_) {
+    for (auto client: sockets_to_listen_) {
         io__->CloseSocket(client, nullptr);
     }
     io__->CloseSocket(master_socket_, nullptr);
@@ -92,28 +99,31 @@ void RdsTcpServer::HandleAfterError(uint64_t source_id) {
     CloseSocket(static_cast<int>(source_id));
 }
 
-Error RdsTcpServer::SendResponse(const ReceiverDataServerRequest* request, const GenericNetworkResponse* response) {
-    Error err;
-    io__->Send(static_cast<int>(request->source_id), response, sizeof(*response), &err);
-    if (err) {
-        log__->Error("cannot send to consumer" + err->Explain());
+Error RdsTcpServer::SendResponse(const ReceiverDataServerRequest *request, const GenericNetworkResponse *response) {
+    Error io_err, err;
+    auto socket = static_cast<int>(request->source_id);
+    io__->Send(socket, response, sizeof(*response), &io_err);
+    if (io_err) {
+        err = ReceiverDataServerErrorTemplates::kServerError.Generate("error sending response",std::move(io_err));
+        err->AddDetails("origin",io__->AddressFromSocket(socket));
     }
     return err;
 }
 
 Error
-RdsTcpServer::SendResponseAndSlotData(const ReceiverDataServerRequest* request, const GenericNetworkResponse* response,
-                                      const CacheMeta* cache_slot) {
+RdsTcpServer::SendResponseAndSlotData(const ReceiverDataServerRequest *request, const GenericNetworkResponse *response,
+                                      const CacheMeta *cache_slot) {
     Error err;
-
     err = SendResponse(request, response);
     if (err) {
         return err;
     }
-
-    io__->Send(static_cast<int>(request->source_id), cache_slot->addr, cache_slot->size, &err);
-    if (err) {
-        log__->Error("cannot send slot to worker" + err->Explain());
+    Error io_err;
+    auto socket = static_cast<int>(request->source_id);
+    io__->Send(socket, cache_slot->addr, cache_slot->size, &io_err);
+    if (io_err) {
+        err = ReceiverDataServerErrorTemplates::kServerError.Generate("error sending slot data",std::move(io_err));
+        err->AddDetails("origin",io__->AddressFromSocket(socket));
     }
     return err;
 }
diff --git a/receiver/src/receiver_data_server/receiver_data_server.cpp b/receiver/src/receiver_data_server/receiver_data_server.cpp
index 1dd311a908276934e5f0ef80c8baca839d203044..3ac686056c5e58887e388e3711e6a9283db230c9 100644
--- a/receiver/src/receiver_data_server/receiver_data_server.cpp
+++ b/receiver/src/receiver_data_server/receiver_data_server.cpp
@@ -24,12 +24,11 @@ void ReceiverDataServer::Run() {
         if (err == IOErrorTemplates::kTimeout) {
             continue;
         }
-
         if (!err) {
             err = request_pool__->AddRequests(std::move(requests));
         }
         if (err) {
-            log__->Error(std::string("receiver data server stopped: ") + err->Explain());
+            log__->Error(LogMessageWithFields("receiver data server stopped").Append("cause",std::move(err)));
             return;
         }
     }
diff --git a/receiver/src/receiver_data_server/receiver_data_server_error.h b/receiver/src/receiver_data_server/receiver_data_server_error.h
index 3e2e48601aa4b2d886ba216b7d731a15c593bfd7..be9bd21d5311d405f3cf8f1e993cb5a197a6db26 100644
--- a/receiver/src/receiver_data_server/receiver_data_server_error.h
+++ b/receiver/src/receiver_data_server/receiver_data_server_error.h
@@ -7,7 +7,8 @@ namespace asapo {
 
 enum class ReceiverDataServerErrorType {
     kMemoryPool,
-    kWrongRequest
+    kWrongRequest,
+    kServerError
 };
 
 using ReceiverDataServerErrorTemplate = ServiceErrorTemplate<ReceiverDataServerErrorType>;
@@ -21,6 +22,9 @@ auto const kWrongRequest = ReceiverDataServerErrorTemplate {
     "wrong request", ReceiverDataServerErrorType::kWrongRequest
 };
 
+auto const kServerError = ReceiverDataServerErrorTemplate {
+    "server error", ReceiverDataServerErrorType::kServerError
+};
 
 }
 }
diff --git a/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.cpp b/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.cpp
index f2409de029b5cb2cfcb49b469ed16a8d5b13f987..3ecec5d8811bb8551e6c671e08dc8befa9a2ecab 100644
--- a/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.cpp
+++ b/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.cpp
@@ -92,7 +92,10 @@ void ReceiverDataServerRequestHandler::ProcessRequestTimeoutUnlocked(GenericRequ
 
 void ReceiverDataServerRequestHandler::HandleInvalidRequest(const ReceiverDataServerRequest* receiver_request,
         NetworkErrorCode code) {
-    SendResponse(receiver_request, code);
+    auto err = SendResponse(receiver_request, code);
+    if (err) {
+        log__->Error(err);
+    }
     server_->HandleAfterError(receiver_request->source_id);
     switch (code) {
     case NetworkErrorCode::kNetErrorWrongRequest:
@@ -111,8 +114,8 @@ void ReceiverDataServerRequestHandler::HandleValidRequest(const ReceiverDataServ
         const CacheMeta* meta) {
     auto err = SendResponseAndSlotData(receiver_request, meta);
     if (err) {
-        log__->Error("failed to send slot:" + err->Explain());
         server_->HandleAfterError(receiver_request->source_id);
+        log__->Error(err);
     } else {
         statistics__->IncreaseRequestCounter();
         statistics__->IncreaseRequestDataVolume(receiver_request->header.data_size);
diff --git a/receiver/src/request.cpp b/receiver/src/request.cpp
index 0c4ed721b105dc68bf7a93c044ecc8b71150d212..25172a9abf261044f31f6fc272b5d9ea23aedb3f 100644
--- a/receiver/src/request.cpp
+++ b/receiver/src/request.cpp
@@ -18,7 +18,9 @@ Error Request::PrepareDataBufferAndLockIfNeeded() {
         try {
             data_buffer_.reset(new uint8_t[(size_t)request_header_.data_size]);
         } catch(std::exception& e) {
-            auto err = GeneralErrorTemplates::kMemoryAllocationError.Generate(e.what());
+            auto err = GeneralErrorTemplates::kMemoryAllocationError.Generate(
+                std::string("cannot allocate memory for request"));
+            err->AddDetails("reason", e.what())->AddDetails("size", std::to_string(request_header_.data_size));
             return err;
         }
     } else {
@@ -27,7 +29,9 @@ Error Request::PrepareDataBufferAndLockIfNeeded() {
         if (data_ptr) {
             slot_meta_ = slot;
         } else {
-            return GeneralErrorTemplates::kMemoryAllocationError.Generate("cannot allocate slot in cache");
+            auto err = GeneralErrorTemplates::kMemoryAllocationError.Generate("cannot allocate slot in cache");
+            err->AddDetails("size", std::to_string(request_header_.data_size));
+            return err;
         }
     }
     return nullptr;
diff --git a/receiver/src/request_handler/authorization_client.cpp b/receiver/src/request_handler/authorization_client.cpp
index 183fac7cb8fd1e2d2b9de895704f22f7993c5303..2a4ab650ee005ebc3bc949e60409bed9f96e99f6 100644
--- a/receiver/src/request_handler/authorization_client.cpp
+++ b/receiver/src/request_handler/authorization_client.cpp
@@ -22,15 +22,16 @@ Error ErrorFromAuthorizationServerResponse(Error err, const std::string response
     if (err) {
         return_err = asapo::ReceiverErrorTemplates::kInternalServerError.Generate(
             "cannot authorize request");
+        return_err->AddDetails("response", response);
         return_err->SetCause(std::move(err));
     } else {
         if (code != HttpCode::Unauthorized) {
             return_err = asapo::ReceiverErrorTemplates::kInternalServerError.Generate();
-            return_err->AddContext("response", response)->AddContext("errorCode", std::to_string(int(
-                code)));
         } else {
             return_err = asapo::ReceiverErrorTemplates::kAuthorizationFailure.Generate();
         }
+        return_err->AddDetails("response", response)->AddDetails("errorCode", std::to_string(int(
+            code)));
     }
     return return_err;
 }
@@ -45,7 +46,7 @@ Error CheckAccessType(SourceType source_type, const std::vector<std::string> &ac
         for (size_t i = 0; i < access_types.size(); i++) {
             types += (i > 0 ? "," : "") + access_types[i];
         }
-        err->AddContext("expected", source_type == SourceType::kProcessed ? "write" : "writeraw")->AddContext("have",
+        err->AddDetails("expected", source_type == SourceType::kProcessed ? "write" : "writeraw")->AddDetails("have",
                                                                                                               types);
         return err;
     }
@@ -56,6 +57,7 @@ Error ParseServerResponse(const std::string &response,
                           std::vector<std::string> *access_types,
                           AuthorizationData *data) {
     Error err;
+
     AuthorizationData creds;
     JsonStringParser parser{response};
     std::string stype;
@@ -69,7 +71,7 @@ Error ParseServerResponse(const std::string &response,
         (err = GetSourceTypeFromString(stype, &data->source_type)) ||
         (err = parser.GetString("beamline", &data->beamline));
     if (err) {
-        return ErrorFromAuthorizationServerResponse(std::move(err), "", code);
+        return ErrorFromAuthorizationServerResponse(std::move(err), response, code);
     }
     return nullptr;
 }
@@ -82,7 +84,7 @@ Error UpdateDataFromServerResponse(const std::string &response, HttpCode code, A
     err = ParseServerResponse(response, code, &access_types, data);
     if (err) {
         *data = old_data;
-        return ErrorFromAuthorizationServerResponse(std::move(err), response, code);
+        return err;
     }
 
     err = CheckAccessType(data->source_type, access_types);
diff --git a/receiver/src/request_handler/file_processors/receive_file_processor.cpp b/receiver/src/request_handler/file_processors/receive_file_processor.cpp
index 3ff0d607e6c98e7d30d9f5dcc0e0e4cc12d0ae08..02da8d258b2471bbd524cdd71989eeea9ecacdf2 100644
--- a/receiver/src/request_handler/file_processors/receive_file_processor.cpp
+++ b/receiver/src/request_handler/file_processors/receive_file_processor.cpp
@@ -5,6 +5,7 @@
 #include "../../receiver_error.h"
 #include "../../request.h"
 #include "../../receiver_config.h"
+#include "../../receiver_logger.h"
 
 namespace asapo {
 
@@ -23,7 +24,8 @@ Error ReceiveFileProcessor::ProcessFile(const Request* request, bool overwrite)
     }
     err =  io__->ReceiveDataToFile(socket, root_folder, fname, (size_t) fsize, true, overwrite);
     if (!err) {
-        log__->Debug("received file of size " + std::to_string(fsize) + " to " + root_folder + kPathSeparator + fname);
+        log__->Debug(RequestLog("received file", request).Append("size",std::to_string(fsize)).Append("name",
+                root_folder + kPathSeparator + fname));
     }
     return err;
 }
diff --git a/receiver/src/request_handler/file_processors/write_file_processor.cpp b/receiver/src/request_handler/file_processors/write_file_processor.cpp
index 3dcc5ae0076116e780f2d4ebc76e0bdaebfab68f..e9886926f32c749692b6af7dc8d204144fb5b446 100644
--- a/receiver/src/request_handler/file_processors/write_file_processor.cpp
+++ b/receiver/src/request_handler/file_processors/write_file_processor.cpp
@@ -3,6 +3,7 @@
 #include "asapo/preprocessor/definitions.h"
 #include "../../receiver_error.h"
 #include "../../request.h"
+#include "../../receiver_logger.h"
 
 namespace asapo {
 
@@ -14,7 +15,9 @@ WriteFileProcessor::WriteFileProcessor() : FileProcessor()  {
 Error WriteFileProcessor::ProcessFile(const Request* request, bool overwrite) const {
     auto fsize = request->GetDataSize();
     if (fsize <= 0) {
-        return ReceiverErrorTemplates::kBadRequest.Generate("wrong file size");
+        auto err = ReceiverErrorTemplates::kBadRequest.Generate("wrong file size");
+        err->AddDetails("size",std::to_string(fsize));
+        return err;
     }
 
     auto data = request->GetData();
@@ -27,7 +30,8 @@ Error WriteFileProcessor::ProcessFile(const Request* request, bool overwrite) co
 
     err =  io__->WriteDataToFile(root_folder, fname, (uint8_t*)data, (size_t) fsize, true, overwrite);
     if (!err) {
-        log__->Debug("saved file of size " + std::to_string(fsize) + " to " + root_folder + kPathSeparator + fname);
+        log__->Debug(RequestLog("saved file", request).Append("size",std::to_string(fsize)).Append("name",
+                root_folder + kPathSeparator + fname));
     }
 
     return err;
diff --git a/receiver/src/request_handler/request_handler_authorize.cpp b/receiver/src/request_handler/request_handler_authorize.cpp
index f29681fec5f094fadd82c1394605d7d985d90933..bb158c38e4d168ea1d71292edc79091f196e02a7 100644
--- a/receiver/src/request_handler/request_handler_authorize.cpp
+++ b/receiver/src/request_handler/request_handler_authorize.cpp
@@ -15,7 +15,7 @@ Error RequestHandlerAuthorize::CheckVersion(const Request* request) const {
     int verService = VersionToNumber(GetReceiverApiVersion());
     if (verClient > verService) {
         auto err = asapo::ReceiverErrorTemplates::kUnsupportedClient.Generate();
-        err->AddContext("client",version_from_client)->AddContext("server",GetReceiverApiVersion());
+        err->AddDetails("client", version_from_client)->AddDetails("server", GetReceiverApiVersion());
         return err;
     }
     return nullptr;
diff --git a/receiver/src/request_handler/request_handler_db.cpp b/receiver/src/request_handler/request_handler_db.cpp
index fea6e82467656c132cd83a3300bb24f38f659d05..54b9fadc0c8e1c755cdf4bd8d7342e88325b86dc 100644
--- a/receiver/src/request_handler/request_handler_db.cpp
+++ b/receiver/src/request_handler/request_handler_db.cpp
@@ -43,13 +43,14 @@ Error RequestHandlerDb::GetDatabaseServerUri(std::string* uri) const {
                      Append("origin", GetReceiverConfig()->discovery_server));
         auto err = ReceiverErrorTemplates::kInternalServerError.Generate("http error while discovering database server",
                 std::move(http_err));
-        err->AddContext("discoveryEndpoint",GetReceiverConfig()->discovery_server);
+        err->AddDetails("discoveryEndpoint", GetReceiverConfig()->discovery_server);
         return err;
     }
 
     if (code != HttpCode::OK) {
         auto err =  ReceiverErrorTemplates::kInternalServerError.Generate("error when discover database server");
-        err->AddContext("discoveryEndpoint",GetReceiverConfig()->discovery_server)->AddContext("errorCode",std::to_string((int) code));
+        err->AddDetails("discoveryEndpoint", GetReceiverConfig()->discovery_server)->AddDetails("errorCode",
+                                                                                                std::to_string((int) code));
         return err;
     }
 
@@ -80,9 +81,9 @@ Error RequestHandlerDb::DBErrorToReceiverError(Error err) const {
     Error return_err;
     if (err == DBErrorTemplates::kWrongInput || err == DBErrorTemplates::kNoRecord
             || err == DBErrorTemplates::kJsonParseError) {
-        return_err = ReceiverErrorTemplates::kBadRequest.Generate();
+        return_err = ReceiverErrorTemplates::kBadRequest.Generate("error from database");
     } else {
-        return_err = ReceiverErrorTemplates::kInternalServerError.Generate();
+        return_err = ReceiverErrorTemplates::kInternalServerError.Generate("error from database");
     }
     return_err->SetCause(std::move(err));
     return return_err;
diff --git a/receiver/src/request_handler/request_handler_receive_data.cpp b/receiver/src/request_handler/request_handler_receive_data.cpp
index 8c840c0266e684acae2eb7f3e4801c710102e44c..e81a5e30402ef70c533930b0b4b662ad42933146 100644
--- a/receiver/src/request_handler/request_handler_receive_data.cpp
+++ b/receiver/src/request_handler/request_handler_receive_data.cpp
@@ -18,8 +18,16 @@ Error RequestHandlerReceiveData::ProcessRequest(Request* request) const {
     if (err) {
         return err;
     }
-    io__->Receive(request->GetSocket(), request->GetData(), (size_t) request->GetDataSize(), &err);
+    Error io_err;
+    io__->Receive(request->GetSocket(), request->GetData(), (size_t) request->GetDataSize(), &io_err);
+    if (io_err) {
+        err = ReceiverErrorTemplates::kProcessingError.Generate("cannot receive data",std::move(io_err));
+    }
     request->UnlockDataBufferIfNeeded();
+    if (err == nullptr) {
+        log__->Debug(RequestLog("received request data", request).Append("size",request->GetDataSize()));
+    }
+
     return err;
 }
 
diff --git a/receiver/src/request_handler/request_handler_receive_metadata.cpp b/receiver/src/request_handler/request_handler_receive_metadata.cpp
index 71aab0b7423f5c767aa03075b2eb5e25bea1f41d..6e0826a15012a7875853109d90cf6933d7c3b93f 100644
--- a/receiver/src/request_handler/request_handler_receive_metadata.cpp
+++ b/receiver/src/request_handler/request_handler_receive_metadata.cpp
@@ -15,9 +15,9 @@ Error RequestHandlerReceiveMetaData::ProcessRequest(Request* request) const {
     auto buf = std::unique_ptr<uint8_t[]> {new uint8_t[meta_size]};
     io__->Receive(request->GetSocket(), (void*) buf.get(), meta_size, &err);
     if (err) {
-        return err;
+        return ReceiverErrorTemplates::kProcessingError.Generate("cannot receive metadata",std::move(err));
     }
-
+    log__->Debug(RequestLog("received request metadata", request).Append("size",meta_size));
     request->SetMetadata(std::string((char*)buf.get(), meta_size));
     return nullptr;
 }
diff --git a/receiver/unittests/receiver_data_server/net_server/test_rds_tcp_server.cpp b/receiver/unittests/receiver_data_server/net_server/test_rds_tcp_server.cpp
index 22a0aae334f80e050b2d7470f614f2ee59276daa..a80f9f0c86dc125a5c4c33531633bb379dbc80e2 100644
--- a/receiver/unittests/receiver_data_server/net_server/test_rds_tcp_server.cpp
+++ b/receiver/unittests/receiver_data_server/net_server/test_rds_tcp_server.cpp
@@ -126,10 +126,6 @@ void RdsTCPServerTests::MockReceiveRequest(bool ok ) {
             DoAll(SetArgPointee<3>(ok ? nullptr : asapo::IOErrorTemplates::kUnknownIOError.Generate().release()),
                   Return(0))
         );
-        if (!ok) {
-            std::string connected_uri = std::to_string(conn);
-            EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("request"), HasSubstr(connected_uri))));
-        }
     }
 }
 
@@ -163,8 +159,7 @@ void RdsTCPServerTests::ExpectReceiveOk() {
                 A_ReceiveData(asapo::kOpcodeGetBufferData, conn),
                 testing::ReturnArg<2>()
             ));
-        EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("request"), HasSubstr("id: " + std::to_string(conn)),
-                                             HasSubstr("opcode: " + std::to_string(asapo::kOpcodeGetBufferData)))));
+        EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("request"), HasSubstr(std::to_string(conn)))));
     }
 }
 
@@ -249,8 +244,6 @@ TEST_F(RdsTCPServerTests, SendResponse) {
             Return(1)
         ));
 
-    EXPECT_CALL(mock_logger, Error(HasSubstr("cannot send")));
-
     auto err = tcp_server.SendResponse(&expectedRequest, &tmp);
 
     ASSERT_THAT(err, Ne(nullptr));
@@ -272,8 +265,6 @@ TEST_F(RdsTCPServerTests, SendResponseAndSlotData_SendResponseError) {
                   testing::SetArgPointee<3>(asapo::IOErrorTemplates::kUnknownIOError.Generate().release()),
                   Return(0)
               ));
-    EXPECT_CALL(mock_logger, Error(HasSubstr("cannot send")));
-
     auto err = tcp_server.SendResponseAndSlotData(&expectedRequest, &tmp, &expectedMeta);
 
     ASSERT_THAT(err, Ne(nullptr));
@@ -298,8 +289,6 @@ TEST_F(RdsTCPServerTests, SendResponseAndSlotData_SendError) {
             Return(0)
         ));
 
-    EXPECT_CALL(mock_logger, Error(HasSubstr("cannot send")));
-
     auto err = tcp_server.SendResponseAndSlotData(&expectedRequest, &tmp, &expectedMeta);
 
     ASSERT_THAT(err, Ne(nullptr));
diff --git a/receiver/unittests/receiver_data_server/request_handler/test_request_handler.cpp b/receiver/unittests/receiver_data_server/request_handler/test_request_handler.cpp
index fb85f18e56798d664529c1223ad59e39dce1c61d..b6a0951a3ccba60c097091aecabfb2306396c3ca 100644
--- a/receiver/unittests/receiver_data_server/request_handler/test_request_handler.cpp
+++ b/receiver/unittests/receiver_data_server/request_handler/test_request_handler.cpp
@@ -112,7 +112,7 @@ TEST_F(RequestHandlerTests, RequestAlwaysReady) {
 
 TEST_F(RequestHandlerTests, ProcessRequest_WrongOpCode) {
     request.header.op_code = asapo::kOpcodeUnknownOp;
-    MockSendResponse(asapo::kNetErrorWrongRequest, false);
+    MockSendResponse(asapo::kNetErrorWrongRequest, true);
     EXPECT_CALL(mock_net, HandleAfterError_t(expected_source_id));
 
     EXPECT_CALL(mock_logger, Error(HasSubstr("wrong request")));
@@ -124,7 +124,7 @@ TEST_F(RequestHandlerTests, ProcessRequest_WrongOpCode) {
 
 TEST_F(RequestHandlerTests, ProcessRequest_WrongClientVersion) {
     strcpy(request.header.api_version, "v0.2");
-    MockSendResponse(asapo::kNetErrorNotSupported, false);
+    MockSendResponse(asapo::kNetErrorNotSupported, true);
     EXPECT_CALL(mock_net, HandleAfterError_t(expected_source_id));
 
     EXPECT_CALL(mock_logger, Error(HasSubstr("unsupported client")));
diff --git a/receiver/unittests/receiver_data_server/test_receiver_data_server.cpp b/receiver/unittests/receiver_data_server/test_receiver_data_server.cpp
index b3ce88a40f7cc44333e7cac70eed46aa4ad6adb4..004915c1f8694fa9150574ddc61a48e4d491d10e 100644
--- a/receiver/unittests/receiver_data_server/test_receiver_data_server.cpp
+++ b/receiver/unittests/receiver_data_server/test_receiver_data_server.cpp
@@ -94,8 +94,6 @@ TEST_F(ReceiverDataServerTests, TimeoutGetNewRequests) {
     data_server.Run();
 }
 
-
-
 TEST_F(ReceiverDataServerTests, ErrorGetNewRequests) {
     EXPECT_CALL(mock_net, GetNewRequests_t(_)).WillOnce(
         DoAll(SetArgPointee<0>(asapo::IOErrorTemplates::kUnknownIOError.Generate().release()),
@@ -103,9 +101,7 @@ TEST_F(ReceiverDataServerTests, ErrorGetNewRequests) {
              )
     );
 
-    auto errtext = asapo::IOErrorTemplates::kUnknownIOError.Generate()->Explain();
-
-    EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("stopped"), HasSubstr(errtext))));
+    EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("stopped"), HasSubstr("unknown error"))));
 
     data_server.Run();
 }
@@ -121,9 +117,7 @@ TEST_F(ReceiverDataServerTests, ErrorAddingRequests) {
         Return(asapo::ReceiverDataServerErrorTemplates::kMemoryPool.Generate("cannot add request to pool").release())
     );
 
-    auto errtext = asapo::ReceiverDataServerErrorTemplates::kMemoryPool.Generate("cannot add request to pool")->Explain();
-
-    EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("stopped"), HasSubstr(errtext))));
+    EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("stopped"), HasSubstr("pool"))));
 
     data_server.Run();
 }
diff --git a/receiver/unittests/request_handler/file_processors/test_receive_file_processor.cpp b/receiver/unittests/request_handler/file_processors/test_receive_file_processor.cpp
index 0fe70c057a2890bc0cb0a01fe5c8d7aabb419014..518ef7d189c458066af15c17d7af883ce99db193 100644
--- a/receiver/unittests/request_handler/file_processors/test_receive_file_processor.cpp
+++ b/receiver/unittests/request_handler/file_processors/test_receive_file_processor.cpp
@@ -27,7 +27,7 @@ class ReceiveFileProcessorTests : public Test {
   public:
     ReceiveFileProcessor processor;
     NiceMock<MockIO> mock_io;
-    std::unique_ptr<MockRequest> mock_request;
+    std::unique_ptr<NiceMock<MockRequest>> mock_request;
     NiceMock<asapo::MockLogger> mock_logger;
     SocketDescriptor expected_socket_id = SocketDescriptor{1};
     std::string expected_file_name = std::string("processed") + asapo::kPathSeparator + std::string("2");
@@ -54,8 +54,10 @@ class ReceiveFileProcessorTests : public Test {
         asapo::ReceiverConfig test_config;
         asapo::SetReceiverConfig(test_config, "none");
         processor.log__ = &mock_logger;
-        mock_request.reset(new MockRequest{request_header, 1, "", nullptr});
+        mock_request.reset(new NiceMock<MockRequest>{request_header, 1, "", nullptr});
         processor.io__ = std::unique_ptr<asapo::IO> {&mock_io};
+        SetDefaultRequestCalls(mock_request.get(),expected_beamtime_id);
+
     }
     void TearDown() override {
         processor.io__.release();
@@ -118,8 +120,6 @@ TEST_F(ReceiveFileProcessorTests, WritesToLog) {
     .WillOnce(Return(nullptr));
 
     EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("received file"),
-                                         HasSubstr(expected_file_name),
-                                         HasSubstr(expected_beamtime_id),
                                          HasSubstr(std::to_string(expected_file_size))
                                         )
                                   )
diff --git a/receiver/unittests/request_handler/file_processors/test_write_file_processor.cpp b/receiver/unittests/request_handler/file_processors/test_write_file_processor.cpp
index bead654171b2962415c58e6e9c28aa08c6c9d4b8..5a4e77115230a99db29e52e9e3edd96d0179daf4 100644
--- a/receiver/unittests/request_handler/file_processors/test_write_file_processor.cpp
+++ b/receiver/unittests/request_handler/file_processors/test_write_file_processor.cpp
@@ -27,7 +27,7 @@ class WriteFileProcessorTests : public Test {
   public:
     WriteFileProcessor processor;
     NiceMock<MockIO> mock_io;
-    std::unique_ptr<MockRequest> mock_request;
+    std::unique_ptr<NiceMock<MockRequest>> mock_request;
     NiceMock<asapo::MockLogger> mock_logger;
     std::string expected_file_name = std::string("raw") + asapo::kPathSeparator + std::string("2");
     asapo::SourceType expected_source_type = asapo::SourceType::kRaw;
@@ -53,8 +53,10 @@ class WriteFileProcessorTests : public Test {
         asapo::ReceiverConfig test_config;
         asapo::SetReceiverConfig(test_config, "none");
         processor.log__ = &mock_logger;
-        mock_request.reset(new MockRequest{request_header, 1, "", nullptr});
+        mock_request.reset(new NiceMock<MockRequest>{request_header, 1, "", nullptr});
         processor.io__ = std::unique_ptr<asapo::IO> {&mock_io};
+        SetDefaultRequestCalls(mock_request.get(),expected_beamtime_id);
+
     }
     void TearDown() override {
         processor.io__.release();
@@ -114,15 +116,8 @@ TEST_F(WriteFileProcessorTests, WritesToLog) {
 
     ExpectFileWrite(nullptr);
 
-    EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("saved file"),
-                                         HasSubstr(expected_file_name),
-                                         HasSubstr(expected_beamtime_id),
-                                         HasSubstr(expected_facility),
-                                         HasSubstr(expected_year),
-                                         HasSubstr(std::to_string(expected_file_size))
-                                        )
-                                  )
-               );
+    EXPECT_CALL(mock_logger, Debug(HasSubstr("saved file")));
+
     auto err = processor.ProcessFile(mock_request.get(), expected_overwrite);
     ASSERT_THAT(err, Eq(nullptr));
 }
diff --git a/receiver/unittests/request_handler/test_request_handler_receive_data.cpp b/receiver/unittests/request_handler/test_request_handler_receive_data.cpp
index d3e679b7c4f57a3459975acf9e0ff18698001b82..8d734e18cad50406bf4e1b06dad2acb6da103e30 100644
--- a/receiver/unittests/request_handler/test_request_handler_receive_data.cpp
+++ b/receiver/unittests/request_handler/test_request_handler_receive_data.cpp
@@ -109,7 +109,7 @@ TEST_F(ReceiveDataHandlerTests, HandleDoesNotReceiveDataWhenMetadataOnlyWasSent)
 TEST_F(ReceiveDataHandlerTests, HandleReturnsErrorOnDataReceive) {
     ExpectReceiveData(false);
     auto err = handler.ProcessRequest(request.get());
-    ASSERT_THAT(err, Eq(asapo::IOErrorTemplates::kReadError));
+    ASSERT_THAT(err, Eq(asapo::ReceiverErrorTemplates::kProcessingError));
 }
 
 TEST_F(ReceiveDataHandlerTests, HandleReturnsOK) {
diff --git a/receiver/unittests/request_handler/test_request_handler_receive_metadata.cpp b/receiver/unittests/request_handler/test_request_handler_receive_metadata.cpp
index 99b3e4ed5136293068adacbfa3413e763c858266..0b15844d47183ea15cb24edda92596dafa46030d 100644
--- a/receiver/unittests/request_handler/test_request_handler_receive_metadata.cpp
+++ b/receiver/unittests/request_handler/test_request_handler_receive_metadata.cpp
@@ -87,7 +87,7 @@ TEST_F(ReceiveMetaDataHandlerTests, CheckStatisticEntity) {
 TEST_F(ReceiveMetaDataHandlerTests, HandleReturnsErrorOnMetaDataReceive) {
     ExpectReceiveMetaData(false);
     auto err = handler.ProcessRequest(request.get());
-    ASSERT_THAT(err, Eq(asapo::IOErrorTemplates::kReadError));
+    ASSERT_THAT(err, Eq(asapo::ReceiverErrorTemplates::kProcessingError));
 }
 
 TEST_F(ReceiveMetaDataHandlerTests, HandleReturnsOK) {
diff --git a/tests/automatic/producer/aai/check_windows.bat b/tests/automatic/producer/aai/check_windows.bat
index 28a3e3c9d0e230b2f91e1bba3df56382218b17d9..36373fedab7f1d50403c69f39b8aec901b79a2f5 100644
--- a/tests/automatic/producer/aai/check_windows.bat
+++ b/tests/automatic/producer/aai/check_windows.bat
@@ -33,7 +33,7 @@ echo %NUM% | findstr 3 || goto error
 for /F %%N in ('find /C "reauthorization" ^< "out"') do set NUM=%%N
 echo %NUM% | findstr 1 || goto error
 
-for /F %%N in ('find /C "} error: server warning, context: response:duplicated request" ^< "out"') do set NUM=%%N
+for /F %%N in ('find /C "} error: server warning, details: response:duplicated request" ^< "out"') do set NUM=%%N
 echo %NUM% | findstr 1 || goto error
 
 goto :clean
diff --git a/tests/automatic/producer/python_api/check_windows.bat b/tests/automatic/producer/python_api/check_windows.bat
index 5920874f07bc788076de2dc13087d312561e8fb8..78389910ead22f0ccb8ac2692679345c9c18ec4a 100644
--- a/tests/automatic/producer/python_api/check_windows.bat
+++ b/tests/automatic/producer/python_api/check_windows.bat
@@ -20,13 +20,13 @@ set NUM=0
 for /F %%N in ('find /C "successfuly sent" ^< "out"') do set NUM=%%N
 echo %NUM% | findstr 17 || goto error
 
-for /F %%N in ('find /C "} error: wrong input, context: response:error: Bad request, message: already have record with same id" ^< "out"') do set NUM=%%N
+for /F %%N in ('find /C "} error: wrong input, details: response:error: Bad request, message: already have record with same id" ^< "out"') do set NUM=%%N
 echo %NUM% | findstr 2 || goto error
 
-for /F %%N in ('find /C "} error: server warning, context: response:ignoring duplicate record" ^< "out"') do set NUM=%%N
+for /F %%N in ('find /C "} error: server warning, details: response:ignoring duplicate record" ^< "out"') do set NUM=%%N
 echo %NUM% | findstr 2 || goto error
 
-for /F %%N in ('find /C "} error: server warning, context: response:duplicated request" ^< "out"') do set NUM=%%N
+for /F %%N in ('find /C "} error: server warning, details: response:duplicated request" ^< "out"') do set NUM=%%N
 echo %NUM% | findstr 1 || goto error
 
 
diff --git a/tests/automatic/system_io/read_file_content/CMakeLists.txt b/tests/automatic/system_io/read_file_content/CMakeLists.txt
index 5f302cdb12e8f9f74b452a88b5370b04dd26761e..c4a37e8638a3f273847a6177be065409fd7098d5 100644
--- a/tests/automatic/system_io/read_file_content/CMakeLists.txt
+++ b/tests/automatic/system_io/read_file_content/CMakeLists.txt
@@ -17,6 +17,6 @@ set_target_properties(${TARGET_NAME} PROPERTIES LINKER_LANGUAGE CXX)
 add_test_setup_cleanup(${TARGET_NAME})
 add_integration_test(${TARGET_NAME} readfile "test/1 123")
 add_integration_test(${TARGET_NAME} readfile_unkown_size "test/2 unknown_size")
-add_integration_test(${TARGET_NAME} filenotfound "test_notexist error:Nosuchfileordirectory,context:name:test_notexist")
-add_integration_test(${TARGET_NAME} filenoaccess "file_noaccess error:Permissiondenied,context:name:file_noaccess")
+add_integration_test(${TARGET_NAME} filenotfound "test_notexist error:nosuchfileordirectory,details:name:test_notexist")
+add_integration_test(${TARGET_NAME} filenoaccess "file_noaccess error:permissiondenied,details:name:file_noaccess")
 
diff --git a/tests/automatic/system_io/read_folder_content/CMakeLists.txt b/tests/automatic/system_io/read_folder_content/CMakeLists.txt
index 42bc1ec48ee536502e96f50e1ae26b59fac96683..c6938f921ac62bc9d31e8fdb0e040ad69fbfd450 100644
--- a/tests/automatic/system_io/read_folder_content/CMakeLists.txt
+++ b/tests/automatic/system_io/read_folder_content/CMakeLists.txt
@@ -27,6 +27,6 @@ ELSE()
 ENDIF(WIN32)
 
 
-add_integration_test(${TARGET_NAME} foldernotfound "test_notexist error:Nosuchfileordirectory,context:name:test_notexist")
-add_integration_test(${TARGET_NAME} foldernoaccess "test_noaccess1 error:Permissiondenied,context:name:test_noaccess1")
+add_integration_test(${TARGET_NAME} foldernotfound "test_notexist error:nosuchfileordirectory,details:name:test_notexist")
+add_integration_test(${TARGET_NAME} foldernoaccess "test_noaccess1 error:permissiondenied,details:name:test_noaccess1")
 
diff --git a/tests/automatic/system_io/read_string_from_file/CMakeLists.txt b/tests/automatic/system_io/read_string_from_file/CMakeLists.txt
index 8cfa3aed8aecbc37b1b8643f8812e9687c5f433d..35645add62b86d0e66e05ca6e7221519efbf7e41 100644
--- a/tests/automatic/system_io/read_string_from_file/CMakeLists.txt
+++ b/tests/automatic/system_io/read_string_from_file/CMakeLists.txt
@@ -16,6 +16,6 @@ set_target_properties(${TARGET_NAME} PROPERTIES LINKER_LANGUAGE CXX)
 
 add_test_setup_cleanup(${TARGET_NAME})
 add_integration_test(${TARGET_NAME} readfile "test/1 123")
-add_integration_test(${TARGET_NAME} filenotfound "test_notexist error:Nosuchfileordirectory,context:name:test_notexist")
-add_integration_test(${TARGET_NAME} filenoaccess "file_noaccess error:Permissiondenied,context:name:file_noaccess")
+add_integration_test(${TARGET_NAME} filenotfound "test_notexist error:nosuchfileordirectory,details:name:test_notexist")
+add_integration_test(${TARGET_NAME} filenoaccess "file_noaccess error:permissiondenied,details:name:file_noaccess")
 
diff --git a/tests/automatic/system_io/read_subdirectories/CMakeLists.txt b/tests/automatic/system_io/read_subdirectories/CMakeLists.txt
index ab9b1908fa01a83bee13dfd7bf19ea9332c19cd4..4ec44441e63fe7601eda52d39450c788fc4a43aa 100644
--- a/tests/automatic/system_io/read_subdirectories/CMakeLists.txt
+++ b/tests/automatic/system_io/read_subdirectories/CMakeLists.txt
@@ -27,6 +27,6 @@ ELSE()
 ENDIF(WIN32)
 
 
-add_integration_test(${TARGET_NAME} foldernotfound "test_notexist error:Nosuchfileordirectory,context:name:test_notexist")
-add_integration_test(${TARGET_NAME} foldernoaccess "test_noaccess1 error:Permissiondenied,context:name:test_noaccess1")
+add_integration_test(${TARGET_NAME} foldernotfound "test_notexist error:nosuchfileordirectory,details:name:test_notexist")
+add_integration_test(${TARGET_NAME} foldernoaccess "test_noaccess1 error:permissiondenied,details:name:test_noaccess1")
 
diff --git a/tests/automatic/system_io/write_data_to_file/CMakeLists.txt b/tests/automatic/system_io/write_data_to_file/CMakeLists.txt
index 3e7a13c469495cc372c2132356ca9f1efc8d3993..c062aa99347424f9b35ec8ec7b87cf102bf9f559 100644
--- a/tests/automatic/system_io/write_data_to_file/CMakeLists.txt
+++ b/tests/automatic/system_io/write_data_to_file/CMakeLists.txt
@@ -22,5 +22,5 @@ else ()
 endif()
 
 add_integration_test(${TARGET_NAME} writetwice "test_file ok dummy" nomem)
-add_integration_test(${TARGET_NAME} dirnoaccess "test_noaccess/test_file error error:Permissiondenied,context:name:test_noaccess/test_file" nomem)
+add_integration_test(${TARGET_NAME} dirnoaccess "test_noaccess/test_file error error:permissiondenied,details:name:test_noaccess/test_file" nomem)