diff --git a/CMakeLists.txt b/CMakeLists.txt
index 1356c10a541de3db83870d6fa343cd268899434f..45af391cd03a63110fa2a773c2d924ed331ec473 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -11,6 +11,7 @@ set (ASAPO_BROKER_API_VER "v0.4")
 set (ASAPO_FILE_TRANSFER_SERVICE_API_VER "v0.2")
 set (ASAPO_RECEIVER_API_VER "v0.3")
 set (ASAPO_RDS_API_VER "v0.1")
+set (DB_SCHEMA_VER "v0.1")
 
 set(CMAKE_CXX_STANDARD 11)
 IF(WIN32)
diff --git a/broker/src/asapo_broker/database/database.go b/broker/src/asapo_broker/database/database.go
index 0bb12f25217b93f03e3fe4556e6c2c060864d5b1..7e48a6e526a07a1d316f240acba65384c32829a2 100644
--- a/broker/src/asapo_broker/database/database.go
+++ b/broker/src/asapo_broker/database/database.go
@@ -3,13 +3,13 @@ package database
 import "asapo_common/utils"
 
 type Request struct {
-	DbName string
-	DbCollectionName string
-	GroupId string
-	Op string
-	DatasetOp bool
+	DbName         string
+	Stream         string
+	GroupId        string
+	Op             string
+	DatasetOp      bool
 	MinDatasetSize int
-	ExtraParam string
+	ExtraParam     string
 }
 
 type Agent interface {
diff --git a/broker/src/asapo_broker/database/encoding.go b/broker/src/asapo_broker/database/encoding.go
index 7f08397975d94553a61a87dad251dbdb55ec2864..6e61d95f0d6a6fe462aca48754223d38da30616b 100644
--- a/broker/src/asapo_broker/database/encoding.go
+++ b/broker/src/asapo_broker/database/encoding.go
@@ -85,8 +85,8 @@ func encodeRequest(request *Request) error {
 		return &DBError{utils.StatusWrongInput, "source name is too long"}
 	}
 
-	request.DbCollectionName = encodeStringForColName(request.DbCollectionName)
-	if len(request.DbCollectionName)> max_encoded_stream_size {
+	request.Stream = encodeStringForColName(request.Stream)
+	if len(request.Stream) > max_encoded_stream_size {
 		return &DBError{utils.StatusWrongInput, "stream name is too long"}
 	}
 
diff --git a/broker/src/asapo_broker/database/encoding_test.go b/broker/src/asapo_broker/database/encoding_test.go
index 1def90c99f6a2268883530be39b62fbb01eabb96..1b018289e8f1d6271b97b4a6a1e6ff9925e24ffe 100644
--- a/broker/src/asapo_broker/database/encoding_test.go
+++ b/broker/src/asapo_broker/database/encoding_test.go
@@ -18,16 +18,16 @@ func TestEncoding(t *testing.T) {
 	assert.Equal(t, sourceDecoded, source)
 
 	r := Request{
-		DbName:           source,
-		DbCollectionName: stream,
-		GroupId:          stream,
-		Op:               "",
-		DatasetOp:        false,
-		MinDatasetSize:   0,
-		ExtraParam:       "",
+		DbName:         source,
+		Stream:         stream,
+		GroupId:        stream,
+		Op:             "",
+		DatasetOp:      false,
+		MinDatasetSize: 0,
+		ExtraParam:     "",
 	}
 	err := encodeRequest(&r)
-	assert.Equal(t, r.DbCollectionName, streamEncoded)
+	assert.Equal(t, r.Stream, streamEncoded)
 	assert.Equal(t, r.GroupId, streamEncoded)
 	assert.Equal(t, r.DbName, sourceEncoded)
 
@@ -63,13 +63,13 @@ func TestEncodingTooLong(t *testing.T) {
 		group := RandomString(test.groupSize)
 		source := RandomString(test.sourceSize)
 		r := Request{
-			DbName:           source,
-			DbCollectionName: stream,
-			GroupId:          group,
-			Op:               "",
-			DatasetOp:        false,
-			MinDatasetSize:   0,
-			ExtraParam:       "",
+			DbName:         source,
+			Stream:         stream,
+			GroupId:        group,
+			Op:             "",
+			DatasetOp:      false,
+			MinDatasetSize: 0,
+			ExtraParam:     "",
 		}
 		err := encodeRequest(&r)
 		if test.ok {
diff --git a/broker/src/asapo_broker/database/mongodb.go b/broker/src/asapo_broker/database/mongodb.go
index 75c885f30ee4290bf9d5980e2f23ebd25b4e9000..d248f016fd2cd719da9dc8c0b86f4c1803142e8d 100644
--- a/broker/src/asapo_broker/database/mongodb.go
+++ b/broker/src/asapo_broker/database/mongodb.go
@@ -182,7 +182,7 @@ func maxIndexQuery(request Request, returnIncompete bool) bson.M {
 }
 
 func (db *Mongodb) getMaxIndex(request Request, returnIncompete bool) (max_id int, err error) {
-	c := db.client.Database(request.DbName).Collection(data_collection_name_prefix + request.DbCollectionName)
+	c := db.client.Database(request.DbName).Collection(data_collection_name_prefix + request.Stream)
 	q := maxIndexQuery(request, returnIncompete)
 
 	opts := options.FindOne().SetSort(bson.M{"_id": -1}).SetReturnKey(true)
@@ -211,7 +211,7 @@ func (db *Mongodb) setCounter(request Request, ind int) (err error) {
 	update := bson.M{"$set": bson.M{pointer_field_name: ind}}
 	opts := options.Update().SetUpsert(true)
 	c := db.client.Database(request.DbName).Collection(pointer_collection_name)
-	q := bson.M{"_id": request.GroupId + "_" + request.DbCollectionName}
+	q := bson.M{"_id": request.GroupId + "_" + request.Stream}
 	_, err = c.UpdateOne(context.TODO(), q, update, opts)
 	return
 }
@@ -228,7 +228,7 @@ func (db *Mongodb) errorWhenCannotIncrementField(request Request, max_ind int) (
 func (db *Mongodb) incrementField(request Request, max_ind int, res interface{}) (err error) {
 	update := bson.M{"$inc": bson.M{pointer_field_name: 1}}
 	opts := options.FindOneAndUpdate().SetUpsert(true).SetReturnDocument(options.After)
-	q := bson.M{"_id": request.GroupId + "_" + request.DbCollectionName, pointer_field_name: bson.M{"$lt": max_ind}}
+	q := bson.M{"_id": request.GroupId + "_" + request.Stream, pointer_field_name: bson.M{"$lt": max_ind}}
 	c := db.client.Database(request.DbName).Collection(pointer_collection_name)
 
 	err = c.FindOneAndUpdate(context.TODO(), q, update, opts).Decode(res)
@@ -283,7 +283,7 @@ func recordContainsPartialData(request Request, rec map[string]interface{}) bool
 
 func (db *Mongodb) getRecordFromDb(request Request, id, id_max int) (res map[string]interface{}, err error) {
 	q := bson.M{"_id": id}
-	c := db.client.Database(request.DbName).Collection(data_collection_name_prefix + request.DbCollectionName)
+	c := db.client.Database(request.DbName).Collection(data_collection_name_prefix + request.Stream)
 	err = c.FindOne(context.TODO(), q, options.FindOne()).Decode(&res)
 	if err != nil {
 		answer := encodeAnswer(id, id_max, "")
@@ -369,7 +369,7 @@ func (db *Mongodb) negAckRecord(request Request) ([]byte, error) {
 		return nil, &DBError{utils.StatusWrongInput, err.Error()}
 	}
 
-	err = db.InsertRecordToInprocess(request.DbName, inprocess_collection_name_prefix+request.DbCollectionName+"_"+request.GroupId, input.Id, input.Params.DelayMs, 1, true)
+	err = db.InsertRecordToInprocess(request.DbName, inprocess_collection_name_prefix+request.Stream+"_"+request.GroupId, input.Id, input.Params.DelayMs, 1, true)
 	return []byte(""), err
 }
 
@@ -379,7 +379,7 @@ func (db *Mongodb) ackRecord(request Request) ([]byte, error) {
 	if err != nil {
 		return nil, &DBError{utils.StatusWrongInput, err.Error()}
 	}
-	c := db.client.Database(request.DbName).Collection(acks_collection_name_prefix + request.DbCollectionName + "_" + request.GroupId)
+	c := db.client.Database(request.DbName).Collection(acks_collection_name_prefix + request.Stream + "_" + request.GroupId)
 	_, err = c.InsertOne(context.Background(), &record)
 	if err != nil {
 		if duplicateError(err) {
@@ -388,7 +388,7 @@ func (db *Mongodb) ackRecord(request Request) ([]byte, error) {
 		return nil, err
 	}
 
-	c = db.client.Database(request.DbName).Collection(inprocess_collection_name_prefix + request.DbCollectionName + "_" + request.GroupId)
+	c = db.client.Database(request.DbName).Collection(inprocess_collection_name_prefix + request.Stream + "_" + request.GroupId)
 	_, err_del := c.DeleteOne(context.Background(), bson.M{"_id": record.ID})
 	if err_del != nil {
 		return nil, &DBError{utils.StatusWrongInput, err.Error()}
@@ -402,7 +402,7 @@ func (db *Mongodb) checkDatabaseOperationPrerequisites(request Request) error {
 		return &DBError{utils.StatusServiceUnavailable, no_session_msg}
 	}
 
-	if len(request.DbName) == 0 || len(request.DbCollectionName) == 0 {
+	if len(request.DbName) == 0 || len(request.Stream) == 0 {
 		return &DBError{utils.StatusWrongInput, "beamtime_id ans stream must be set"}
 	}
 
@@ -497,10 +497,10 @@ func (db *Mongodb) getNextAndMaxIndexesFromInprocessed(request Request, ignoreTi
 	}
 	tNow := time.Now().Unix()
 	dbSessionLock.Lock()
-	t := db.lastReadFromInprocess[request.DbCollectionName+"_"+request.GroupId]
+	t := db.lastReadFromInprocess[request.Stream+"_"+request.GroupId]
 	dbSessionLock.Unlock()
 	if (t <= tNow-int64(db.settings.ReadFromInprocessPeriod)) || ignoreTimeout {
-		record_ind, err = db.getUnProcessedId(request.DbName, inprocess_collection_name_prefix+request.DbCollectionName+"_"+request.GroupId, delayMs, nResendAttempts)
+		record_ind, err = db.getUnProcessedId(request.DbName, inprocess_collection_name_prefix+request.Stream+"_"+request.GroupId, delayMs, nResendAttempts)
 		if err != nil {
 			log_str := "error getting unprocessed id " + request.DbName + ", groupid: " + request.GroupId + ":" + err.Error()
 			logger.Debug(log_str)
@@ -514,7 +514,7 @@ func (db *Mongodb) getNextAndMaxIndexesFromInprocessed(request Request, ignoreTi
 		}
 	} else {
 		dbSessionLock.Lock()
-		db.lastReadFromInprocess[request.DbCollectionName+"_"+request.GroupId] = time.Now().Unix()
+		db.lastReadFromInprocess[request.Stream+"_"+request.GroupId] = time.Now().Unix()
 		dbSessionLock.Unlock()
 	}
 
@@ -595,7 +595,7 @@ func checkStreamFinished(request Request, id, id_max int, data map[string]interf
 	if !ok || !r.FinishedStream {
 		return nil
 	}
-	log_str := "reached end of stream " + request.DbCollectionName + " , next_stream: " + r.NextStream
+	log_str := "reached end of stream " + request.Stream + " , next_stream: " + r.NextStream
 	logger.Debug(log_str)
 
 	answer := encodeAnswer(r.ID-1, r.ID-1, r.NextStream)
@@ -614,7 +614,7 @@ func (db *Mongodb) getNextRecord(request Request) ([]byte, error) {
 	}
 
 	if err == nil {
-		err_update := db.InsertToInprocessIfNeeded(request.DbName, inprocess_collection_name_prefix+request.DbCollectionName+"_"+request.GroupId, nextInd, request.ExtraParam)
+		err_update := db.InsertToInprocessIfNeeded(request.DbName, inprocess_collection_name_prefix+request.Stream+"_"+request.GroupId, nextInd, request.ExtraParam)
 		if err_update != nil {
 			return nil, err_update
 		}
@@ -642,7 +642,7 @@ func getSizeFilter(request Request) bson.M {
 }
 
 func (db *Mongodb) getSize(request Request) ([]byte, error) {
-	c := db.client.Database(request.DbName).Collection(data_collection_name_prefix + request.DbCollectionName)
+	c := db.client.Database(request.DbName).Collection(data_collection_name_prefix + request.Stream)
 
 	filter := getSizeFilter(request)
 	size, err := c.CountDocuments(context.TODO(), filter, options.Count())
@@ -669,7 +669,7 @@ func (db *Mongodb) resetCounter(request Request) ([]byte, error) {
 		return []byte(""), err
 	}
 
-	c := db.client.Database(request.DbName).Collection(inprocess_collection_name_prefix + request.DbCollectionName + "_" + request.GroupId)
+	c := db.client.Database(request.DbName).Collection(inprocess_collection_name_prefix + request.Stream + "_" + request.GroupId)
 	_, err_del := c.DeleteMany(context.Background(), bson.M{"_id": bson.M{"$gte": id}})
 	if err_del != nil {
 		return nil, &DBError{utils.StatusWrongInput, err.Error()}
@@ -678,24 +678,40 @@ func (db *Mongodb) resetCounter(request Request) ([]byte, error) {
 	return []byte(""), nil
 }
 
+func getMetaId(request Request) (string, error) {
+	switch request.ExtraParam {
+	case "0":
+		return "bt", nil
+	case "1":
+		return "st_" + request.Stream, nil
+	default:
+		return "", &DBError{utils.StatusWrongInput, "wrong meta type"}
+	}
+}
+
 func (db *Mongodb) getMeta(request Request) ([]byte, error) {
-	id, err := strconv.Atoi(request.ExtraParam)
+	id, err := getMetaId(request)
 	if err != nil {
 		return nil, err
 	}
-
-	var res map[string]interface{}
 	q := bson.M{"_id": id}
+	var res map[string]interface{}
 	c := db.client.Database(request.DbName).Collection(meta_collection_name)
 	err = c.FindOne(context.TODO(), q, options.FindOne()).Decode(&res)
 	if err != nil {
-		log_str := "error getting meta with id " + strconv.Itoa(id) + " for " + request.DbName + " : " + err.Error()
+		log_str := "error getting meta for " + id + " in " + request.DbName + " : " + err.Error()
 		logger.Debug(log_str)
 		return nil, &DBError{utils.StatusNoData, err.Error()}
 	}
-	log_str := "got record id " + strconv.Itoa(id) + " for " + request.DbName
+	userMeta, ok := res["meta"]
+	if !ok {
+		log_str := "error getting meta for " + id + " in " + request.DbName + " : cannot parse database response"
+		logger.Error(log_str)
+		return nil, errors.New(log_str)
+	}
+	log_str := "got metadata for " + id + " in " + request.DbName
 	logger.Debug(log_str)
-	return utils.MapToJson(&res)
+	return utils.MapToJson(&userMeta)
 }
 
 func (db *Mongodb) processQueryError(query, dbname string, err error) ([]byte, error) {
@@ -713,7 +729,7 @@ func (db *Mongodb) queryMessages(request Request) ([]byte, error) {
 		return nil, &DBError{utils.StatusWrongInput, err.Error()}
 	}
 
-	c := db.client.Database(request.DbName).Collection(data_collection_name_prefix + request.DbCollectionName)
+	c := db.client.Database(request.DbName).Collection(data_collection_name_prefix + request.Stream)
 	opts := options.Find()
 
 	if len(sort) > 0 {
@@ -832,14 +848,14 @@ func (db *Mongodb) collectionExist(request Request, name string) (bool, error) {
 }
 
 func (db *Mongodb) deleteDataCollection(errorOnNotexist bool, request Request) error {
-	dataCol := data_collection_name_prefix + request.DbCollectionName
+	dataCol := data_collection_name_prefix + request.Stream
 	if errorOnNotexist {
 		exist, err := db.collectionExist(request, dataCol)
 		if err != nil {
 			return err
 		}
 		if !exist {
-			return &DBError{utils.StatusWrongInput, "stream " + request.DbCollectionName + " does not exist"}
+			return &DBError{utils.StatusWrongInput, "stream " + request.Stream + " does not exist"}
 		}
 	}
 	return db.deleteCollection(request, dataCol)
@@ -851,10 +867,10 @@ func (db *Mongodb) deleteDocumentsInCollection(request Request, collection strin
 	return err
 }
 
-func escapeQuery(query string )(res string) {
+func escapeQuery(query string) (res string) {
 	chars := `\-[]{}()*+?.,^$|#`
 	for _, char := range chars {
-		query = strings.ReplaceAll(query,string(char),`\`+string(char))
+		query = strings.ReplaceAll(query, string(char), `\`+string(char))
 	}
 	return query
 }
@@ -877,15 +893,15 @@ func (db *Mongodb) deleteCollectionsWithPrefix(request Request, prefix string) e
 }
 
 func (db *Mongodb) deleteServiceMeta(request Request) error {
-	err := db.deleteCollectionsWithPrefix(request, acks_collection_name_prefix+request.DbCollectionName)
+	err := db.deleteCollectionsWithPrefix(request, acks_collection_name_prefix+request.Stream)
 	if err != nil {
 		return err
 	}
-	err = db.deleteCollectionsWithPrefix(request, inprocess_collection_name_prefix+request.DbCollectionName)
+	err = db.deleteCollectionsWithPrefix(request, inprocess_collection_name_prefix+request.Stream)
 	if err != nil {
 		return err
 	}
-	return db.deleteDocumentsInCollection(request, pointer_collection_name, "_id", ".*_"+escapeQuery(request.DbCollectionName)+"$")
+	return db.deleteDocumentsInCollection(request, pointer_collection_name, "_id", ".*_"+escapeQuery(request.Stream)+"$")
 }
 
 func (db *Mongodb) deleteStream(request Request) ([]byte, error) {
@@ -903,7 +919,7 @@ func (db *Mongodb) deleteStream(request Request) ([]byte, error) {
 		return nil, &DBError{utils.StatusWrongInput, "wrong params: " + request.ExtraParam}
 	}
 	if !*params.DeleteMeta {
-		logger.Debug("skipping delete stream meta for " + request.DbCollectionName + " in " + request.DbName)
+		logger.Debug("skipping delete stream meta for " + request.Stream + " in " + request.DbName)
 		return nil, nil
 	}
 
@@ -917,7 +933,7 @@ func (db *Mongodb) deleteStream(request Request) ([]byte, error) {
 }
 
 func (db *Mongodb) lastAck(request Request) ([]byte, error) {
-	c := db.client.Database(request.DbName).Collection(acks_collection_name_prefix + request.DbCollectionName + "_" + request.GroupId)
+	c := db.client.Database(request.DbName).Collection(acks_collection_name_prefix + request.Stream + "_" + request.GroupId)
 	opts := options.FindOne().SetSort(bson.M{"_id": -1}).SetReturnKey(true)
 	result := LastAck{0}
 	var q bson.M = nil
@@ -984,7 +1000,7 @@ func extractNacsFromCursor(err error, cursor *mongo.Cursor) ([]int, error) {
 }
 
 func (db *Mongodb) getNacks(request Request, min_index, max_index int) ([]int, error) {
-	c := db.client.Database(request.DbName).Collection(acks_collection_name_prefix + request.DbCollectionName + "_" + request.GroupId)
+	c := db.client.Database(request.DbName).Collection(acks_collection_name_prefix + request.Stream + "_" + request.GroupId)
 
 	if res, err, ok := db.canAvoidDbRequest(min_index, max_index, c); ok {
 		return res, err
@@ -1004,7 +1020,6 @@ func (db *Mongodb) getStreams(request Request) ([]byte, error) {
 	return json.Marshal(&rec)
 }
 
-
 func (db *Mongodb) ProcessRequest(request Request) (answer []byte, err error) {
 	if err := db.checkDatabaseOperationPrerequisites(request); err != nil {
 		return nil, err
diff --git a/broker/src/asapo_broker/database/mongodb_test.go b/broker/src/asapo_broker/database/mongodb_test.go
index 9b4742ff9c0146509df402fe5295a946806b54c5..e4c8f458d43b84ae615dfd8e0b0ef4a1435c53f2 100644
--- a/broker/src/asapo_broker/database/mongodb_test.go
+++ b/broker/src/asapo_broker/database/mongodb_test.go
@@ -19,6 +19,14 @@ type TestRecord struct {
 	Timestamp int64             `bson:"timestamp" json:"timestamp"`
 }
 
+type TestRecordStreamBtMeta struct {
+	ID   string `bson:"_id" json:"_id"`
+	Meta string `bson:"meta" json:"meta"`
+}
+
+var recbt = TestRecordStreamBtMeta{"bt", "meta_bt"}
+var recst = TestRecordStreamBtMeta{"st_stream", "meta_st"}
+
 type TestDataset struct {
 	Timestamp int64        `bson:"timestamp" json:"timestamp"`
 	ID        int64        `bson:"_id" json:"_id"`
@@ -33,8 +41,7 @@ const collection = "stream"
 const collection2 = "stream2"
 const dbaddress = "127.0.0.1:27017"
 const groupId = "bid2a5auidddp1vl71d0"
-const metaID = 0
-const metaID_str = "0"
+const metaID = "bt"
 
 const badSymbolsDb = `/\."$`
 const badSymbolsCol = `$`
@@ -93,31 +100,31 @@ func TestMongoDBConnectOK(t *testing.T) {
 }
 
 func TestMongoDBGetNextErrorWhenNotConnected(t *testing.T) {
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
 	assert.Equal(t, utils.StatusServiceUnavailable, err.(*DBError).Code)
 }
 
 func TestMongoDBGetMetaErrorWhenNotConnected(t *testing.T) {
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, Op: "meta", ExtraParam: "0"})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "meta", ExtraParam: "0"})
 	assert.Equal(t, utils.StatusServiceUnavailable, err.(*DBError).Code)
 }
 
 func TestMongoDBQueryMessagesErrorWhenNotConnected(t *testing.T) {
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, Op: "querymessages", ExtraParam: "0"})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "querymessages", ExtraParam: "0"})
 	assert.Equal(t, utils.StatusServiceUnavailable, err.(*DBError).Code)
 }
 
 func TestMongoDBGetNextErrorWhenWrongDatabasename(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
-	_, err := db.ProcessRequest(Request{DbCollectionName: collection, GroupId: groupId, Op: "next"})
+	_, err := db.ProcessRequest(Request{Stream: collection, GroupId: groupId, Op: "next"})
 	assert.Equal(t, utils.StatusWrongInput, err.(*DBError).Code)
 }
 
 func TestMongoDBGetNextErrorWhenNonExistingDatacollectionname(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: "bla", GroupId: groupId, Op: "next"})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: "bla", GroupId: groupId, Op: "next"})
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":0,\"id_max\":0,\"next_stream\":\"\"}", err.Error())
 }
@@ -125,7 +132,7 @@ func TestMongoDBGetNextErrorWhenNonExistingDatacollectionname(t *testing.T) {
 func TestMongoDBGetLastErrorWhenNonExistingDatacollectionname(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: "bla", GroupId: groupId, Op: "last"})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: "bla", GroupId: groupId, Op: "last"})
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":0,\"id_max\":0,\"next_stream\":\"\"}", err.Error())
 }
@@ -133,7 +140,7 @@ func TestMongoDBGetLastErrorWhenNonExistingDatacollectionname(t *testing.T) {
 func TestMongoDBGetByIdErrorWhenNoData(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "id", ExtraParam: "2"})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "id", ExtraParam: "2"})
 
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":2,\"id_max\":0,\"next_stream\":\"\"}", err.Error())
@@ -143,7 +150,7 @@ func TestMongoDBGetNextErrorWhenRecordNotThereYet(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec2)
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":2,\"next_stream\":\"\"}", err.Error())
 }
@@ -152,7 +159,7 @@ func TestMongoDBGetNextOK(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec1_expect), string(res))
 }
@@ -163,8 +170,8 @@ func TestMongoDBGetNextErrorOnFinishedStream(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec_finished)
 
-	db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
+	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
 
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1,\"next_stream\":\"next1\"}", err.(*DBError).Message)
@@ -176,9 +183,9 @@ func TestMongoDBGetNextErrorOnFinishedStreamAlways(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec_finished)
 
-	db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
-	db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
+	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
 
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1,\"next_stream\":\"next1\"}", err.(*DBError).Message)
@@ -192,7 +199,7 @@ func TestMongoDBGetByIdErrorOnFinishedStream(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec_finished)
 
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "id", ExtraParam: "2"})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "id", ExtraParam: "2"})
 
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1,\"next_stream\":\"next1\"}", err.(*DBError).Message)
@@ -204,7 +211,7 @@ func TestMongoDBGetLastErrorOnFinishedStream(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec_finished)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "last"})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last"})
 	fmt.Println(string(res))
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1,\"next_stream\":\"next1\"}", err.(*DBError).Message)
@@ -214,8 +221,8 @@ func TestMongoDBGetNextErrorOnNoMoreData(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
-	db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
+	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
 
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1,\"next_stream\":\"\"}", err.(*DBError).Message)
@@ -226,8 +233,8 @@ func TestMongoDBGetNextCorrectOrder(t *testing.T) {
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec2)
 	db.insertRecord(dbname, collection, &rec1)
-	res1, _ := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
-	res2, _ := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
+	res1, _ := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	res2, _ := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
 	assert.Equal(t, string(rec1_expect), string(res1))
 	assert.Equal(t, string(rec2_expect), string(res2))
 }
@@ -264,7 +271,7 @@ func getRecords(n int, resend bool) []int {
 	for i := 0; i < n; i++ {
 		go func() {
 			defer wg.Done()
-			res_bin, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: extra_param})
+			res_bin, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: extra_param})
 			if err != nil {
 				fmt.Println("error at read ", i)
 			}
@@ -309,13 +316,13 @@ func TestMongoDBGetLastAfterErasingDatabase(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	insertRecords(10)
-	db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
+	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
 	db.dropDatabase(dbname)
 
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "last", ExtraParam: "0"})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last", ExtraParam: "0"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec2_expect), string(res))
 }
@@ -324,7 +331,7 @@ func TestMongoDBGetNextAfterErasingDatabase(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	insertRecords(200)
-	db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
+	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
 	db.dropDatabase(dbname)
 
 	n := 100
@@ -337,10 +344,10 @@ func TestMongoDBGetNextEmptyAfterErasingDatabase(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	insertRecords(10)
-	db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
+	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
 	db.dropDatabase(dbname)
 
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":0,\"id_max\":0,\"next_stream\":\"\"}", err.Error())
 }
@@ -350,7 +357,7 @@ func TestMongoDBgetRecordByID(t *testing.T) {
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "id", ExtraParam: "1"})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "id", ExtraParam: "1"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec1_expect), string(res))
 }
@@ -359,7 +366,7 @@ func TestMongoDBgetRecordByIDFails(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "id", ExtraParam: "2"})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "id", ExtraParam: "2"})
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":2,\"id_max\":1,\"next_stream\":\"\"}", err.Error())
 }
@@ -368,7 +375,7 @@ func TestMongoDBGetRecordNext(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec1_expect), string(res))
 }
@@ -379,8 +386,8 @@ func TestMongoDBGetRecordNextMultipleCollections(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection2, &rec_dataset1)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
-	res_string, err2 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection2, GroupId: groupId, Op: "next", DatasetOp: true})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
+	res_string, err2 := db.ProcessRequest(Request{DbName: dbname, Stream: collection2, GroupId: groupId, Op: "next", DatasetOp: true})
 	var res_ds TestDataset
 	json.Unmarshal(res_string, &res_ds)
 
@@ -396,7 +403,7 @@ func TestMongoDBGetRecordID(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "id", ExtraParam: "1"})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "id", ExtraParam: "1"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec1_expect), string(res))
 }
@@ -405,7 +412,7 @@ func TestMongoDBWrongOp(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "bla"})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "bla"})
 	assert.NotNil(t, err)
 }
 
@@ -415,7 +422,7 @@ func TestMongoDBGetRecordLast(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "last", ExtraParam: "0"})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last", ExtraParam: "0"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec2_expect), string(res))
 }
@@ -426,13 +433,13 @@ func TestMongoDBGetNextAfterGetLastCorrect(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "last", ExtraParam: "0"})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last", ExtraParam: "0"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec2_expect), string(res))
 
 	db.insertRecord(dbname, collection, &rec3)
 
-	res, err = db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
+	res, err = db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec1_expect), string(res))
 
@@ -445,7 +452,7 @@ func TestMongoDBGetSize(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec2)
 	db.insertRecord(dbname, collection, &rec3)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, Op: "size"})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "size"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(recs1_expect), string(res))
 }
@@ -456,7 +463,7 @@ func TestMongoDBGetSizeWithFinishedStream(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec_finished)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, Op: "size"})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "size"})
 	assert.Nil(t, err)
 	var rec_expect, _ = json.Marshal(&SizeRecord{1})
 	assert.Equal(t, string(rec_expect), string(res))
@@ -467,10 +474,10 @@ func TestMongoDBGetSizeForDatasets(t *testing.T) {
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
 
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, Op: "size", ExtraParam: "false"})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "size", ExtraParam: "false"})
 	assert.Equal(t, utils.StatusWrongInput, err.(*DBError).Code)
 
-	_, err1 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, Op: "size", ExtraParam: "true"})
+	_, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "size", ExtraParam: "true"})
 	assert.Equal(t, utils.StatusWrongInput, err1.(*DBError).Code)
 }
 
@@ -480,7 +487,7 @@ func TestMongoDBGetSizeForDatasetsWithFinishedStream(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
 	db.insertRecord(dbname, collection, &rec_finished)
 
-	res, _ := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, Op: "size", ExtraParam: "true"})
+	res, _ := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "size", ExtraParam: "true"})
 
 	var rec_expect, _ = json.Marshal(&SizeRecord{1})
 	assert.Equal(t, string(rec_expect), string(res))
@@ -495,7 +502,7 @@ func TestMongoDBGetSizeDataset(t *testing.T) {
 
 	size2_expect, _ := json.Marshal(SizeRecord{2})
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, Op: "size", ExtraParam: "true"})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "size", ExtraParam: "true"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(size2_expect), string(res))
 }
@@ -504,7 +511,7 @@ func TestMongoDBGetSizeNoRecords(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, Op: "size"})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "size"})
 	assert.Nil(t, err)
 	assert.Equal(t, string(recs2_expect), string(res))
 }
@@ -522,7 +529,7 @@ func TestMongoPingNotConected(t *testing.T) {
 }
 
 func TestMongoDBgetRecordByIDNotConnected(t *testing.T) {
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "id", ExtraParam: "1"})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "id", ExtraParam: "1"})
 	assert.Equal(t, utils.StatusServiceUnavailable, err.(*DBError).Code)
 }
 
@@ -532,29 +539,39 @@ func TestMongoDBResetCounter(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
 
-	res1, err1 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
+	res1, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
 
 	assert.Nil(t, err1)
 	assert.Equal(t, string(rec1_expect), string(res1))
 
-	_, err_reset := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "resetcounter", ExtraParam: "1"})
+	_, err_reset := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "resetcounter", ExtraParam: "1"})
 	assert.Nil(t, err_reset)
 
-	res2, err2 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
+	res2, err2 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
 
 	assert.Nil(t, err2)
 	assert.Equal(t, string(rec2_expect), string(res2))
 }
 
-func TestMongoDBGetMetaOK(t *testing.T) {
-	recm := rec1
+func TestMongoDBGetMetaBtOK(t *testing.T) {
+	db.Connect(dbaddress)
+	defer cleanup()
+	rec_expect, _ := json.Marshal(recbt.Meta)
+	db.insertMeta(dbname, &recbt)
+
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: "whatever", Op: "meta", ExtraParam: "0"})
+
+	assert.Nil(t, err)
+	assert.Equal(t, string(rec_expect), string(res))
+}
+
+func TestMongoDBGetMetaStOK(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
-	recm.ID = metaID
-	rec_expect, _ := json.Marshal(recm)
-	db.insertMeta(dbname, &recm)
+	rec_expect, _ := json.Marshal(recst.Meta)
+	db.insertMeta(dbname, &recst)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, Op: "meta", ExtraParam: metaID_str})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "meta", ExtraParam: "1"})
 
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec_expect), string(res))
@@ -564,8 +581,9 @@ func TestMongoDBGetMetaErr(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, Op: "meta", ExtraParam: metaID_str})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "meta", ExtraParam: "1"})
 	assert.NotNil(t, err)
+	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
 }
 
 type MetaData struct {
@@ -639,7 +657,7 @@ func TestMongoDBQueryMessagesOK(t *testing.T) {
 		//			continue
 		//		}
 
-		res_string, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, Op: "querymessages", ExtraParam: test.query})
+		res_string, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "querymessages", ExtraParam: test.query})
 		var res []TestRecordMeta
 		json.Unmarshal(res_string, &res)
 		//		fmt.Println(string(res_string))
@@ -658,7 +676,7 @@ func TestMongoDBQueryMessagesOnEmptyDatabase(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	for _, test := range tests {
-		res_string, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, Op: "querymessages", ExtraParam: test.query})
+		res_string, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, Op: "querymessages", ExtraParam: test.query})
 		var res []TestRecordMeta
 		json.Unmarshal(res_string, &res)
 		assert.Equal(t, 0, len(res))
@@ -684,7 +702,7 @@ func TestMongoDBGetDataset(t *testing.T) {
 
 	db.insertRecord(dbname, collection, &rec_dataset1)
 
-	res_string, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", DatasetOp: true})
+	res_string, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", DatasetOp: true})
 
 	assert.Nil(t, err)
 
@@ -700,7 +718,7 @@ func TestMongoDBNoDataOnNotCompletedFirstDataset(t *testing.T) {
 
 	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
 
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", DatasetOp: true})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", DatasetOp: true})
 
 	assert.Equal(t, utils.StatusPartialData, err.(*DBError).Code)
 	var res TestDataset
@@ -715,8 +733,8 @@ func TestMongoDBNoDataOnNotCompletedNextDataset(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
 	db.insertRecord(dbname, collection, &rec_dataset2_incomplete)
 
-	_, err1 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", DatasetOp: true})
-	_, err2 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", DatasetOp: true})
+	_, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", DatasetOp: true})
+	_, err2 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", DatasetOp: true})
 
 	assert.Equal(t, utils.StatusPartialData, err1.(*DBError).Code)
 	assert.Equal(t, utils.StatusPartialData, err2.(*DBError).Code)
@@ -732,7 +750,7 @@ func TestMongoDBGetRecordLastDataSetSkipsIncompleteSets(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec_dataset1)
 	db.insertRecord(dbname, collection, &rec_dataset2)
 
-	res_string, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "last", DatasetOp: true, ExtraParam: "0"})
+	res_string, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last", DatasetOp: true, ExtraParam: "0"})
 
 	assert.Nil(t, err)
 
@@ -749,7 +767,7 @@ func TestMongoDBGetRecordLastDataSetReturnsIncompleteSets(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec_dataset1)
 	db.insertRecord(dbname, collection, &rec_dataset2)
 
-	res_string, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "last",
+	res_string, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last",
 		DatasetOp: true, MinDatasetSize: 3, ExtraParam: "0"})
 
 	assert.Nil(t, err)
@@ -767,7 +785,7 @@ func TestMongoDBGetRecordLastDataSetSkipsIncompleteSetsWithMinSize(t *testing.T)
 	db.insertRecord(dbname, collection, &rec_dataset1)
 	db.insertRecord(dbname, collection, &rec_dataset2_incomplete3)
 
-	res_string, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "last",
+	res_string, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last",
 		DatasetOp: true, MinDatasetSize: 3, ExtraParam: "0"})
 
 	assert.Nil(t, err)
@@ -784,7 +802,7 @@ func TestMongoDBGetRecordLastDataSetWithFinishedStream(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec_dataset1)
 	db.insertRecord(dbname, collection, &rec_finished)
 
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "last",
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last",
 		DatasetOp: true, ExtraParam: "0"})
 
 	assert.NotNil(t, err)
@@ -801,7 +819,7 @@ func TestMongoDBGetRecordLastDataSetWithIncompleteDatasetsAndFinishedStreamRetur
 	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
 	db.insertRecord(dbname, collection, &rec_finished)
 
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "last",
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last",
 		DatasetOp: true, MinDatasetSize: 2, ExtraParam: "0"})
 
 	assert.NotNil(t, err)
@@ -818,7 +836,7 @@ func TestMongoDBGetRecordLastDataSetOK(t *testing.T) {
 	db.insertRecord(dbname, collection, &rec_dataset1)
 	db.insertRecord(dbname, collection, &rec_dataset3)
 
-	res_string, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "last", DatasetOp: true, ExtraParam: "0"})
+	res_string, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "last", DatasetOp: true, ExtraParam: "0"})
 
 	assert.Nil(t, err)
 
@@ -833,7 +851,7 @@ func TestMongoDBGetDatasetID(t *testing.T) {
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec_dataset1)
 
-	res_string, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "id", DatasetOp: true, ExtraParam: "1"})
+	res_string, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "id", DatasetOp: true, ExtraParam: "1"})
 
 	assert.Nil(t, err)
 
@@ -849,7 +867,7 @@ func TestMongoDBErrorOnIncompleteDatasetID(t *testing.T) {
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
 
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "id", DatasetOp: true, ExtraParam: "1"})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "id", DatasetOp: true, ExtraParam: "1"})
 
 	assert.Equal(t, utils.StatusPartialData, err.(*DBError).Code)
 
@@ -865,7 +883,7 @@ func TestMongoDBOkOnIncompleteDatasetID(t *testing.T) {
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
 
-	res_string, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "id", DatasetOp: true, MinDatasetSize: 3, ExtraParam: "1"})
+	res_string, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "id", DatasetOp: true, MinDatasetSize: 3, ExtraParam: "1"})
 
 	assert.Nil(t, err)
 
@@ -912,7 +930,7 @@ func TestMongoDBListStreams(t *testing.T) {
 		}
 		var rec_streams_expect, _ = json.Marshal(test.expectedStreams)
 
-		res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: "0", Op: "streams", ExtraParam: test.from})
+		res, err := db.ProcessRequest(Request{DbName: dbname, Stream: "0", Op: "streams", ExtraParam: test.from})
 		if test.ok {
 			assert.Nil(t, err, test.test)
 			assert.Equal(t, string(rec_streams_expect), string(res), test.test)
@@ -932,7 +950,7 @@ func TestMongoDBAckMessage(t *testing.T) {
 
 	query_str := "{\"Id\":1,\"Op\":\"ackmessage\"}"
 
-	request := Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str}
+	request := Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str}
 	res, err := db.ProcessRequest(request)
 	nacks, _ := db.getNacks(request, 0, 0)
 	assert.Nil(t, err)
@@ -969,12 +987,12 @@ func TestMongoDBNacks(t *testing.T) {
 			db.insertRecord(dbname, collection, &rec_finished11)
 		}
 		if test.ackRecords {
-			db.ackRecord(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, ExtraParam: "{\"Id\":2,\"Op\":\"ackmessage\"}"})
-			db.ackRecord(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, ExtraParam: "{\"Id\":3,\"Op\":\"ackmessage\"}"})
-			db.ackRecord(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, ExtraParam: "{\"Id\":4,\"Op\":\"ackmessage\"}"})
+			db.ackRecord(Request{DbName: dbname, Stream: collection, GroupId: groupId, ExtraParam: "{\"Id\":2,\"Op\":\"ackmessage\"}"})
+			db.ackRecord(Request{DbName: dbname, Stream: collection, GroupId: groupId, ExtraParam: "{\"Id\":3,\"Op\":\"ackmessage\"}"})
+			db.ackRecord(Request{DbName: dbname, Stream: collection, GroupId: groupId, ExtraParam: "{\"Id\":4,\"Op\":\"ackmessage\"}"})
 		}
 
-		res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "nacks", ExtraParam: test.rangeString})
+		res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "nacks", ExtraParam: test.rangeString})
 		if test.ok {
 			assert.Nil(t, err, test.test)
 			assert.Equal(t, test.resString, string(res), test.test)
@@ -1004,12 +1022,12 @@ func TestMongoDBLastAcks(t *testing.T) {
 			db.insertRecord(dbname, collection, &rec_finished11)
 		}
 		if test.ackRecords {
-			db.ackRecord(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, ExtraParam: "{\"Id\":2,\"Op\":\"ackmessage\"}"})
-			db.ackRecord(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, ExtraParam: "{\"Id\":3,\"Op\":\"ackmessage\"}"})
-			db.ackRecord(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, ExtraParam: "{\"Id\":4,\"Op\":\"ackmessage\"}"})
+			db.ackRecord(Request{DbName: dbname, Stream: collection, GroupId: groupId, ExtraParam: "{\"Id\":2,\"Op\":\"ackmessage\"}"})
+			db.ackRecord(Request{DbName: dbname, Stream: collection, GroupId: groupId, ExtraParam: "{\"Id\":3,\"Op\":\"ackmessage\"}"})
+			db.ackRecord(Request{DbName: dbname, Stream: collection, GroupId: groupId, ExtraParam: "{\"Id\":4,\"Op\":\"ackmessage\"}"})
 		}
 
-		res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "lastack"})
+		res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "lastack"})
 		assert.Nil(t, err, test.test)
 		assert.Equal(t, test.resString, string(res), test.test)
 		cleanup()
@@ -1023,8 +1041,8 @@ func TestMongoDBGetNextUsesInprocessedImmedeatly(t *testing.T) {
 	err := db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
 
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
-	res1, err1 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
+	res1, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
 
 	assert.Nil(t, err)
 	assert.Nil(t, err1)
@@ -1037,9 +1055,9 @@ func TestMongoDBGetNextUsesInprocessedNumRetry(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	err := db.insertRecord(dbname, collection, &rec1)
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
-	res1, err1 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
-	_, err2 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
+	res1, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
+	_, err2 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
 
 	assert.Nil(t, err)
 	assert.Nil(t, err1)
@@ -1057,10 +1075,10 @@ func TestMongoDBGetNextUsesInprocessedAfterTimeout(t *testing.T) {
 	defer cleanup()
 	err := db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
-	res1, err1 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
+	res1, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
 	time.Sleep(time.Second)
-	res2, err2 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
+	res2, err2 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
 	assert.Nil(t, err)
 	assert.Nil(t, err1)
 	assert.Nil(t, err2)
@@ -1076,10 +1094,10 @@ func TestMongoDBGetNextReturnsToNormalAfterUsesInprocessed(t *testing.T) {
 	err := db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
 	db.insertRecord(dbname, collection, &rec_finished3)
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
 	time.Sleep(time.Second)
-	res1, err1 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
-	res2, err2 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
+	res1, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
+	res2, err2 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
 	assert.Nil(t, err)
 	assert.Nil(t, err1)
 	assert.Nil(t, err2)
@@ -1094,8 +1112,8 @@ func TestMongoDBGetNextUsesInprocessedImmedeatlyIfFinishedStream(t *testing.T) {
 	defer cleanup()
 	err := db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec_finished)
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
-	res1, err1 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
+	res1, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
 	assert.Nil(t, err)
 	assert.Nil(t, err1)
 	assert.Equal(t, string(rec1_expect), string(res))
@@ -1108,9 +1126,9 @@ func TestMongoDBGetNextUsesInprocessedImmedeatlyIfEndofStream(t *testing.T) {
 	defer cleanup()
 	err := db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
-	res1, err1 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
-	res2, err2 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
+	res1, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
+	res2, err2 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
 	assert.Nil(t, err)
 	assert.Nil(t, err1)
 	assert.Nil(t, err2)
@@ -1124,11 +1142,11 @@ func TestMongoDBAckDeletesInprocessed(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
-	db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
+	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
 	query_str := "{\"Id\":1,\"Op\":\"ackmessage\"}"
 
-	db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str})
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
+	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str})
+	_, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_3"})
 	assert.NotNil(t, err)
 	if err != nil {
 		assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
@@ -1142,8 +1160,8 @@ func TestMongoDBAckTwiceErrors(t *testing.T) {
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec1)
 	query_str := "{\"Id\":1,\"Op\":\"ackmessage\"}"
-	db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str})
-	_,err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str})
+	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str})
+	_,err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str})
 	assert.Equal(t, utils.StatusWrongInput, err.(*DBError).Code)
 }
 
@@ -1162,14 +1180,14 @@ func TestMongoDBNegAck(t *testing.T) {
 	inputParams.Params.DelayMs = 0
 
 	db.insertRecord(dbname, collection, &rec1)
-	db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
+	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})
 	bparam, _ := json.Marshal(&inputParams)
 
-	db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "negackmessage", ExtraParam: string(bparam)})
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"}) // first time message from negack
-	_, err1 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})  // second time nothing
-	db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "negackmessage", ExtraParam: string(bparam)})
-	_, err2 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})  // second time nothing
+	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "negackmessage", ExtraParam: string(bparam)})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"}) // first time message from negack
+	_, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"})  // second time nothing
+	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "negackmessage", ExtraParam: string(bparam)})
+	_, err2 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next"}) // second time nothing
 
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec1_expect), string(res))
@@ -1188,12 +1206,12 @@ func TestMongoDBGetNextClearsInprocessAfterReset(t *testing.T) {
 	defer cleanup()
 	err := db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
-	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
-	res1, err1 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
+	res, err := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
+	res1, err1 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
 
-	db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "resetcounter", ExtraParam: "0"})
-	res2, err2 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
-	res3, err3 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
+	db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "resetcounter", ExtraParam: "0"})
+	res2, err2 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
+	res3, err3 := db.ProcessRequest(Request{DbName: dbname, Stream: collection, GroupId: groupId, Op: "next", ExtraParam: "0_1"})
 
 	assert.Nil(t, err)
 	assert.Nil(t, err1)
@@ -1223,12 +1241,12 @@ func TestDeleteStreams(t *testing.T) {
 	for _, test := range testsDeleteStream {
 		db.Connect(dbaddress)
 		db.insertRecord(dbname, encodeStringForColName(test.stream), &rec1)
-		db.ProcessRequest(Request{DbName: dbname, DbCollectionName: test.stream, GroupId: "123", Op: "next"})
+		db.ProcessRequest(Request{DbName: dbname, Stream: test.stream, GroupId: "123", Op: "next"})
 		query_str := "{\"Id\":1,\"Op\":\"ackmessage\"}"
-		request := Request{DbName: dbname, DbCollectionName: test.stream, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str}
+		request := Request{DbName: dbname, Stream: test.stream, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str}
 		_, err := db.ProcessRequest(request)
 		assert.Nil(t, err, test.message)
-		_, err = db.ProcessRequest(Request{DbName: dbname, DbCollectionName: test.stream, GroupId: "", Op: "delete_stream", ExtraParam: test.params})
+		_, err = db.ProcessRequest(Request{DbName: dbname, Stream: test.stream, GroupId: "", Op: "delete_stream", ExtraParam: test.params})
 		if test.ok {
 			rec, err := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
 			acks_exist,_:= db.collectionExist(Request{DbName: dbname, ExtraParam: ""},acks_collection_name_prefix+test.stream)
@@ -1240,7 +1258,7 @@ func TestDeleteStreams(t *testing.T) {
 		} else {
 			assert.NotNil(t, err, test.message)
 		}
-		_, err = db.ProcessRequest(Request{DbName: dbname, DbCollectionName: test.stream, GroupId: "", Op: "delete_stream", ExtraParam: test.params})
+		_, err = db.ProcessRequest(Request{DbName: dbname, Stream: test.stream, GroupId: "", Op: "delete_stream", ExtraParam: test.params})
 		if test.ok2 {
 			assert.Nil(t, err, test.message+" 2")
 		} else {
@@ -1271,7 +1289,7 @@ func TestMongoDBEncodingOK(t *testing.T) {
 	for _, test := range testsEncodings {
 		db.Connect(dbaddress)
 		db.insertRecord(test.dbname_indb, test.collection_indb, &rec1)
-		res, err := db.ProcessRequest(Request{DbName: test.dbname, DbCollectionName: test.collection, GroupId: test.group, Op: "next"})
+		res, err := db.ProcessRequest(Request{DbName: test.dbname, Stream: test.collection, GroupId: test.group, Op: "next"})
 		if test.ok {
 			assert.Nil(t, err, test.message)
 			assert.Equal(t, string(rec1_expect), string(res), test.message)
diff --git a/broker/src/asapo_broker/server/get_commands_test.go b/broker/src/asapo_broker/server/get_commands_test.go
index 40c41c2b6bcb9ef70fd2d99567d7ab25caf1cb7d..980946e49cd6022dc1c781fd8e3098729a6778f3 100644
--- a/broker/src/asapo_broker/server/get_commands_test.go
+++ b/broker/src/asapo_broker/server/get_commands_test.go
@@ -59,7 +59,7 @@ var testsGetCommand = []struct {
 
 func (suite *GetCommandsTestSuite) TestGetCommandsCallsCorrectRoutine() {
 	for _, test := range testsGetCommand {
-		suite.mock_db.On("ProcessRequest", database.Request{DbName: expectedDBName, DbCollectionName: test.stream, GroupId: test.groupid, Op: test.command, ExtraParam: test.externalParam}).Return([]byte("Hello"), nil)
+		suite.mock_db.On("ProcessRequest", database.Request{DbName: expectedDBName, Stream: test.stream, GroupId: test.groupid, Op: test.command, ExtraParam: test.externalParam}).Return([]byte("Hello"), nil)
 		logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request "+test.command)))
 		w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + test.source + "/" + test.reqString+correctTokenSuffix+test.queryParams)
 		suite.Equal(http.StatusOK, w.Code, test.command+ " OK")
@@ -83,7 +83,7 @@ func (suite *GetCommandsTestSuite) TestGetCommandsCorrectlyProcessedEncoding() {
 		test.reqString = strings.Replace(test.reqString,test.source,encodedSource,1)
 		test.reqString = strings.Replace(test.reqString,test.stream,encodedStream,1)
 		dbname := expectedBeamtimeId + "_" + newsource
-		suite.mock_db.On("ProcessRequest", database.Request{DbName: dbname, DbCollectionName: newstream, GroupId: newgroup, Op: test.command, ExtraParam: test.externalParam}).Return([]byte("Hello"), nil)
+		suite.mock_db.On("ProcessRequest", database.Request{DbName: dbname, Stream: newstream, GroupId: newgroup, Op: test.command, ExtraParam: test.externalParam}).Return([]byte("Hello"), nil)
 		logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request "+test.command)))
 		w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + encodedSource + "/" + test.reqString+correctTokenSuffix+test.queryParams)
 		suite.Equal(http.StatusOK, w.Code, test.command+ " OK")
diff --git a/broker/src/asapo_broker/server/get_meta_test.go b/broker/src/asapo_broker/server/get_meta_test.go
index 550efb653178533801ed305926cedd2ae1609cbf..b54a72865f02d4358b4cfc8abf4f2a0bb6678acf 100644
--- a/broker/src/asapo_broker/server/get_meta_test.go
+++ b/broker/src/asapo_broker/server/get_meta_test.go
@@ -33,9 +33,9 @@ func TestGetMetaTestSuite(t *testing.T) {
 }
 
 func (suite *GetMetaTestSuite) TestGetMetaOK() {
-	suite.mock_db.On("ProcessRequest", database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, Op: "meta", ExtraParam: "1"}).Return([]byte(""), nil)
+	suite.mock_db.On("ProcessRequest", database.Request{DbName: expectedDBName, Stream: expectedStream, Op: "meta", ExtraParam: "0"}).Return([]byte(""), nil)
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request meta")))
-	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/0/meta"  + "/1" + correctTokenSuffix,"GET")
+	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/0/meta"  + "/0" + correctTokenSuffix,"GET")
 	suite.Equal(http.StatusOK, w.Code, "meta OK")
 }
 
diff --git a/broker/src/asapo_broker/server/post_op_image_test.go b/broker/src/asapo_broker/server/post_op_image_test.go
index fc1a2d4e72499983f88da4eab17786ffd95f6871..2cc3159ee6a3469490ed3ec082947270e8e49db4 100644
--- a/broker/src/asapo_broker/server/post_op_image_test.go
+++ b/broker/src/asapo_broker/server/post_op_image_test.go
@@ -34,7 +34,7 @@ func TestMessageOpTestSuite(t *testing.T) {
 
 func (suite *MessageOpTestSuite) TestAckMessageOpOK() {
 	query_str := "{\"Id\":1,\"Op\":\"ackmessage\"}"
-	suite.mock_db.On("ProcessRequest", database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId: expectedGroupID, Op: "ackmessage", ExtraParam: query_str}).Return([]byte(""), nil)
+	suite.mock_db.On("ProcessRequest", database.Request{DbName: expectedDBName, Stream: expectedStream, GroupId: expectedGroupID, Op: "ackmessage", ExtraParam: query_str}).Return([]byte(""), nil)
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request ackmessage")))
 	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/1" + correctTokenSuffix,"POST",query_str)
 	suite.Equal(http.StatusOK, w.Code, "ackmessage OK")
diff --git a/broker/src/asapo_broker/server/post_query_images_test.go b/broker/src/asapo_broker/server/post_query_images_test.go
index 16aca9242eebd867c58f90e5860f3bd9c665cca1..d51d2490ab3b0063ff078b92430b08cfeb28db1f 100644
--- a/broker/src/asapo_broker/server/post_query_images_test.go
+++ b/broker/src/asapo_broker/server/post_query_images_test.go
@@ -35,7 +35,7 @@ func TestQueryTestSuite(t *testing.T) {
 func (suite *QueryTestSuite) TestQueryOK() {
 	query_str := "aaaa"
 
-	suite.mock_db.On("ProcessRequest", database.Request{DbName: expectedDBName, DbCollectionName: expectedStream,Op: "querymessages", ExtraParam: query_str}).Return([]byte("{}"), nil)
+	suite.mock_db.On("ProcessRequest", database.Request{DbName: expectedDBName, Stream: expectedStream,Op: "querymessages", ExtraParam: query_str}).Return([]byte("{}"), nil)
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request querymessages")))
 
 	w := doRequest("/beamtime/"+expectedBeamtimeId+"/"+expectedSource+"/"+expectedStream+"/0/querymessages"+correctTokenSuffix, "POST", query_str)
diff --git a/broker/src/asapo_broker/server/post_reset_counter_test.go b/broker/src/asapo_broker/server/post_reset_counter_test.go
index 10fb4e1b0a360707df952060f633cb99041d1cfc..64291bee21024ef2b5dbee377c2ff2b3aec3aeaf 100644
--- a/broker/src/asapo_broker/server/post_reset_counter_test.go
+++ b/broker/src/asapo_broker/server/post_reset_counter_test.go
@@ -33,7 +33,7 @@ func TestResetCounterTestSuite(t *testing.T) {
 }
 
 func (suite *ResetCounterTestSuite) TestResetCounterOK() {
-	expectedRequest := database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId:expectedGroupID, Op: "resetcounter", ExtraParam: "10"}
+	expectedRequest := database.Request{DbName: expectedDBName, Stream: expectedStream, GroupId:expectedGroupID, Op: "resetcounter", ExtraParam: "10"}
 	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte(""), nil)
 
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request resetcounter")))
diff --git a/broker/src/asapo_broker/server/process_request.go b/broker/src/asapo_broker/server/process_request.go
index 23fe151a8865dc7debecc7c770a561fba8eb803b..41b6564b7a91f5d2902febfbfbd3f58f8d207ac9 100644
--- a/broker/src/asapo_broker/server/process_request.go
+++ b/broker/src/asapo_broker/server/process_request.go
@@ -78,7 +78,7 @@ func processRequest(w http.ResponseWriter, r *http.Request, op string, extra_par
 	request.DbName = db_name+"_"+datasource
 	request.Op = op
 	request.ExtraParam = extra_param
-	request.DbCollectionName = stream
+	request.Stream = stream
 	request.GroupId = group_id
 	if yes, minSize := datasetRequested(r); yes {
 		request.DatasetOp = true
diff --git a/broker/src/asapo_broker/server/process_request_test.go b/broker/src/asapo_broker/server/process_request_test.go
index 4ac81cd3b1a979b4e2aa4878079758cba8bd091a..781a7f16b2df345e4c1cb77aca727f7ec7a85608 100644
--- a/broker/src/asapo_broker/server/process_request_test.go
+++ b/broker/src/asapo_broker/server/process_request_test.go
@@ -151,7 +151,7 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestWithNoToken() {
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestWithWrongDatabaseName() {
 
-	expectedRequest := database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId: expectedGroupID, Op: "next"}
+	expectedRequest := database.Request{DbName: expectedDBName, Stream: expectedStream, GroupId: expectedGroupID, Op: "next"}
 
 	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte(""),
 		&database.DBError{utils.StatusNoData, ""})
@@ -165,7 +165,7 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestWithWrongDatabaseName()
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestWithConnectionError() {
 
-	expectedRequest := database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId: expectedGroupID, Op: "next"}
+	expectedRequest := database.Request{DbName: expectedDBName, Stream: expectedStream, GroupId: expectedGroupID, Op: "next"}
 
 	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte(""),
 		&database.DBError{utils.StatusServiceUnavailable, ""})
@@ -181,7 +181,7 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestWithConnectionError() {
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestWithInternalDBError() {
 
-	expectedRequest := database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId: expectedGroupID, Op: "next"}
+	expectedRequest := database.Request{DbName: expectedDBName, Stream: expectedStream, GroupId: expectedGroupID, Op: "next"}
 
 	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte(""), errors.New(""))
 	logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("processing request next")))
@@ -196,7 +196,7 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestWithInternalDBError() {
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestAddsCounter() {
 
-	expectedRequest := database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId: expectedGroupID, Op: "next"}
+	expectedRequest := database.Request{DbName: expectedDBName, Stream: expectedStream, GroupId: expectedGroupID, Op: "next"}
 	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte("Hello"), nil)
 
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request next in "+expectedDBName)))
@@ -207,7 +207,7 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestAddsCounter() {
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestAddsDataset() {
 
-	expectedRequest := database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId: expectedGroupID, DatasetOp: true, Op: "next"}
+	expectedRequest := database.Request{DbName: expectedDBName, Stream: expectedStream, GroupId: expectedGroupID, DatasetOp: true, Op: "next"}
 	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte("Hello"), nil)
 
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request next in "+expectedDBName)))
@@ -231,7 +231,7 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestDeleteStreamReadToken()
 func (suite *ProcessRequestTestSuite) TestProcessRequestDeleteStreamWriteToken() {
 	query_str := "query_string"
 
-	expectedRequest := database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId: "", Op: "delete_stream", ExtraParam: query_str}
+	expectedRequest := database.Request{DbName: expectedDBName, Stream: expectedStream, GroupId: "", Op: "delete_stream", ExtraParam: query_str}
 	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte("Hello"), nil)
 
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request delete_stream in "+expectedDBName)))
diff --git a/common/cpp/include/asapo/common/data_structs.h b/common/cpp/include/asapo/common/data_structs.h
index 847650fc750f71eb0ea78defbd0dcff2c683a422..1c771e9a96cee57d3ce00ccfb41b5dda3a3ffa4b 100644
--- a/common/cpp/include/asapo/common/data_structs.h
+++ b/common/cpp/include/asapo/common/data_structs.h
@@ -145,6 +145,27 @@ enum IngestModeFlags : uint64_t {
 
 const uint64_t kDefaultIngestMode = kTransferData | kStoreInFilesystem | kStoreInDatabase;
 
+enum class MetaIngestOp : uint64_t {
+    kInsert  = 1,
+    kReplace = 2,
+    kUpdate = 3,
+};
+
+struct MetaIngestMode {
+    MetaIngestOp op;
+    bool upsert;
+    uint64_t Encode() {
+        return static_cast<uint64_t>(op) + 10 * static_cast<uint64_t>(upsert);
+    }
+    void Decode(uint64_t code) {
+        upsert = code > 10;
+        uint64_t val = code - (upsert ? 10 : 0);
+        if (val <= 3) {
+            op = static_cast<MetaIngestOp>(val);
+        }
+    }
+};
+
 class ClientProtocol {
   private:
     std::string version_;
diff --git a/common/cpp/include/asapo/common/internal/version.h.in b/common/cpp/include/asapo/common/internal/version.h.in
index 5ac6f40174d868d9201e39e139c43fcbd08f9b1f..276a57a413e2b4b3f1ce30df9b819e9d3218841c 100644
--- a/common/cpp/include/asapo/common/internal/version.h.in
+++ b/common/cpp/include/asapo/common/internal/version.h.in
@@ -28,6 +28,10 @@ inline std::string GetReceiverApiVersion() {
     return "@ASAPO_RECEIVER_API_VER@";
 }
 
+inline std::string GetDbSchemaVersion() {
+    return "@DB_SCHEMA_VER@";
+}
+
 inline std::string GetRdsApiVersion() {
     return "@ASAPO_RDS_API_VER@";
 }
diff --git a/common/cpp/include/asapo/common/networking.h b/common/cpp/include/asapo/common/networking.h
index c9e07f1b72d33c043d8a9061b278a026035b751e..ca2c2f8e81fb6b788581e1d1cfe13cdbc816ca39 100644
--- a/common/cpp/include/asapo/common/networking.h
+++ b/common/cpp/include/asapo/common/networking.h
@@ -30,6 +30,7 @@ enum Opcode : uint8_t {
     kOpcodeAuthorize,
     kOpcodeTransferMetaData,
     kOpcodeDeleteStream,
+    kOpcodeGetMeta,
     kOpcodeCount,
 };
 
@@ -53,6 +54,7 @@ const std::size_t kNCustomParams = 3;
 using CustomRequestData = uint64_t[kNCustomParams];
 const std::size_t kPosIngestMode = 0;
 const std::size_t kPosDataSetId = 1;
+const std::size_t kPosMetaIngestMode = 1;
 const std::size_t kPosDataSetSize = 2;
 
 struct GenericRequestHeader {
diff --git a/common/cpp/include/asapo/database/database.h b/common/cpp/include/asapo/database/database.h
index fbe45cccb61c9069d47582463e0a2eda39c5af93..e21af48757b4774baf42d7435bb4b01a020c00ae 100644
--- a/common/cpp/include/asapo/database/database.h
+++ b/common/cpp/include/asapo/database/database.h
@@ -15,7 +15,8 @@ class Database {
   public:
     virtual Error Connect(const std::string& address, const std::string& database) = 0;
     virtual Error Insert(const std::string& collection, const MessageMeta& file, bool ignore_duplicates) const = 0;
-    virtual Error Upsert(const std::string& collection, uint64_t id, const uint8_t* data, uint64_t size) const = 0;
+    virtual Error InsertMeta(const std::string& collection, const std::string& id, const uint8_t* data, uint64_t size,
+                             MetaIngestMode mode) const = 0;
     virtual Error InsertAsDatasetMessage(const std::string& collection, const MessageMeta& file,
                                          uint64_t dataset_size,
                                          bool ignore_duplicates) const = 0;
@@ -25,6 +26,8 @@ class Database {
     virtual Error GetStreamInfo(const std::string& collection, StreamInfo* info) const  = 0;
     virtual Error GetLastStream(StreamInfo* info) const  = 0;
     virtual Error DeleteStream(const std::string& stream) const = 0;
+    virtual Error GetMetaFromDb(const std::string& collection, const std::string& id, std::string* res) const = 0;
+
     virtual ~Database() = default;
 };
 
diff --git a/common/cpp/include/asapo/json_parser/json_parser.h b/common/cpp/include/asapo/json_parser/json_parser.h
index ed3f0cde77db84f5ef6af3d4b7363eefe1d2557e..b8bd14f69bca16c77058354b965c6a5b4b4c84cc 100644
--- a/common/cpp/include/asapo/json_parser/json_parser.h
+++ b/common/cpp/include/asapo/json_parser/json_parser.h
@@ -22,6 +22,7 @@ class JsonParser {
     Error GetArrayString(const std::string& name, std::vector<std::string>* val) const noexcept;
     Error GetArrayRawStrings(const std::string& name, std::vector<std::string>* val) const noexcept;
     Error GetRawString(std::string* val) const noexcept;
+    Error GetFlattenedString(const std::string& prefix, const std::string& separator, std::string* val) const noexcept;
 
     JsonParser Embedded(const std::string& name) const noexcept;
     ~JsonParser();
diff --git a/common/cpp/include/asapo/preprocessor/definitions.h b/common/cpp/include/asapo/preprocessor/definitions.h
index 3ac042caac7247b75863a6a3301dd780fe25bd00..1f6b9fd8b7fa081969c3c79a0c033233eb296c2c 100644
--- a/common/cpp/include/asapo/preprocessor/definitions.h
+++ b/common/cpp/include/asapo/preprocessor/definitions.h
@@ -9,6 +9,17 @@
 #define FINAL final
 #endif
 
+#if defined(__GNUC__) || defined(__clang__)
+#define DEPRECATED(msg) __attribute__((deprecated(msg)))
+#elif defined(_MSC_VER)
+#define DEPRECATED(msg) __declspec(deprecated(msg))
+#else
+#pragma message("WARNING: You need to implement DEPRECATED for this compiler")
+#define DEPRECATED(msg)
+#endif
+
+
+
 namespace  asapo {
 const char kPathSeparator =
 #ifdef WIN32
diff --git a/common/cpp/include/asapo/request/request_handler_factory.h b/common/cpp/include/asapo/request/request_handler_factory.h
index c48d787ce72df0d01c6e5de152cd02246a12db66..dae120bc0ac78ba296339ebd0814e15d4cdbc0e9 100644
--- a/common/cpp/include/asapo/request/request_handler_factory.h
+++ b/common/cpp/include/asapo/request/request_handler_factory.h
@@ -10,6 +10,7 @@ namespace  asapo {
 class RequestHandlerFactory {
   public:
     virtual std::unique_ptr<RequestHandler> NewRequestHandler(uint64_t thread_id, uint64_t* shared_counter) = 0;
+    virtual ~RequestHandlerFactory() {};
 };
 
 
diff --git a/common/cpp/include/asapo/unittests/MockDatabase.h b/common/cpp/include/asapo/unittests/MockDatabase.h
index fff0633a7814c84de6eee5677b8c4412bc067545..f8c2464364c7eaf7ebec9c3d26fd622a3de3542b 100644
--- a/common/cpp/include/asapo/unittests/MockDatabase.h
+++ b/common/cpp/include/asapo/unittests/MockDatabase.h
@@ -30,11 +30,13 @@ class MockDatabase : public Database {
     MOCK_CONST_METHOD4(InsertAsDatasetMessage_t,
                        ErrorInterface * (const std::string&, const MessageMeta&, uint64_t, bool));
 
-    Error Upsert(const std::string& collection, uint64_t id, const uint8_t* data, uint64_t size) const override {
-        return Error{Upsert_t(collection, id, data, size)};
+    Error InsertMeta(const std::string& collection, const std::string& id, const uint8_t* data, uint64_t size,
+                     MetaIngestMode mode) const override {
+        return Error{InsertMeta_t(collection, id, data, size, mode)};
 
     }
-    MOCK_CONST_METHOD4(Upsert_t, ErrorInterface * (const std::string&, uint64_t id, const uint8_t* data, uint64_t size));
+    MOCK_CONST_METHOD5(InsertMeta_t, ErrorInterface * (const std::string&, const std::string& id, const uint8_t* data,
+                       uint64_t size, MetaIngestMode mode));
 
     Error GetById(const std::string& collection, uint64_t id, MessageMeta* file) const override {
         return Error{GetById_t(collection, id, file)};
@@ -46,6 +48,11 @@ class MockDatabase : public Database {
         return Error{GetSetById_t(collection, set_id, id, file)};
     }
 
+    Error GetMetaFromDb(const std::string& collection, const std::string& id, std::string* res) const override {
+        return Error{GetMetaFromDb_t(collection, id, res)};
+    }
+    MOCK_CONST_METHOD3(GetMetaFromDb_t, ErrorInterface * (const std::string&, const std::string&, std::string* res));
+
     MOCK_CONST_METHOD4(GetSetById_t, ErrorInterface * (const std::string&, uint64_t set_id, uint64_t id, MessageMeta*));
 
     Error GetStreamInfo(const std::string& collection, StreamInfo* info) const override {
diff --git a/common/cpp/src/database/mongodb_client.cpp b/common/cpp/src/database/mongodb_client.cpp
index 432982976c4ec6eaae9888ab6a299e685fe1e349..d4e1de03f6f48e26ef6ff2c86194d3588575662d 100644
--- a/common/cpp/src/database/mongodb_client.cpp
+++ b/common/cpp/src/database/mongodb_client.cpp
@@ -8,6 +8,8 @@
 #include "asapo/database/db_error.h"
 #include "asapo/common/data_structs.h"
 
+#include "asapo/common/internal/version.h"
+
 namespace asapo {
 
 using asapo::Database;
@@ -65,7 +67,7 @@ Error MongoDBClient::UpdateCurrentCollectionIfNeeded(const std::string& collecti
 
     auto encoded_name  = EncodeColName(collection_name);
     if (encoded_name.size() > maxCollectionNameLength) {
-        return DBErrorTemplates::kWrongInput.Generate("stream name too long");
+        return DBErrorTemplates::kWrongInput.Generate("collection name too long");
     }
 
     current_collection_ = mongoc_client_get_collection(client_, database_name_.c_str(),
@@ -140,24 +142,52 @@ bson_p PrepareBsonDocument(const MessageMeta& file, Error* err) {
         return nullptr;
     }
 
+
     *err = nullptr;
     return bson_p{bson};
 }
 
-bson_p PrepareBsonDocument(const uint8_t* json, ssize_t len, Error* err) {
+bson_p PrepareUpdateDocument(const uint8_t* json, Error* err) {
+    JsonStringParser parser{std::string(reinterpret_cast<const char*>(json))};
+    std::string json_flat;
+    auto parser_err = parser.GetFlattenedString("meta", ".", &json_flat);
+    if (parser_err) {
+        *err = DBErrorTemplates::kJsonParseError.Generate("cannot flatten meta " + parser_err->Explain());
+        return nullptr;
+    }
+    bson_error_t mongo_err;
+    auto bson_meta = bson_new_from_json(reinterpret_cast<const uint8_t*>(json_flat.c_str()), json_flat.size(), &mongo_err);
+    if (!bson_meta) {
+        *err = DBErrorTemplates::kJsonParseError.Generate(mongo_err.message);
+        return nullptr;
+    }
+    return bson_p{bson_meta};
+}
+
+
+bson_p PrepareInjestDocument(const uint8_t* json, ssize_t len, Error* err) {
     bson_error_t mongo_err;
     if (json == nullptr) {
         *err = TextError("empty metadata");
         return nullptr;
     }
 
-    auto bson = bson_new_from_json(json, len, &mongo_err);
-    if (!bson) {
+
+    auto bson_meta = bson_new_from_json(json, len, &mongo_err);
+    if (!bson_meta) {
         *err = DBErrorTemplates::kJsonParseError.Generate(mongo_err.message);
         return nullptr;
     }
+    auto bson =  bson_new();
+    if (!BSON_APPEND_DOCUMENT(bson, "meta", bson_meta)
+            || !BSON_APPEND_UTF8(bson, "schema_version", GetDbSchemaVersion().c_str())) {
+        *err = DBErrorTemplates::kInsertError.Generate("cannot add schema version ");
+        bson_destroy(bson_meta);
+        return nullptr;
+    }
 
     *err = nullptr;
+    bson_destroy(bson_meta);
     return bson_p{bson};
 }
 
@@ -173,20 +203,61 @@ Error MongoDBClient::InsertBsonDocument(const bson_p& document, bool ignore_dupl
     return nullptr;
 }
 
-Error MongoDBClient::UpdateBsonDocument(uint64_t id, const bson_p& document, bool upsert) const {
+bool documentWasChanged(bson_t* reply) {
+    bson_iter_t iter;
+    bson_iter_init_find(&iter, reply, "upsertedCount");
+    auto n_upsert = bson_iter_int32(&iter);
+    bson_iter_init_find(&iter, reply, "modifiedCount");
+    auto n_mod = bson_iter_int32(&iter);
+    bson_iter_init_find(&iter, reply, "matchedCount");
+    auto n_matched = bson_iter_int32(&iter);
+    return n_mod + n_upsert + n_matched > 0;
+}
+
+Error MongoDBClient::ReplaceBsonDocument(const std::string& id, const bson_p& document, bool upsert) const {
     bson_error_t mongo_err;
 
     bson_t* opts = BCON_NEW ("upsert", BCON_BOOL(upsert));
-    bson_t* selector = BCON_NEW ("_id", BCON_INT64(id));
-
+    bson_t* selector = BCON_NEW ("_id", BCON_UTF8(id.c_str()));
+    bson_t reply;
     Error err = nullptr;
 
-    if (!mongoc_collection_replace_one(current_collection_, selector, document.get(), opts, NULL, &mongo_err)) {
+    if (!mongoc_collection_replace_one(current_collection_, selector, document.get(), opts, &reply, &mongo_err)) {
         err = DBErrorTemplates::kInsertError.Generate(mongo_err.message);
     }
 
+    if (err == nullptr && !documentWasChanged(&reply)) {
+        err = DBErrorTemplates::kWrongInput.Generate("cannot replace: metadata does not exist");
+    }
+
     bson_free(opts);
     bson_free(selector);
+    bson_destroy (&reply);
+
+    return err;
+}
+
+Error MongoDBClient::UpdateBsonDocument(const std::string& id, const bson_p& document, bool upsert) const {
+    bson_error_t mongo_err;
+
+    bson_t* opts = BCON_NEW ("upsert", BCON_BOOL(upsert));
+    bson_t* selector = BCON_NEW ("_id", BCON_UTF8(id.c_str()));
+    bson_t* update  = BCON_NEW ("$set", BCON_DOCUMENT(document.get()));
+
+    bson_t reply;
+    Error err = nullptr;
+    if (!mongoc_collection_update_one(current_collection_, selector, update, opts, &reply, &mongo_err)) {
+        err = DBErrorTemplates::kInsertError.Generate(mongo_err.message);
+    }
+
+    if (err == nullptr && !documentWasChanged(&reply)) {
+        err = DBErrorTemplates::kWrongInput.Generate("cannot update: metadata does not exist");
+    }
+
+    bson_free(opts);
+    bson_free(selector);
+    bson_destroy (&reply);
+    bson_destroy (update);
 
     return err;
 }
@@ -213,7 +284,29 @@ MongoDBClient::~MongoDBClient() {
     CleanUp();
 }
 
-Error MongoDBClient::Upsert(const std::string& collection, uint64_t id, const uint8_t* data, uint64_t size) const {
+bson_p PrepareBsonDocument(const uint8_t* json, ssize_t len, const std::string& id_encoded, MetaIngestMode mode,
+                           Error* err) {
+    bson_p document;
+    if (mode.op == MetaIngestOp::kUpdate) {
+        document = PrepareUpdateDocument(json, err);
+    } else {
+        document = PrepareInjestDocument(json, len, err);
+    }
+    if (*err) {
+        return nullptr;
+    }
+    if (mode.op != MetaIngestOp::kUpdate) {
+        if (!BSON_APPEND_UTF8(document.get(), "_id", id_encoded.c_str())) {
+            *err = DBErrorTemplates::kInsertError.Generate("cannot assign document id ");
+            return nullptr;
+        }
+    }
+    return document;
+}
+
+Error MongoDBClient::InsertMeta(const std::string& collection, const std::string& id, const uint8_t* data,
+                                uint64_t size,
+                                MetaIngestMode mode) const {
     if (!connected_) {
         return DBErrorTemplates::kNotConnected.Generate();
     }
@@ -223,16 +316,22 @@ Error MongoDBClient::Upsert(const std::string& collection, uint64_t id, const ui
         return err;
     }
 
-    auto document = PrepareBsonDocument(data, (ssize_t) size, &err);
+    auto id_encoded = EncodeColName(id);
+    auto document = PrepareBsonDocument(data, (ssize_t)size, id_encoded, mode, &err);
     if (err) {
         return err;
     }
 
-    if (!BSON_APPEND_INT64(document.get(), "_id", id)) {
-        err = DBErrorTemplates::kInsertError.Generate("cannot assign document id ");
-    }
+    switch (mode.op) {
+    case MetaIngestOp::kInsert:
+        return InsertBsonDocument(document, false);
+    case asapo::MetaIngestOp::kReplace:
+        return ReplaceBsonDocument(id_encoded, document, mode.upsert);
+    case MetaIngestOp::kUpdate:
+        return UpdateBsonDocument(id_encoded, document, mode.upsert);
+        break;
 
-    return UpdateBsonDocument(id, document, true);
+    }
 
 }
 
@@ -278,6 +377,7 @@ Error MongoDBClient::InsertAsDatasetMessage(const std::string& collection, const
                   BCON_INT64(file.dataset_substream), "}", "}", "]");
     auto update = BCON_NEW ("$setOnInsert", "{",
                             "size", BCON_INT64(dataset_size),
+                            "schema_version", GetDbSchemaVersion().c_str(),
                             "timestamp", BCON_INT64((int64_t) NanosecsEpochFromTimePoint(file.timestamp)),
                             "}",
                             "$addToSet", "{",
@@ -291,7 +391,8 @@ Error MongoDBClient::InsertAsDatasetMessage(const std::string& collection, const
     return err;
 }
 
-Error MongoDBClient::GetRecordFromDb(const std::string& collection, uint64_t id, GetRecordMode mode,
+Error MongoDBClient::GetRecordFromDb(const std::string& collection, uint64_t id, const std::string& string_id,
+                                     GetRecordMode mode,
                                      std::string* res) const {
     if (!connected_) {
         return DBErrorTemplates::kNotConnected.Generate();
@@ -310,6 +411,10 @@ Error MongoDBClient::GetRecordFromDb(const std::string& collection, uint64_t id,
     char* str;
 
     switch (mode) {
+    case GetRecordMode::kByStringId:
+        filter = BCON_NEW ("_id", BCON_UTF8(string_id.c_str()));
+        opts = BCON_NEW ("limit", BCON_INT64(1));
+        break;
     case GetRecordMode::kById:
         filter = BCON_NEW ("_id", BCON_INT64(id));
         opts = BCON_NEW ("limit", BCON_INT64(1));
@@ -351,7 +456,7 @@ Error MongoDBClient::GetRecordFromDb(const std::string& collection, uint64_t id,
 
 Error MongoDBClient::GetById(const std::string& collection, uint64_t id, MessageMeta* file) const {
     std::string record_str;
-    auto err = GetRecordFromDb(collection, id, GetRecordMode::kById, &record_str);
+    auto err = GetRecordFromDb(collection, id, "", GetRecordMode::kById, &record_str);
     if (err) {
         return err;
     }
@@ -367,7 +472,7 @@ Error MongoDBClient::GetDataSetById(const std::string& collection,
                                     uint64_t id,
                                     MessageMeta* file) const {
     std::string record_str;
-    auto err = GetRecordFromDb(collection, id, GetRecordMode::kById, &record_str);
+    auto err = GetRecordFromDb(collection, id, "", GetRecordMode::kById, &record_str);
     if (err) {
         return err;
     }
@@ -469,7 +574,7 @@ Error StreamInfoFromDbResponse(const std::string& last_record_str,
 
 Error MongoDBClient::GetStreamInfo(const std::string& collection, StreamInfo* info) const {
     std::string last_record_str, earliest_record_str;
-    auto err = GetRecordFromDb(collection, 0, GetRecordMode::kLast, &last_record_str);
+    auto err = GetRecordFromDb(collection, 0, "", GetRecordMode::kLast, &last_record_str);
     if (err) {
         if (err
                 == DBErrorTemplates::kNoRecord) { // with noRecord error it will return last_id = 0 which can be used to understand that the stream is not started yet
@@ -478,7 +583,7 @@ Error MongoDBClient::GetStreamInfo(const std::string& collection, StreamInfo* in
         }
         return err;
     }
-    err = GetRecordFromDb(collection, 0, GetRecordMode::kEarliest, &earliest_record_str);
+    err = GetRecordFromDb(collection, 0, "", GetRecordMode::kEarliest, &earliest_record_str);
     if (err) {
         return err;
     }
@@ -616,7 +721,23 @@ Error MongoDBClient::DeleteStream(const std::string& stream) const {
         std::string querystr = ".*_" + EscapeQuery(stream_encoded) + "$";
         DeleteDocumentsInCollection("current_location", querystr);
     }
+    DeleteDocumentsInCollection("meta", "^" + EscapeQuery(stream_encoded) + "$");
     return err;
 }
 
+Error MongoDBClient::GetMetaFromDb(const std::string& collection, const std::string& id, std::string* res) const {
+    std::string meta_str;
+    auto err = GetRecordFromDb(collection, 0, EncodeColName(id), GetRecordMode::kByStringId, &meta_str);
+    if (err) {
+        return err;
+    }
+    auto parser = JsonStringParser(meta_str);
+    err = parser.Embedded("meta").GetRawString(res);
+    if (err) {
+        return DBErrorTemplates::kJsonParseError.Generate(
+                   "GetMetaFromDb: cannot parse database response: " + err->Explain());
+    }
+    return nullptr;
+}
+
 }
diff --git a/common/cpp/src/database/mongodb_client.h b/common/cpp/src/database/mongodb_client.h
index 3999671fe73f18c5a26708a29a80834fb83f77e5..d47ba4bc50c6c18ec7cec61ad3457e4b06447a4e 100644
--- a/common/cpp/src/database/mongodb_client.h
+++ b/common/cpp/src/database/mongodb_client.h
@@ -36,7 +36,8 @@ using bson_p = std::unique_ptr<_bson_t, BsonDestroyFunctor>;
 enum class GetRecordMode {
     kById,
     kLast,
-    kEarliest
+    kEarliest,
+    kByStringId,
 };
 
 const size_t maxDbNameLength = 63;
@@ -49,12 +50,14 @@ class MongoDBClient final : public Database {
     Error Insert(const std::string& collection, const MessageMeta& file, bool ignore_duplicates) const override;
     Error InsertAsDatasetMessage(const std::string& collection, const MessageMeta& file, uint64_t dataset_size,
                                  bool ignore_duplicates) const override;
-    Error Upsert(const std::string& collection, uint64_t id, const uint8_t* data, uint64_t size) const override;
+    Error InsertMeta(const std::string& collection, const std::string& id, const uint8_t* data, uint64_t size,
+                     MetaIngestMode mode) const override;
     Error GetById(const std::string& collection, uint64_t id, MessageMeta* file) const override;
     Error GetDataSetById(const std::string& collection, uint64_t id_in_set, uint64_t id, MessageMeta* file) const override;
     Error GetStreamInfo(const std::string& collection, StreamInfo* info) const override;
     Error GetLastStream(StreamInfo* info) const override;
     Error DeleteStream(const std::string& stream) const override;
+    Error GetMetaFromDb(const std::string& collection, const std::string& id, std::string* res) const override;
     ~MongoDBClient() override;
   private:
     mongoc_client_t* client_{nullptr};
@@ -70,9 +73,11 @@ class MongoDBClient final : public Database {
     Error Ping();
     Error TryConnectDatabase();
     Error InsertBsonDocument(const bson_p& document, bool ignore_duplicates) const;
-    Error UpdateBsonDocument(uint64_t id, const bson_p& document, bool upsert) const;
+    Error ReplaceBsonDocument(const std::string& id, const bson_p& document, bool upsert) const;
+    Error UpdateBsonDocument(const std::string& id, const bson_p& document, bool upsert) const;
     Error AddBsonDocumentToArray(bson_t* query, bson_t* update, bool ignore_duplicates) const;
-    Error GetRecordFromDb(const std::string& collection, uint64_t id, GetRecordMode mode, std::string* res) const;
+    Error GetRecordFromDb(const std::string& collection, uint64_t id, const std::string& string_id, GetRecordMode mode,
+                          std::string* res) const;
     Error UpdateLastStreamInfo(const char* str, StreamInfo* info) const;
     Error DeleteCollection(const std::string& name) const;
     Error DeleteCollections(const std::string& prefix) const;
diff --git a/common/cpp/src/json_parser/json_parser.cpp b/common/cpp/src/json_parser/json_parser.cpp
index 8ef8828205f4d1713a5bf784382b46326408baa8..b25cb6b4a9d1f96226e60b4f5ccfe5fdfaf1afe3 100644
--- a/common/cpp/src/json_parser/json_parser.cpp
+++ b/common/cpp/src/json_parser/json_parser.cpp
@@ -57,6 +57,10 @@ Error JsonParser::GetRawString(std::string* val) const noexcept {
 Error JsonParser::GetArrayRawStrings(const std::string& name, std::vector<std::string>* val) const noexcept {
     return rapid_json_->GetArrayRawStrings(name, val);
 }
+Error JsonParser::GetFlattenedString(const std::string& prefix, const std::string& separator,
+                                     std::string* val) const noexcept {
+    return rapid_json_->GetFlattenedString(prefix, separator, val);
+}
 
 }
 
diff --git a/common/cpp/src/json_parser/rapid_json.cpp b/common/cpp/src/json_parser/rapid_json.cpp
index 34e0ce74e0cb9dea06c4e4ca3f7c87f35fabb2f2..17e4bbd72038e258e1f086045d52d5c06f363ab5 100644
--- a/common/cpp/src/json_parser/rapid_json.cpp
+++ b/common/cpp/src/json_parser/rapid_json.cpp
@@ -211,4 +211,43 @@ Error RapidJson::GetArrayRawStrings(const std::string& name, std::vector<std::st
 
 }
 
+// AddVals recursively flattens *obj into *vals using dotted keys ("a.b.c").
+void AddVals(const std::string& prefix, const std::string& separator, Document& d, Document::AllocatorType& a,
+             Value* vals, Value* obj) {
+    for (auto& m : obj->GetObject()) {
+        std::string name;
+        if (!prefix.empty()) {
+            name = prefix + separator + m.name.GetString();
+        } else {
+            name = m.name.GetString();
+        }
+        if (m.value.IsObject()) {
+            AddVals(name, separator, d, a, vals, &m.value);
+            continue;  // keep iterating; a 'return' here would drop all siblings after a nested object
+        }
+        Value s;
+        s.SetString(name.c_str(), name.size(), a);
+        vals->AddMember(s, Value(m.value, a), a);
+    }
+}
+
+Error RapidJson::GetFlattenedString(const std::string& prefix, const std::string& separator,
+                                    std::string* val) const noexcept {
+    Document d;
+    Document::AllocatorType& a = d.GetAllocator();
+    Value vals(kObjectType);
+
+    if (Error err = LazyInitialize()) {
+        return err;
+    }
+
+    AddVals(prefix, separator, d, a, &vals, object_p_);
+
+    StringBuffer buffer;
+    Writer<StringBuffer> writer(buffer);
+    vals.Accept(writer);
+    val->assign(buffer.GetString());
+    return nullptr;
+}
+
 }
\ No newline at end of file
diff --git a/common/cpp/src/json_parser/rapid_json.h b/common/cpp/src/json_parser/rapid_json.h
index 2af01d1f430d37b4f6930bd6ba4e28c61b53d4eb..b4cbc88711dfcc0f8e7ae6f21257277b9c10469e 100644
--- a/common/cpp/src/json_parser/rapid_json.h
+++ b/common/cpp/src/json_parser/rapid_json.h
@@ -28,6 +28,7 @@ class RapidJson {
     Error GetArrayString(const std::string& name, std::vector<std::string>* val) const noexcept;
     Error GetArrayRawStrings(const std::string& name, std::vector<std::string>* val) const noexcept;
     Error GetRawString(std::string* val) const noexcept;
+    Error GetFlattenedString(const std::string& prefix, const std::string& separator, std::string* val)const noexcept;
   private:
     Error GetInt64(const std::string& name, int64_t* val) const noexcept;
     const std::unique_ptr<IO>* io__;
diff --git a/common/cpp/unittests/json_parser/test_json_parser.cpp b/common/cpp/unittests/json_parser/test_json_parser.cpp
index d8232c3a0b27bb07351641f4dd1edb11e68946f5..dfc5f3c6c08e634e230ca637a6314f52c60cea9e 100644
--- a/common/cpp/unittests/json_parser/test_json_parser.cpp
+++ b/common/cpp/unittests/json_parser/test_json_parser.cpp
@@ -266,4 +266,19 @@ TEST_F(ParseFileTests, CannotReadFile) {
 
 }
 
+
+
+TEST_F(ParseFileTests, Flatten) {
+    std::string json = R"({"top":"top","embedded":{"ar":[2,2,3],"str":"text"}})";
+    std::string json_flat = R"({"meta.top":"top","meta.embedded.ar":[2,2,3],"meta.embedded.str":"text"})";
+    JsonStringParser parser{json};
+
+    std::string res;
+    auto err = parser.GetFlattenedString("meta", ".", &res);
+    ASSERT_THAT(err, Eq(nullptr));
+    ASSERT_THAT(res, Eq(json_flat));
+
+}
+
+
 }
diff --git a/common/cpp/unittests/request/test_request_pool.cpp b/common/cpp/unittests/request/test_request_pool.cpp
index f359c1c4218d33d8943862cd30f3d53412686db4..e11ae7a8dde114c13e3a577d0b81ee8179a6a2cf 100644
--- a/common/cpp/unittests/request/test_request_pool.cpp
+++ b/common/cpp/unittests/request/test_request_pool.cpp
@@ -46,6 +46,7 @@ class MockRequestHandlerFactory : public asapo::RequestHandlerFactory {
     std::unique_ptr<RequestHandler> NewRequestHandler(uint64_t thread_id, uint64_t* shared_counter) override {
         return std::unique_ptr<RequestHandler> {request_handler_};
     }
+    ~MockRequestHandlerFactory() {}
   private:
     RequestHandler* request_handler_;
 };
diff --git a/consumer/api/cpp/include/asapo/consumer/consumer.h b/consumer/api/cpp/include/asapo/consumer/consumer.h
index e6afb18b8d9de09bd6ce72abf5ec671385fde755..f7a645f467a4d20feae40bbd045fe92eb0d5f74a 100644
--- a/consumer/api/cpp/include/asapo/consumer/consumer.h
+++ b/consumer/api/cpp/include/asapo/consumer/consumer.h
@@ -102,7 +102,7 @@ class Consumer {
     //! Get current number of messages in stream
     /*!
       \param stream - stream to use
-      \param err - return nullptr of operation succeed, error otherwise.
+      \param err - return nullptr if operation succeed, error otherwise.
       \return number of datasets.
     */
     virtual uint64_t GetCurrentSize(std::string stream, Error* err) = 0;
@@ -111,14 +111,14 @@ class Consumer {
     /*!
       \param stream - stream to use
       \param include_incomplete - flag to count incomplete datasets as well
-      \param err - return nullptr of operation succeed, error otherwise.
+      \param err - return nullptr if operation succeed, error otherwise.
       \return number of datasets.
     */
     virtual uint64_t GetCurrentDatasetCount(std::string stream, bool include_incomplete, Error* err) = 0;
 
     //! Generate new GroupID.
     /*!
-      \param err - return nullptr of operation succeed, error otherwise.
+      \param err - return nullptr if operation succeed, error otherwise.
       \return group ID.
     */
 
@@ -126,11 +126,19 @@ class Consumer {
 
     //! Get Beamtime metadata.
     /*!
-      \param err - return nullptr of operation succeed, error otherwise.
+      \param err - return nullptr if operation succeed, error otherwise.
       \return beamtime metadata.
     */
     virtual std::string GetBeamtimeMeta(Error* err) = 0;
 
+    //! Get stream metadata.
+    /*!
+      \param stream - stream to use
+      \param err - return nullptr if operation succeed, error otherwise.
+      \return stream metadata.
+    */
+    virtual std::string GetStreamMeta(const std::string& stream, Error* err) = 0;
+
     //! Receive next available message.
     /*!
       \param info -  where to store message metadata. Can be set to nullptr only message data is needed.
diff --git a/consumer/api/cpp/src/consumer_impl.cpp b/consumer/api/cpp/src/consumer_impl.cpp
index 00ca9b96158f167159ff5817927a059a2f862fec..6dbeeaaef2b7784875e3a0fb57520be94438077e 100644
--- a/consumer/api/cpp/src/consumer_impl.cpp
+++ b/consumer/api/cpp/src/consumer_impl.cpp
@@ -292,7 +292,7 @@ Error ConsumerImpl::GetRecordFromServer(std::string* response, std::string group
     std::string request_suffix = OpToUriCmd(op);
     std::string request_group = OpToUriCmd(op);
 
-    std::string request_api = UriPrefix(std::move(stream), "", "");
+    std::string request_api = BrokerApiUri(std::move(stream), "", "");
     uint64_t elapsed_ms = 0;
     Error no_data_error;
     while (true) {
@@ -576,7 +576,7 @@ Error ConsumerImpl::ResetLastReadMarker(std::string group_id, std::string stream
 
 Error ConsumerImpl::SetLastReadMarker(std::string group_id, uint64_t value, std::string stream) {
     RequestInfo ri;
-    ri.api = UriPrefix(std::move(stream), std::move(group_id), "resetcounter");
+    ri.api = BrokerApiUri(std::move(stream), std::move(group_id), "resetcounter");
 
     ri.extra_params = "&value=" + std::to_string(value);
     ri.post = true;
@@ -606,7 +606,7 @@ Error ConsumerImpl::GetRecordFromServerById(uint64_t id, std::string* response,
     }
 
     RequestInfo ri;
-    ri.api = UriPrefix(std::move(stream), std::move(group_id), std::to_string(id));
+    ri.api = BrokerApiUri(std::move(stream), std::move(group_id), std::to_string(id));
 
 
     if (dataset) {
@@ -621,11 +621,19 @@ Error ConsumerImpl::GetRecordFromServerById(uint64_t id, std::string* response,
 
 std::string ConsumerImpl::GetBeamtimeMeta(Error* err) {
     RequestInfo ri;
-    ri.api = UriPrefix("default", "0", "meta/0");
+    ri.api = BrokerApiUri("default", "0", "meta/0");
 
     return BrokerRequestWithTimeout(ri, err);
 }
 
+std::string ConsumerImpl::GetStreamMeta(const std::string& stream, Error* err) {
+    RequestInfo ri;
+    ri.api = BrokerApiUri(stream, "0", "meta/1");
+
+    return BrokerRequestWithTimeout(ri, err);
+}
+
+
 DataSet DecodeDatasetFromResponse(std::string response, Error* err) {
     DataSet res;
     if (!res.SetFromJson(std::move(response))) {
@@ -643,7 +651,7 @@ MessageMetas ConsumerImpl::QueryMessages(std::string query, std::string stream,
     }
 
     RequestInfo ri;
-    ri.api = UriPrefix(std::move(stream), "0", "querymessages");
+    ri.api = BrokerApiUri(std::move(stream), "0", "querymessages");
 
     ri.post = true;
     ri.body = std::move(query);
@@ -741,7 +749,7 @@ StreamInfos ConsumerImpl::GetStreamList(std::string from, StreamFilter filter, E
 
 RequestInfo ConsumerImpl::GetStreamListRequest(const std::string& from, const StreamFilter& filter) const {
     RequestInfo ri;
-    ri.api = UriPrefix("0", "", "streams");
+    ri.api = BrokerApiUri("0", "", "streams");
     ri.post = false;
     if (!from.empty()) {
         ri.extra_params = "&from=" + httpclient__->UrlEscape(from);
@@ -809,7 +817,7 @@ Error ConsumerImpl::Acknowledge(std::string group_id, uint64_t id, std::string s
         return ConsumerErrorTemplates::kWrongInput.Generate("empty stream");
     }
     RequestInfo ri;
-    ri.api = UriPrefix(std::move(stream), std::move(group_id), std::to_string(id));
+    ri.api = BrokerApiUri(std::move(stream), std::move(group_id), std::to_string(id));
     ri.post = true;
     ri.body = "{\"Op\":\"ackmessage\"}";
 
@@ -828,7 +836,7 @@ IdList ConsumerImpl::GetUnacknowledgedMessages(std::string group_id,
         return {};
     }
     RequestInfo ri;
-    ri.api = UriPrefix(std::move(stream), std::move(group_id), "nacks");
+    ri.api = BrokerApiUri(std::move(stream), std::move(group_id), "nacks");
     ri.extra_params = "&from=" + std::to_string(from_id) + "&to=" + std::to_string(to_id);
 
     auto json_string = BrokerRequestWithTimeout(ri, error);
@@ -851,7 +859,7 @@ uint64_t ConsumerImpl::GetLastAcknowledgedMessage(std::string group_id, std::str
         return 0;
     }
     RequestInfo ri;
-    ri.api = UriPrefix(std::move(stream), std::move(group_id), "lastack");
+    ri.api = BrokerApiUri(std::move(stream), std::move(group_id), "lastack");
 
     auto json_string = BrokerRequestWithTimeout(ri, error);
     if (*error) {
@@ -884,7 +892,7 @@ Error ConsumerImpl::NegativeAcknowledge(std::string group_id,
         return ConsumerErrorTemplates::kWrongInput.Generate("empty stream");
     }
     RequestInfo ri;
-    ri.api = UriPrefix(std::move(stream), std::move(group_id), std::to_string(id));
+    ri.api = BrokerApiUri(std::move(stream), std::move(group_id), std::to_string(id));
     ri.post = true;
     ri.body = R"({"Op":"negackmessage","Params":{"DelayMs":)" + std::to_string(delay_ms) + "}}";
 
@@ -926,7 +934,7 @@ uint64_t ConsumerImpl::ParseGetCurrentCountResponce(Error* err, const std::strin
 
 RequestInfo ConsumerImpl::GetSizeRequestForSingleMessagesStream(std::string& stream) const {
     RequestInfo ri;
-    ri.api = UriPrefix(std::move(stream), "", "size");
+    ri.api = BrokerApiUri(std::move(stream), "", "size");
     return ri;
 }
 
@@ -966,7 +974,7 @@ Error ConsumerImpl::GetVersionInfo(std::string* client_info, std::string* server
 
 RequestInfo ConsumerImpl::GetDeleteStreamRequest(std::string stream, DeleteStreamOptions options) const {
     RequestInfo ri;
-    ri.api = UriPrefix(std::move(stream), "", "delete");
+    ri.api = BrokerApiUri(std::move(stream), "", "delete");
     ri.post = true;
     ri.body = options.Json();
     return ri;
@@ -979,7 +987,7 @@ Error ConsumerImpl::DeleteStream(std::string stream, DeleteStreamOptions options
     return err;
 }
 
-std::string ConsumerImpl::UriPrefix( std::string stream, std::string group, std::string suffix) const {
+std::string ConsumerImpl::BrokerApiUri(std::string stream, std::string group, std::string suffix) const {
     auto stream_encoded = httpclient__->UrlEscape(std::move(stream));
     auto group_encoded = group.size() > 0 ? httpclient__->UrlEscape(std::move(group)) : "";
     auto uri = "/" + kConsumerProtocol.GetBrokerVersion() + "/beamtime/" + source_credentials_.beamtime_id + "/"
@@ -995,4 +1003,5 @@ std::string ConsumerImpl::UriPrefix( std::string stream, std::string group, std:
 
 }
 
+
 }
\ No newline at end of file
diff --git a/consumer/api/cpp/src/consumer_impl.h b/consumer/api/cpp/src/consumer_impl.h
index 2804309ae2446656e4c49224b8f917ba695c526a..52b7fa900682232ec946d0ea0bf8b03876608bfd 100644
--- a/consumer/api/cpp/src/consumer_impl.h
+++ b/consumer/api/cpp/src/consumer_impl.h
@@ -79,6 +79,7 @@ class ConsumerImpl final : public asapo::Consumer {
 
     std::string GenerateNewGroupId(Error* err) override;
     std::string GetBeamtimeMeta(Error* err) override;
+    std::string GetStreamMeta(const std::string& stream, Error* err) override;
 
     uint64_t GetCurrentSize(std::string stream, Error* err) override;
     uint64_t GetCurrentDatasetCount(std::string stream, bool include_incomplete, Error* err) override;
@@ -150,7 +151,7 @@ class ConsumerImpl final : public asapo::Consumer {
     uint64_t GetCurrentCount(std::string stream, const RequestInfo& ri, Error* err);
     RequestInfo GetStreamListRequest(const std::string& from, const StreamFilter& filter) const;
     Error GetServerVersionInfo(std::string* server_info, bool* supported) ;
-    std::string UriPrefix( std::string stream, std::string group, std::string suffix) const;
+    std::string BrokerApiUri(std::string stream, std::string group, std::string suffix) const;
 
     std::string endpoint_;
     std::string current_broker_uri_;
diff --git a/consumer/api/cpp/unittests/test_consumer_impl.cpp b/consumer/api/cpp/unittests/test_consumer_impl.cpp
index 517809314ec2da0d45d9e34fcc967042174d2eeb..bb279cff58b4e7e3ae99908eee85bc19186a0576 100644
--- a/consumer/api/cpp/unittests/test_consumer_impl.cpp
+++ b/consumer/api/cpp/unittests/test_consumer_impl.cpp
@@ -792,7 +792,7 @@ TEST_F(ConsumerImplTests, GetByIdReturnsEndOfStreamWhenIdTooLarge) {
     ASSERT_THAT(err, Eq(asapo::ConsumerErrorTemplates::kEndOfStream));
 }
 
-TEST_F(ConsumerImplTests, GetMetaDataOK) {
+TEST_F(ConsumerImplTests, GetBeamtimeMetaDataOK) {
     MockGetBrokerUri();
     consumer->SetTimeout(100);
 
@@ -812,6 +812,25 @@ TEST_F(ConsumerImplTests, GetMetaDataOK) {
 
 }
 
+TEST_F(ConsumerImplTests, GetStreamMetaDataOK) {
+    MockGetBrokerUri();
+    consumer->SetTimeout(100);
+
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source_encoded +
+                                        "/" + expected_stream_encoded + "/0/meta/1?token="
+                                        + expected_token, _,
+                                        _)).WillOnce(DoAll(
+                                                SetArgPointee<1>(HttpCode::OK),
+                                                SetArgPointee<2>(nullptr),
+                                                Return(expected_metadata)));
+
+    asapo::Error err;
+    auto res = consumer->GetStreamMeta(expected_stream, &err);
+
+    ASSERT_THAT(err, Eq(nullptr));
+    ASSERT_THAT(res, Eq(expected_metadata));
+}
+
 TEST_F(ConsumerImplTests, QueryMessagesReturnError) {
     MockGetBrokerUri();
 
diff --git a/consumer/api/python/asapo_consumer.pxd b/consumer/api/python/asapo_consumer.pxd
index e2c7846b82f3dbee3e570a7a04a0667016b1c411..1347d886237eb27133ff20d87f2a11d3e1e7a54f 100644
--- a/consumer/api/python/asapo_consumer.pxd
+++ b/consumer/api/python/asapo_consumer.pxd
@@ -82,6 +82,7 @@ cdef extern from "asapo/asapo_consumer.h" namespace "asapo" nogil:
         IdList GetUnacknowledgedMessages(string group_id, uint64_t from_id, uint64_t to_id, string stream, Error* error)
         string GenerateNewGroupId(Error* err)
         string GetBeamtimeMeta(Error* err)
+        string GetStreamMeta(string stream,Error* err)
         MessageMetas QueryMessages(string query, string stream, Error* err)
         DataSet GetNextDataset(string group_id, uint64_t min_size, string stream, Error* err)
         DataSet GetLastDataset(uint64_t min_size, string stream, Error* err)
diff --git a/consumer/api/python/asapo_consumer.pyx.in b/consumer/api/python/asapo_consumer.pyx.in
index 6825c2e3ec5d029cba9f505d20ff3080a45ae3cb..1efc446dc622ab7fc7d1043f9a0be1cea6cde6e4 100644
--- a/consumer/api/python/asapo_consumer.pyx.in
+++ b/consumer/api/python/asapo_consumer.pyx.in
@@ -375,8 +375,18 @@ cdef class PyConsumer:
         if err:
             throw_exception(err)
         meta = json.loads(_str(meta_str))
-        del meta['_id']
         return meta
+    def get_stream_meta(self, stream = 'default'):
+        cdef Error err
+        cdef string b_stream = _bytes(stream)
+        cdef string meta_str
+        with nogil:
+            meta_str = self.c_consumer.get().GetStreamMeta(b_stream,&err)
+        if err:
+            throw_exception(err)
+        meta = json.loads(_str(meta_str))
+        return meta
+
     def interrupt_current_operation(self):
         self.c_consumer.get().InterruptCurrentOperation()
 cdef class __PyConsumerFactory:
diff --git a/consumer/api/python/dist_linux/CMakeLists.txt b/consumer/api/python/dist_linux/CMakeLists.txt
index 097f8081f12fb5f2184b40b92ab0bfe10f45bcc0..daaf9bfcaac816bb0274c7078346fb0653192eab 100644
--- a/consumer/api/python/dist_linux/CMakeLists.txt
+++ b/consumer/api/python/dist_linux/CMakeLists.txt
@@ -49,6 +49,7 @@ ADD_CUSTOM_TARGET(copy_python_dist-consumer ALL
         COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_BINARY_DIR}/../asapo_consumer.cpp ${CMAKE_CURRENT_BINARY_DIR}/.
         COMMAND ${CMAKE_COMMAND} -E copy_directory ${CMAKE_SOURCE_DIR}/consumer/api/cpp/include ${CMAKE_CURRENT_BINARY_DIR}/include
         COMMAND ${CMAKE_COMMAND} -E copy_directory ${ASAPO_CXX_COMMON_INCLUDE_DIR}/asapo/common ${CMAKE_CURRENT_BINARY_DIR}/include/asapo/common
+        COMMAND ${CMAKE_COMMAND} -E copy_directory ${ASAPO_CXX_COMMON_INCLUDE_DIR}/asapo/preprocessor ${CMAKE_CURRENT_BINARY_DIR}/include/asapo/preprocessor
         COMMAND ${CMAKE_COMMAND} -E remove ${CMAKE_CURRENT_BINARY_DIR}/include/common/version.h.in
         COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_BINARY_DIR}/lib
         COMMAND ${CMAKE_COMMAND} -E copy ${CURL_LIBRARIES} ${CMAKE_CURRENT_BINARY_DIR}/lib
diff --git a/examples/consumer/getnext_python/check_linux.sh b/examples/consumer/getnext_python/check_linux.sh
index 9f6fb85bd8f428a51c2cc9a163255c8e217ba456..5e7040bd39b6c5fc9190eab44b5d79c7f7753941 100644
--- a/examples/consumer/getnext_python/check_linux.sh
+++ b/examples/consumer/getnext_python/check_linux.sh
@@ -20,7 +20,7 @@ do
 	echo 'db.data_default.insert({"_id":'$i',"size":100,"name":"'$i'","timestamp":0,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"test":10}})' | mongo ${database_name}
 done
 
-echo 'db.meta.insert({"_id":0,"meta_test":"test"})' | mongo ${database_name}
+echo 'db.meta.insert({"_id":"bt","meta":{"meta_test":"test"}})' | mongo ${database_name}
 
 sleep 1
 
diff --git a/examples/consumer/getnext_python/check_windows.bat b/examples/consumer/getnext_python/check_windows.bat
index e48e27bd8c3a9a23fef9fb366ac6ef564a297057..9c0f94087c431e4d272bdccc96798d5f4793a93b 100644
--- a/examples/consumer/getnext_python/check_windows.bat
+++ b/examples/consumer/getnext_python/check_windows.bat
@@ -10,7 +10,7 @@ set group_id=bif31l2uiddd4r0q6b40
 for /l %%x in (1, 1, 3) do echo db.data_default.insert({"_id":%%x,"size":100,"name":"%%x","timestamp":0,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"test":10}}) | %mongo_exe% %database_name%  || goto :error
 
 
-echo db.meta.insert({"_id":0,"meta_test":"test"}) | %mongo_exe% %database_name%  || goto :error
+echo db.meta.insert({"_id":"bt","meta":{"meta_test":"test"}}) | %mongo_exe% %database_name%  || goto :error
 
 set PYTHONPATH=%1
 
diff --git a/examples/consumer/simple-consumer/CMakeLists.txt b/examples/consumer/simple-consumer/CMakeLists.txt
deleted file mode 100644
index 359e4d7cdc9458592e7ccb1403e36bbf3d0cdcdb..0000000000000000000000000000000000000000
--- a/examples/consumer/simple-consumer/CMakeLists.txt
+++ /dev/null
@@ -1,21 +0,0 @@
-cmake_minimum_required(VERSION 2.8)
-
-project(asapo-consume)
-
-set(CMAKE_CXX_STANDARD 11)
-
-IF(CMAKE_C_COMPILER_ID STREQUAL "GNU")
-    SET( CMAKE_EXE_LINKER_FLAGS  "${CMAKE_EXE_LINKER_FLAGS} -static-libgcc -static-libstdc++")
-ENDIF()
-
-find_package (Threads)
-
-set(TARGET_NAME "asapo-consume")
-
-set(SOURCE_FILES consume.cpp)
-
-link_directories(asapo/lib)
-
-add_executable(${TARGET_NAME} ${SOURCE_FILES})
-target_include_directories(${TARGET_NAME} PUBLIC asapo/include)
-target_link_libraries(${TARGET_NAME} asapo-consumer curl ${CMAKE_THREAD_LIBS_INIT})
diff --git a/examples/consumer/simple-consumer/consume.cpp b/examples/consumer/simple-consumer/consume.cpp
deleted file mode 100644
index e08857772ca2feec13e3f73919886e9b22174ae7..0000000000000000000000000000000000000000
--- a/examples/consumer/simple-consumer/consume.cpp
+++ /dev/null
@@ -1,36 +0,0 @@
-#include "asapo/asapo_consumer.h"
-#include <iostream>
-
-void exit_if_error(std::string error_string, const asapo::Error& err) {
-    if (err) {
-        std::cerr << error_string << err << std::endl;
-        exit(EXIT_FAILURE);
-    }
-}
-
-int main(int argc, char* argv[]) {
-    asapo::Error err;
-
-    auto endpoint = "asapo-services2:8400";
-    auto beamtime = "asapo_test";
-    auto token = "KmUDdacgBzaOD3NIJvN1NmKGqWKtx0DK-NyPjdpeWkc=";
-
-    auto consumer = asapo::ConsumerFactory::CreateConsumer(endpoint, "", true, asapo::SourceCredentials{asapo::SourceType::kProcessed, beamtime, "", "", token}, &err);
-    exit_if_error("Cannot create consumer", err);
-    consumer->SetTimeout((uint64_t) 1000);
-
-    auto group_id = consumer->GenerateNewGroupId(&err);
-    exit_if_error("Cannot create group id", err);
-
-    asapo::MessageMeta fi;
-    asapo::MessageData data;
-
-    err = consumer->GetLast(&fi, &data, group_id);
-    exit_if_error("Cannot get next record", err);
-
-    std::cout << "id: " << fi.id << std::endl;
-    std::cout << "file name: " << fi.name << std::endl;
-    std::cout << "file content: " << reinterpret_cast<char const*>(data.get()) << std::endl;
-    return EXIT_SUCCESS;
-}
-
diff --git a/examples/pipeline/in_to_out_python/check_linux.sh b/examples/pipeline/in_to_out_python/check_linux.sh
index 04c4fef60afd82c449055f992e913efaa46f60c5..c2d2fb48b33293d587a0b5dae855962f8e289977 100644
--- a/examples/pipeline/in_to_out_python/check_linux.sh
+++ b/examples/pipeline/in_to_out_python/check_linux.sh
@@ -57,7 +57,9 @@ export PYTHONPATH=$2:$3:${PYTHONPATH}
 $1 $4 127.0.0.1:8400 $source_path $beamtime_id $data_source_in $data_source_out $token $timeout $timeout_producer $nthreads 1  > out
 cat out
 cat out | grep "Processed 3 file(s)"
-cat out | grep "Sent 3 file(s)"
+cat out | grep "Sent 5 file(s)"
+cat out | grep bt_meta
+cat out | grep st_meta
 
 echo "db.data_default.find({"_id":1})" | mongo ${outdatabase_name} | tee /dev/stderr | grep "file1_${data_source_out}"
 
diff --git a/examples/pipeline/in_to_out_python/check_windows.bat b/examples/pipeline/in_to_out_python/check_windows.bat
index 44d24a150d7af85f80ed349772f287b5821a1cb3..4dbce93526e1db54370b8380f489ba60bbca672e 100644
--- a/examples/pipeline/in_to_out_python/check_windows.bat
+++ b/examples/pipeline/in_to_out_python/check_windows.bat
@@ -35,7 +35,9 @@ set PYTHONPATH=%2;%3
 
 type out
 findstr /I /L /C:"Processed 3 file(s)" out || goto :error
-findstr /I /L /C:"Sent 3 file(s)" out || goto :error
+findstr /I /L /C:"Sent 5 file(s)" out || goto :error
+findstr /I /L /C:"bt_meta" out || goto :error
+findstr /I /L /C:"st_meta" out || goto :error
 
 echo db.data_default.find({"_id":1}) | %mongo_exe% %outdatabase_name% | findstr  /c:"file1_%data_source_out%"  || goto :error
 
diff --git a/examples/pipeline/in_to_out_python/in_to_out.py b/examples/pipeline/in_to_out_python/in_to_out.py
index 93e7328e040c8f6eceb1a37dcc555b8c6f6702c7..94d3802d786eb53b30626f0ac7ea07cb2c9b2f9e 100644
--- a/examples/pipeline/in_to_out_python/in_to_out.py
+++ b/examples/pipeline/in_to_out_python/in_to_out.py
@@ -36,6 +36,10 @@ group_id  = consumer.generate_group_id()
 
 n_recv = 0
 
+producer.send_beamtime_meta('{"data":"bt_meta"}', callback = callback)
+producer.send_stream_meta('{"data":"st_meta"}',stream = 'stream_in', callback = callback)
+
+
 if transfer_data:
     ingest_mode = asapo_producer.DEFAULT_INGEST_MODE
 else:
@@ -55,5 +59,12 @@ while True:
 
 producer.wait_requests_finished(timeout_s_producer*1000)
 
+consumer = asapo_consumer.create_consumer(source,path, True,beamtime,stream_out,token,timeout_s*1000)
+bt_meta = consumer.get_beamtime_meta()
+st_meta = consumer.get_stream_meta('stream_in')
+print ('bt_meta:',bt_meta)
+print ('st_meta:',st_meta)
+
+
 print ("Processed "+str(n_recv)+" file(s)")
 print ("Sent "+str(n_send)+" file(s)")
diff --git a/producer/api/cpp/include/asapo/producer/producer.h b/producer/api/cpp/include/asapo/producer/producer.h
index faa23467f770aeb740b771a0f6c9de60676bf724..9d09d71f8ee4093f503052a0a57df5332ff9ee33 100644
--- a/producer/api/cpp/include/asapo/producer/producer.h
+++ b/producer/api/cpp/include/asapo/producer/producer.h
@@ -7,6 +7,7 @@
 #include "asapo/logger/logger.h"
 #include "common.h"
 #include "asapo/common/data_structs.h"
+#include "asapo/preprocessor/definitions.h"
 
 namespace asapo {
 
@@ -40,6 +41,21 @@ class Producer {
     */
     virtual StreamInfo GetStreamInfo(std::string stream, uint64_t timeout_ms, Error* err) const = 0;
 
+    //! Get stream metadata from receiver
+    /*!
+      \param stream - stream to send messages to
+      \param timeout_ms - operation timeout in milliseconds
+      \return JSON string with metadata
+    */
+    virtual std::string GetStreamMeta(const std::string& stream, uint64_t timeout_ms, Error* err) const = 0;
+
+    //! Get beamtime metadata from receiver
+    /*!
+      \param timeout_ms - operation timeout in milliseconds
+      \return JSON string with metadata
+    */
+    virtual std::string GetBeamtimeMeta(uint64_t timeout_ms, Error* err) const = 0;
+
     //! Delete stream
     /*!
       \param stream - stream to send messages to
@@ -101,16 +117,39 @@ class Producer {
       \return Error - Will be nullptr on success
     */
     virtual Error SendStreamFinishedFlag(std::string stream, uint64_t last_id, std::string next_stream,
-                                         RequestCallback callback) = 0;
+                                         RequestCallback callback) = 0;
 
 
-    //! Sends metadata for the current beamtime to the receiver
+    //! Sends beamtime metadata to the receiver
+    /*!
+      \deprecated { deprecated, obsoletes 01.07.2022, use SendBeamtimeMetadata instead}
+      \param metadata - a JSON string with metadata
+      \param callback - callback function
+      \return Error - will be nullptr on success
+    */
+    virtual Error DEPRECATED("obsoletes 01.07.2022, use SendBeamtimeMetadata instead") SendMetadata(
+        const std::string& metadata,
+        RequestCallback callback)  = 0;
+
+    //! Sends beamtime metadata to the receiver
+    /*!
+      \param metadata - a JSON string with metadata
+      \param callback - callback function
+      \return Error - will be nullptr on success
+    */
+    virtual Error SendBeamtimeMetadata(const std::string& metadata, MetaIngestMode mode, RequestCallback callback) = 0;
+
+    //! Sends stream metadata to the receiver
     /*!
+      \param stream - name of the stream
       \param metadata - a JSON string with metadata
       \param callback - callback function
       \return Error - will be nullptr on success
     */
-    virtual Error SendMetadata(const std::string& metadata, RequestCallback callback) = 0;
+    virtual Error SendStreamMetadata(const std::string& metadata,
+                                     MetaIngestMode mode,
+                                     const std::string& stream,
+                                     RequestCallback callback) = 0;
 
     //! Set internal log level
     virtual void SetLogLevel(LogLevel level) = 0;
diff --git a/producer/api/cpp/src/producer_impl.cpp b/producer/api/cpp/src/producer_impl.cpp
index f13f710200bee38fe466f1452011664b21e8b11c..a0f2c7477e40b883bf057697c7527e83cc5f8620 100644
--- a/producer/api/cpp/src/producer_impl.cpp
+++ b/producer/api/cpp/src/producer_impl.cpp
@@ -35,7 +35,7 @@ ProducerImpl::ProducerImpl(std::string endpoint, uint8_t n_processing_threads, u
 GenericRequestHeader ProducerImpl::GenerateNextSendRequest(const MessageHeader& message_header, std::string stream,
         uint64_t ingest_mode) {
     GenericRequestHeader request{kOpcodeTransferData, message_header.message_id, message_header.data_size,
-                                 message_header.user_metadata.size(), message_header.file_name, stream};
+                                 message_header.user_metadata.size(), message_header.file_name, std::move(stream)};
     if (message_header.dataset_substream != 0) {
         request.op_code = kOpcodeTransferDatasetData;
         request.custom_data[kPosDataSetId] = message_header.dataset_substream;
@@ -251,17 +251,8 @@ Error ProducerImpl::SetCredentials(SourceCredentials source_cred) {
 }
 
 Error ProducerImpl::SendMetadata(const std::string& metadata, RequestCallback callback) {
-    GenericRequestHeader request_header{kOpcodeTransferMetaData, 0, metadata.size(), 0, "beamtime_global.meta"};
-    request_header.custom_data[kPosIngestMode] = asapo::IngestModeFlags::kTransferData |
-                                                 asapo::IngestModeFlags::kStoreInDatabase;
-    MessageData data{new uint8_t[metadata.size()]};
-    strncpy((char*) data.get(), metadata.c_str(), metadata.size());
-    auto err = request_pool__->AddRequest(std::unique_ptr<ProducerRequest> {
-        new ProducerRequest{
-            source_cred_string_, std::move(request_header),
-            std::move(data), "", "", callback, true, timeout_ms_}
-    });
-    return HandleErrorFromPool(std::move(err), true);
+    auto mode = MetaIngestMode{MetaIngestOp::kReplace, true};
+    return SendBeamtimeMetadata(metadata, mode, callback);
 }
 
 Error ProducerImpl::Send__(const MessageHeader& message_header,
@@ -327,14 +318,13 @@ RequestCallback unwrap_callback(RequestCallbackWithPromise<T> callback,
     return wrapper;
 }
 
-void ActivatePromiseForStreamInfo(std::shared_ptr<std::promise<StreamInfoResult>> promise,
-                                  RequestCallbackPayload payload,
-                                  Error err) {
-    StreamInfoResult res;
+void ActivatePromiseForReceiverResponse(std::shared_ptr<std::promise<ReceiverResponse>> promise,
+                                        RequestCallbackPayload payload,
+                                        Error err) {
+    ReceiverResponse res;
     if (err == nullptr) {
-        auto ok = res.sinfo.SetFromJson(payload.response);
-        res.err = ok ? nullptr : ProducerErrorTemplates::kInternalServerError.Generate(
-                      std::string("cannot read JSON string from server response: ") + payload.response).release();
+        res.payload = payload.response;
+        res.err = nullptr;
     } else {
         res.err = err.release();
     }
@@ -343,20 +333,6 @@ void ActivatePromiseForStreamInfo(std::shared_ptr<std::promise<StreamInfoResult>
     } catch (...) {}
 }
 
-void ActivatePromiseForErrorInterface(std::shared_ptr<std::promise<ErrorInterface*>> promise,
-                                      RequestCallbackPayload payload,
-                                      Error err) {
-    ErrorInterface* res;
-    if (err == nullptr) {
-        res = nullptr;
-    } else {
-        res = err.release();
-    }
-    try {
-        promise->set_value(res);
-    } catch (...) {}
-}
-
 template<class T>
 T GetResultFromCallback(std::future<T>* promiseResult, uint64_t timeout_ms, Error* err) {
     try {
@@ -380,31 +356,55 @@ GenericRequestHeader CreateRequestHeaderFromOp(StreamRequestOp op, std::string s
     return GenericRequestHeader{};
 }
 
-StreamInfo ProducerImpl::StreamRequest(StreamRequestOp op, std::string stream, uint64_t timeout_ms, Error* err) const {
-    auto header = CreateRequestHeaderFromOp(op, stream);
-    std::unique_ptr<std::promise<StreamInfoResult>> promise{new std::promise<StreamInfoResult>};
-    std::future<StreamInfoResult> promiseResult = promise->get_future();
+std::string ProducerImpl::BlockingRequest(GenericRequestHeader header, uint64_t timeout_ms, Error* err) const {
+    std::unique_ptr<std::promise<ReceiverResponse>> promise{new std::promise<ReceiverResponse>};
+    std::future<ReceiverResponse> promiseResult = promise->get_future();
 
     *err = request_pool__->AddRequest(std::unique_ptr<ProducerRequest> {
         new ProducerRequest{
             source_cred_string_, std::move(header),
             nullptr, "", "",
             unwrap_callback(
-                ActivatePromiseForStreamInfo,
+                ActivatePromiseForReceiverResponse,
                 std::move(promise)), true,
             timeout_ms}
     }, true);
     if (*err) {
-        return StreamInfo{};
+        return "";
     }
-    auto res = GetResultFromCallback<StreamInfoResult>(&promiseResult, timeout_ms + 2000,
+
+    auto res = GetResultFromCallback<ReceiverResponse>(&promiseResult, timeout_ms + 2000,
                err); // we give two more sec for request to exit by timeout
+    if (*err) {
+        return "";
+    }
+
+
     if (res.err == nullptr) {
-        return res.sinfo;
+        return res.payload;
     } else {
         (*err).reset(res.err);
+        return "";
+    }
+}
+
+StreamInfo ProducerImpl::StreamRequest(StreamRequestOp op, std::string stream, uint64_t timeout_ms, Error* err) const {
+    auto header = CreateRequestHeaderFromOp(op, stream);
+
+    auto response = BlockingRequest(std::move(header), timeout_ms, err);
+    if (*err) {
+        return StreamInfo{};
+    }
+
+    StreamInfo res;
+    if (!res.SetFromJson(response)) {
+        *err = ProducerErrorTemplates::kInternalServerError.Generate(
+                   std::string("cannot read JSON string from server response: ") + response);
         return StreamInfo{};
     }
+
+    *err = nullptr;
+    return res;
 }
 
 StreamInfo ProducerImpl::GetStreamInfo(std::string stream, uint64_t timeout_ms, Error* err) const {
@@ -459,28 +459,61 @@ Error ProducerImpl::DeleteStream(std::string stream, uint64_t timeout_ms, Delete
     auto header = GenericRequestHeader{kOpcodeDeleteStream, 0, 0, 0, "", stream};
     header.custom_data[0] = options.Encode();
 
-    std::unique_ptr<std::promise<ErrorInterface*>> promise{new std::promise<ErrorInterface*>};
-    std::future<ErrorInterface*> promiseResult = promise->get_future();
+    Error err;
+    BlockingRequest(std::move(header), timeout_ms, &err);
+    return err;
+}
+
+Error ProducerImpl::SendBeamtimeMetadata(const std::string& metadata, MetaIngestMode mode, RequestCallback callback) {
+    return SendMeta(metadata, mode, "", callback);
+}
+
+Error ProducerImpl::SendStreamMetadata(const std::string& metadata,
+                                       MetaIngestMode mode,
+                                       const std::string& stream,
+                                       RequestCallback callback) {
+    if (stream.empty()) {
+        return ProducerErrorTemplates::kWrongInput.Generate("stream is empty");
+    }
+    return SendMeta(metadata, mode, stream, callback);
+}
 
+Error ProducerImpl::SendMeta(const std::string& metadata,
+                             MetaIngestMode mode,
+                             std::string stream,
+                             RequestCallback callback) {
+    GenericRequestHeader request_header{kOpcodeTransferMetaData, 0, metadata.size(), 0,
+                                        stream.empty() ? "beamtime_global.meta" : stream + ".meta",
+                                        stream};
+    request_header.custom_data[kPosIngestMode] = asapo::IngestModeFlags::kTransferData |
+                                                 asapo::IngestModeFlags::kStoreInDatabase;
+    request_header.custom_data[kPosMetaIngestMode] = mode.Encode();
+    MessageData data{new uint8_t[metadata.size()]};
+    strncpy((char*) data.get(), metadata.c_str(), metadata.size());
     auto err = request_pool__->AddRequest(std::unique_ptr<ProducerRequest> {
         new ProducerRequest{
-            source_cred_string_, std::move(header),
-            nullptr, "", "",
-            unwrap_callback<ErrorInterface*>(
-                ActivatePromiseForErrorInterface,
-                std::move(promise)), true,
-            timeout_ms}
-    }, true);
-    if (err) {
-        return err;
-    }
+            source_cred_string_, std::move(request_header),
+            std::move(data), "", "", callback, true, timeout_ms_}
+    });
+    return HandleErrorFromPool(std::move(err), true);
+}
 
-    auto res = GetResultFromCallback<ErrorInterface*>(&promiseResult, timeout_ms + 2000,
-                                                      &err); // we give two more sec for request to exit by timeout
-    if (err) {
-        return err;
+std::string ProducerImpl::GetStreamMeta(const std::string& stream, uint64_t timeout_ms, Error* err) const {
+    return GetMeta(stream, timeout_ms, err);
+}
+
+std::string ProducerImpl::GetBeamtimeMeta(uint64_t timeout_ms, Error* err) const {
+    return GetMeta("", timeout_ms, err);
+}
+
+std::string ProducerImpl::GetMeta(const std::string& stream, uint64_t timeout_ms, Error* err) const {
+    auto header =  GenericRequestHeader{kOpcodeGetMeta, 0, 0, 0, "", stream};
+    auto response = BlockingRequest(std::move(header), timeout_ms, err);
+    if (*err) {
+        return "";
     }
-    return Error{res};
+    *err = nullptr;
+    return response;
 }
 
 }
\ No newline at end of file
diff --git a/producer/api/cpp/src/producer_impl.h b/producer/api/cpp/src/producer_impl.h
index 35d0ff87f3a516ba2bc822331bcd8942da5179ce..018db26be2668e3917b4ff304e9042315aaf4cca 100644
--- a/producer/api/cpp/src/producer_impl.h
+++ b/producer/api/cpp/src/producer_impl.h
@@ -67,12 +67,26 @@ class ProducerImpl : public Producer {
     Error SetCredentials(SourceCredentials source_cred) override;
 
     Error SendMetadata(const std::string& metadata, RequestCallback callback) override;
+    Error SendBeamtimeMetadata(const std::string& metadata, MetaIngestMode mode, RequestCallback callback) override;
+    Error SendStreamMetadata(const std::string& metadata,
+                             MetaIngestMode mode,
+                             const std::string& stream,
+                             RequestCallback callback) override;
+
     uint64_t GetRequestsQueueSize() override;
     Error WaitRequestsFinished(uint64_t timeout_ms) override;
     uint64_t GetRequestsQueueVolumeMb() override;
     void SetRequestsQueueLimits(uint64_t size, uint64_t volume) override;
+    std::string GetStreamMeta(const std::string& stream, uint64_t timeout_ms, Error* err) const override;
+    std::string GetBeamtimeMeta(uint64_t timeout_ms, Error* err) const override;
+
   private:
+    Error SendMeta(const std::string& metadata,
+                   MetaIngestMode mode,
+                   std::string stream,
+                   RequestCallback callback);
     StreamInfo StreamRequest(StreamRequestOp op, std::string stream, uint64_t timeout_ms, Error* err) const;
+    std::string BlockingRequest(GenericRequestHeader header, uint64_t timeout_ms, Error* err) const;
     Error Send(const MessageHeader& message_header, std::string stream, MessageData data, std::string full_path,
                uint64_t ingest_mode,
                RequestCallback callback, bool manage_data_memory);
@@ -83,11 +97,13 @@ class ProducerImpl : public Producer {
     std::string endpoint_;
     Error GetServerVersionInfo(std::string* server_info,
                                bool* supported) const;
+    std::string GetMeta(const std::string& stream, uint64_t timeout_ms, Error* err) const;
+
 };
 
-struct StreamInfoResult {
-    StreamInfo sinfo;
-    ErrorInterface* err;
+struct ReceiverResponse {
+    std::string payload;
+    ErrorInterface* err{nullptr};
 };
 
 }
diff --git a/producer/api/cpp/src/producer_request_handler_factory.h b/producer/api/cpp/src/producer_request_handler_factory.h
index e11f003bfba67409417c7173cbfbfe325416b956..0200c7c92e15afb27836eddfb5af7de72572e4e2 100644
--- a/producer/api/cpp/src/producer_request_handler_factory.h
+++ b/producer/api/cpp/src/producer_request_handler_factory.h
@@ -13,7 +13,8 @@ class ProducerRequestHandlerFactory : public RequestHandlerFactory {
   public:
     ProducerRequestHandlerFactory(ReceiverDiscoveryService* discovery_service);
     ProducerRequestHandlerFactory(std::string destination_folder);
-    VIRTUAL std::unique_ptr<RequestHandler> NewRequestHandler(uint64_t thread_id, uint64_t* shared_counter) override;
+    virtual std::unique_ptr<RequestHandler> NewRequestHandler(uint64_t thread_id, uint64_t* shared_counter) override;
+    virtual ~ProducerRequestHandlerFactory() { };
   private:
     RequestHandlerType type_;
     ReceiverDiscoveryService* discovery_service_{nullptr};
diff --git a/producer/api/cpp/unittests/test_producer_impl.cpp b/producer/api/cpp/unittests/test_producer_impl.cpp
index 233511dc7e3f8dbb445c6c49a63625211cb70383..216ac1a73ff0046adee313392c71dcc5b22391d4 100644
--- a/producer/api/cpp/unittests/test_producer_impl.cpp
+++ b/producer/api/cpp/unittests/test_producer_impl.cpp
@@ -6,6 +6,7 @@
 #include "asapo/producer/common.h"
 #include "../src/producer_impl.h"
 #include "asapo/producer/producer_error.h"
+#include "asapo/common/internal/version.h"
 
 #include "../src/request_handler_tcp.h"
 #include "asapo/request/request_pool_error.h"
@@ -39,19 +40,25 @@ MATCHER_P10(M_CheckSendRequest, op_code, source_credentials, metadata, file_id,
             dataset_size,
             "Checks if a valid GenericRequestHeader was Send") {
     auto request = static_cast<ProducerRequest*>(arg);
-    return ((asapo::GenericRequestHeader) (arg->header)).op_code == op_code
-           && ((asapo::GenericRequestHeader) (arg->header)).data_id == file_id
-           && ((asapo::GenericRequestHeader) (arg->header)).data_size == uint64_t(file_size)
+    return ((asapo::GenericRequestHeader) (request->header)).op_code == op_code
+           && ((asapo::GenericRequestHeader) (request->header)).data_id == file_id
+           && ((asapo::GenericRequestHeader) (request->header)).data_size == uint64_t(file_size)
            && request->manage_data_memory == true
            && request->source_credentials == source_credentials
            && request->metadata == metadata
-           && (op_code == asapo::kOpcodeTransferDatasetData ? ((asapo::GenericRequestHeader) (arg->header)).custom_data[1]
+           && (op_code == asapo::kOpcodeTransferMetaData ? ((asapo::GenericRequestHeader) (request->header)).custom_data[1]
+               == uint64_t(dataset_id) : true)
+           && (op_code == asapo::kOpcodeTransferDatasetData ? ((asapo::GenericRequestHeader) (request->header)).custom_data[1]
                == uint64_t(dataset_id) : true)
-           && (op_code == asapo::kOpcodeTransferDatasetData ? ((asapo::GenericRequestHeader) (arg->header)).custom_data[2]
+           && (op_code == asapo::kOpcodeTransferDatasetData ? ((asapo::GenericRequestHeader) (request->header)).custom_data[2]
                == uint64_t(dataset_size) : true)
-           && ((asapo::GenericRequestHeader) (arg->header)).custom_data[asapo::kPosIngestMode] == uint64_t(ingest_mode)
-           && strcmp(((asapo::GenericRequestHeader) (arg->header)).message, message) == 0
-           && strcmp(((asapo::GenericRequestHeader) (arg->header)).stream, stream) == 0;
+           && ((asapo::GenericRequestHeader) (request->header)).custom_data[asapo::kPosIngestMode] == uint64_t(ingest_mode)
+           && strcmp(((asapo::GenericRequestHeader) (request->header)).message, message) == 0
+           && strcmp(((asapo::GenericRequestHeader) (request->header)).api_version,
+                     asapo::kProducerProtocol.GetReceiverVersion().c_str()) == 0
+           && strcmp(((asapo::GenericRequestHeader) (request->header)).stream, stream) == 0;
+
+
 }
 
 TEST(ProducerImpl, Constructor) {
@@ -287,7 +294,7 @@ TEST_F(ProducerImplTests, OKSendingSendDatasetDataRequest) {
     ASSERT_THAT(err, Eq(nullptr));
 }
 
-TEST_F(ProducerImplTests, OKAddingSendMetaDataRequest) {
+TEST_F(ProducerImplTests, OKAddingSendMetaDataRequestOld) {
     expected_id = 0;
     expected_metadata = "{\"meta\":10}";
     expected_size = expected_metadata.size();
@@ -302,15 +309,66 @@ TEST_F(ProducerImplTests, OKAddingSendMetaDataRequest) {
                                         "beamtime_global.meta",
                                         "",
                                         expected_ingest_mode,
-                                        10,
+                                        12,
                                         10), false)).WillOnce(Return(
                                                     nullptr));
 
     auto err = producer.SendMetadata(expected_metadata, nullptr);
+    ASSERT_THAT(err, Eq(nullptr));
+}
+
+TEST_F(ProducerImplTests, OKAddingSendMetaDataRequest) {
+    expected_id = 0;
+    expected_metadata = "{\"meta\":10}";
+    expected_size = expected_metadata.size();
+    expected_ingest_mode = asapo::IngestModeFlags::kTransferData | asapo::IngestModeFlags::kStoreInDatabase ;
+
+    producer.SetCredentials(expected_credentials);
+    EXPECT_CALL(mock_pull, AddRequest_t(M_CheckSendRequest(asapo::kOpcodeTransferMetaData,
+                                        expected_credentials_str,
+                                        "",
+                                        expected_id,
+                                        expected_size,
+                                        "beamtime_global.meta",
+                                        "",
+                                        expected_ingest_mode,
+                                        12,
+                                        10), false)).WillOnce(Return(
+                                                    nullptr));
 
+    auto mode = asapo::MetaIngestMode{asapo::MetaIngestOp::kReplace, true};
+    auto err = producer.SendBeamtimeMetadata(expected_metadata, mode, nullptr);
     ASSERT_THAT(err, Eq(nullptr));
 }
 
+TEST_F(ProducerImplTests, OKAddingSendStreamDataRequest) {
+    expected_id = 0;
+    expected_metadata = "{\"meta\":10}";
+    expected_size = expected_metadata.size();
+    expected_ingest_mode = asapo::IngestModeFlags::kTransferData | asapo::IngestModeFlags::kStoreInDatabase ;
+
+    std::string expected_message = (std::string(expected_stream) + ".meta");
+    producer.SetCredentials(expected_credentials);
+    EXPECT_CALL(mock_pull, AddRequest_t(M_CheckSendRequest(asapo::kOpcodeTransferMetaData,
+                                        expected_credentials_str,
+                                        "",
+                                        expected_id,
+                                        expected_size,
+                                        expected_message.c_str(),
+                                        expected_stream,
+                                        expected_ingest_mode,
+                                        1,
+                                        10), false)).WillOnce(Return(
+                                                    nullptr));
+
+
+    auto mode = asapo::MetaIngestMode{asapo::MetaIngestOp::kInsert, false};
+    auto err = producer.SendStreamMetadata(expected_metadata, mode, expected_stream, nullptr);
+
+    ASSERT_THAT(err, Eq(nullptr));
+}
+
+
 TEST_F(ProducerImplTests, ErrorSendingEmptyFileName) {
     producer.SetCredentials(expected_credentials);
 
@@ -443,7 +501,7 @@ TEST_F(ProducerImplTests, WaitRequestsFinished) {
     ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kTimeout));
 }
 
-MATCHER_P3(M_CheckGetStreamInfoRequest, op_code, source_credentials, stream,
+MATCHER_P3(M_CheckGetRequest, op_code, source_credentials, stream,
            "Checks if a valid GenericRequestHeader was Send") {
     auto request = static_cast<ProducerRequest*>(arg);
     return ((asapo::GenericRequestHeader) (arg->header)).op_code == op_code
@@ -451,9 +509,9 @@ MATCHER_P3(M_CheckGetStreamInfoRequest, op_code, source_credentials, stream,
            && strcmp(((asapo::GenericRequestHeader) (arg->header)).stream, stream) == 0;
 }
 
-TEST_F(ProducerImplTests, GetStreamInfoMakesCorerctRequest) {
+TEST_F(ProducerImplTests, GetStreamInfoMakesCorrectRequest) {
     producer.SetCredentials(expected_credentials);
-    EXPECT_CALL(mock_pull, AddRequest_t(M_CheckGetStreamInfoRequest(asapo::kOpcodeStreamInfo,
+    EXPECT_CALL(mock_pull, AddRequest_t(M_CheckGetRequest(asapo::kOpcodeStreamInfo,
                                         expected_credentials_str,
                                         expected_stream), true)).WillOnce(
                                             Return(nullptr));
@@ -479,9 +537,9 @@ TEST(GetStreamInfoTest, GetStreamInfoTimeout) {
     ASSERT_THAT(err->Explain(), HasSubstr("opcode: 4"));
 }
 
-TEST_F(ProducerImplTests, GetLastStreamMakesCorerctRequest) {
+TEST_F(ProducerImplTests, GetLastStreamMakesCorrectRequest) {
     producer.SetCredentials(expected_credentials);
-    EXPECT_CALL(mock_pull, AddRequest_t(M_CheckGetStreamInfoRequest(asapo::kOpcodeLastStream,
+    EXPECT_CALL(mock_pull, AddRequest_t(M_CheckGetRequest(asapo::kOpcodeLastStream,
                                         expected_credentials_str,
                                         ""), true)).WillOnce(
                                             Return(nullptr));
@@ -547,7 +605,7 @@ MATCHER_P4(M_CheckDeleteStreamRequest, op_code, source_credentials, stream, flag
            && strcmp(((asapo::GenericRequestHeader) (arg->header)).stream, stream) == 0;
 }
 
-TEST_F(ProducerImplTests, DeleteStreamMakesCorerctRequest) {
+TEST_F(ProducerImplTests, DeleteStreamMakesCorrectRequest) {
     producer.SetCredentials(expected_credentials);
     asapo::DeleteStreamOptions expected_options{};
     expected_options.delete_meta = true;
@@ -564,4 +622,29 @@ TEST_F(ProducerImplTests, DeleteStreamMakesCorerctRequest) {
     ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kTimeout));
 }
 
+TEST_F(ProducerImplTests, GetStreamMetaMakesCorrectRequest) {
+    producer.SetCredentials(expected_credentials);
+    EXPECT_CALL(mock_pull, AddRequest_t(M_CheckGetRequest(asapo::kOpcodeGetMeta,
+                                        expected_credentials_str,
+                                        expected_stream), true)).WillOnce(
+                                            Return(nullptr));
+
+    asapo::Error err;
+    producer.GetStreamMeta(expected_stream, 1000, &err);
+    ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kTimeout));
+}
+
+
+TEST_F(ProducerImplTests, GetBeamtimeMetaMakesCorrectRequest) {
+    producer.SetCredentials(expected_credentials);
+    EXPECT_CALL(mock_pull, AddRequest_t(M_CheckGetRequest(asapo::kOpcodeGetMeta,
+                                        expected_credentials_str,
+                                        ""), true)).WillOnce(
+                                            Return(nullptr));
+
+    asapo::Error err;
+    producer.GetBeamtimeMeta(1000, &err);
+    ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kTimeout));
+}
+
 }
diff --git a/producer/api/python/asapo_producer.pxd b/producer/api/python/asapo_producer.pxd
index f78491a2fe48610944078ed7dc8d0996da170b97..e50f762bc05d4b47cb920d36cc6e58e4a7f2d764 100644
--- a/producer/api/python/asapo_producer.pxd
+++ b/producer/api/python/asapo_producer.pxd
@@ -46,6 +46,15 @@ cdef extern from "asapo/asapo_producer.h" namespace "asapo":
   LogLevel LogLevel_Debug "asapo::LogLevel::Debug"
   LogLevel LogLevel_Warning "asapo::LogLevel::Warning"
 
+cdef extern from "asapo/asapo_producer.h" namespace "asapo":
+  cppclass MetaIngestOp:
+    pass
+  MetaIngestOp kInsert "asapo::MetaIngestOp::kInsert"
+  MetaIngestOp kReplace "asapo::MetaIngestOp::kReplace"
+  MetaIngestOp kUpdate "asapo::MetaIngestOp::kUpdate"
+  struct MetaIngestMode:
+    MetaIngestOp op
+    bool upsert
 
 cdef extern from "asapo/asapo_producer.h" namespace "asapo":
   cppclass SourceType:
@@ -110,7 +119,10 @@ cdef extern from "asapo/asapo_producer.h" namespace "asapo" nogil:
         StreamInfo GetLastStream(uint64_t timeout_ms, Error* err)
         Error GetVersionInfo(string* client_info,string* server_info, bool* supported)
         Error DeleteStream(string stream, uint64_t timeout_ms, DeleteStreamOptions options)
-
+        Error SendBeamtimeMetadata(string metadata, MetaIngestMode mode, RequestCallback callback)
+        Error SendStreamMetadata(string metadata, MetaIngestMode mode, string stream, RequestCallback callback)
+        string GetStreamMeta(string stream, uint64_t timeout_ms, Error* err)
+        string GetBeamtimeMeta(uint64_t timeout_ms, Error* err)
 
 cdef extern from "asapo/asapo_producer.h" namespace "asapo":
     uint64_t kDefaultIngestMode
diff --git a/producer/api/python/asapo_producer.pyx.in b/producer/api/python/asapo_producer.pyx.in
index 24d97cb5c3b5a51ed5b3ce05b2e529beaa9f6de4..f681fd636d495403c55920bb580428332bfb8b27 100644
--- a/producer/api/python/asapo_producer.pyx.in
+++ b/producer/api/python/asapo_producer.pyx.in
@@ -34,6 +34,15 @@ cdef bytes _bytes(s):
 
     else:
         raise TypeError("Could not convert to unicode.")
+cdef MetaIngestOp mode_to_c(mode):
+    if mode == 'replace':
+        return kReplace
+    elif mode == 'update':
+        return kUpdate
+    elif mode == 'insert':
+        return kInsert
+    else:
+        raise TypeError("Could not convert to unicode.")
 
 
 class AsapoProducerError(Exception):
@@ -174,7 +183,54 @@ cdef class PyProducer:
         if callback != None:
             Py_XINCREF(<PyObject*>callback)
         return
-
+    def send_stream_meta(self, metadata, mode = 'replace', upsert = True, stream='default', callback=None):
+        """
+         :param stream: stream name, default "default"
+         :type stream: string
+         :param metadata: stream metadata in JSON format
+         :type metadata: string
+         :param mode: metadata ingest mode, one of 'replace' (default), 'update' or 'insert'
+         :type mode: string
+         :param upsert: if True, create the metadata record when it does not exist yet
+         :type upsert: bool
+         :param callback: callback function, default None
+         :type callback: callback(info,err), where info - json string with event header that was used to send data and response, err - error string or None
+         :raises:
+            AsapoWrongInputError: wrong input (authorization, ...)
+            AsapoProducerError: other error
+        """
+        cdef MetaIngestMode mode_c
+        mode_c.op = mode_to_c(mode)
+        mode_c.upsert = upsert
+        err = self.c_producer.get().SendStreamMetadata(_bytes(metadata), mode_c,_bytes(stream),
+              unwrap_callback(<RequestCallbackCython>self.c_callback, <void*>self,<void*>callback if callback != None else NULL))
+        if err:
+            throw_exception(err)
+        if callback != None:
+            Py_XINCREF(<PyObject*>callback)
+    def send_beamtime_meta(self, metadata, mode = 'replace', upsert = True, callback=None):
+        """
+         :param metadata: beamtime metadata in JSON format
+         :type metadata: string
+         :param mode: metadata ingest mode, one of 'replace' (default), 'update' or 'insert'
+         :type mode: string
+         :param upsert: if True, create the metadata record when it does not exist yet
+         :type upsert: bool
+         :param callback: callback function, default None
+         :type callback: callback(info,err), where info - json string with event header that was used to send data and response, err - error string or None
+         :raises:
+            AsapoWrongInputError: wrong input (authorization, ...)
+            AsapoProducerError: other error
+        """
+        cdef MetaIngestMode mode_c
+        mode_c.op = mode_to_c(mode)
+        mode_c.upsert = upsert
+        err = self.c_producer.get().SendBeamtimeMetadata(_bytes(metadata), mode_c,
+              unwrap_callback(<RequestCallbackCython>self.c_callback, <void*>self,<void*>callback if callback != None else NULL))
+        if err:
+            throw_exception(err)
+        if callback != None:
+            Py_XINCREF(<PyObject*>callback)
     def send(self, uint64_t id, exposed_path, data, user_meta=None, dataset=None, ingest_mode = DEFAULT_INGEST_MODE, stream = "default", callback=None):
         """
          :param id: unique data id
@@ -244,6 +300,41 @@ cdef class PyProducer:
             err = self.c_producer.get().DeleteStream(b_stream,timeout_ms,opts)
         if err:
             throw_exception(err)
+    def get_stream_meta(self,stream = 'default', uint64_t timeout_ms = 1000):
+        """
+         :param stream: stream name
+         :type stream: string
+         :param timeout_ms: timeout in milliseconds
+         :type timeout_ms: int
+         :raises:
+            AsapoWrongInputError: wrong input (authorization, ...)
+            AsapoTimeoutError: request not finished for a given timeout
+            AsapoProducerError: other errors
+        """
+        cdef Error err
+        cdef string res
+        cdef string b_stream = _bytes(stream)
+        with nogil:
+            res = self.c_producer.get().GetStreamMeta(b_stream,timeout_ms,&err)
+        if err:
+            throw_exception(err)
+        return json.loads(_str(res) or 'null')
+    def get_beamtime_meta(self, uint64_t timeout_ms = 1000):
+        """
+         :param timeout_ms: timeout in milliseconds
+         :type timeout_ms: int
+         :raises:
+            AsapoWrongInputError: wrong input (authorization, ...)
+            AsapoTimeoutError: request not finished for a given timeout
+            AsapoProducerError: other errors
+        """
+        cdef Error err
+        cdef string res
+        with nogil:
+            res = self.c_producer.get().GetBeamtimeMeta(timeout_ms,&err)
+        if err:
+            throw_exception(err)
+        return json.loads(_str(res) or 'null')
     def stream_info(self, stream = 'default', uint64_t timeout_ms = 1000):
         """
          :param stream: stream name
diff --git a/producer/api/python/dist_linux/CMakeLists.txt b/producer/api/python/dist_linux/CMakeLists.txt
index 1c33dcf6deb8c6e73d6ffef654dcc2ad68bd5490..578201ad36272f1efa751a9a1a290cfbc29feb83 100644
--- a/producer/api/python/dist_linux/CMakeLists.txt
+++ b/producer/api/python/dist_linux/CMakeLists.txt
@@ -51,6 +51,7 @@ ADD_CUSTOM_TARGET(copy_python_dist-producer ALL
         COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_BINARY_DIR}/../asapo_producer.cpp ${CMAKE_CURRENT_BINARY_DIR}/.
         COMMAND ${CMAKE_COMMAND} -E copy_directory ${CMAKE_SOURCE_DIR}/producer/api/cpp/include ${CMAKE_CURRENT_BINARY_DIR}/include
         COMMAND ${CMAKE_COMMAND} -E copy_directory ${ASAPO_CXX_COMMON_INCLUDE_DIR}/asapo/common ${CMAKE_CURRENT_BINARY_DIR}/include/asapo/common
+		COMMAND ${CMAKE_COMMAND} -E copy_directory ${ASAPO_CXX_COMMON_INCLUDE_DIR}/asapo/preprocessor ${CMAKE_CURRENT_BINARY_DIR}/include/asapo/preprocessor
         COMMAND ${CMAKE_COMMAND} -E remove ${CMAKE_CURRENT_BINARY_DIR}/include/common/version.h.in
 	COMMAND ${CMAKE_COMMAND} -E copy_directory ${ASAPO_CXX_COMMON_INCLUDE_DIR}/asapo/logger ${CMAKE_CURRENT_BINARY_DIR}/include/asapo/logger
         COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_BINARY_DIR}/lib
diff --git a/receiver/CMakeLists.txt b/receiver/CMakeLists.txt
index 4a18b189606b27a371224d9a12dead194b711ed8..b743d6131194fc789e4bae3c4208efe9a2d1aa83 100644
--- a/receiver/CMakeLists.txt
+++ b/receiver/CMakeLists.txt
@@ -22,6 +22,7 @@ set(RECEIVER_CORE_FILES
         src/request_handler/request_handler_receive_metadata.cpp
         src/request_handler/request_handler_db_check_request.cpp
         src/request_handler/request_handler_db_delete_stream.cpp
+        src/request_handler/request_handler_db_get_meta.cpp
         src/request_handler/request_factory.cpp
         src/request_handler/request_handler_db.cpp
         src/file_processors/write_file_processor.cpp
@@ -99,6 +100,7 @@ set(TEST_SOURCE_FILES
         unittests/request_handler/test_request_handler_receive_data.cpp
         unittests/request_handler/test_request_handler_receive_metadata.cpp
         unittests/request_handler/test_request_handler_delete_stream.cpp
+        unittests/request_handler/test_request_handler_db_get_meta.cpp
         unittests/statistics/test_statistics_sender_influx_db.cpp
         unittests/statistics/test_statistics_sender_fluentd.cpp
         unittests/mock_receiver_config.cpp
diff --git a/receiver/src/request_handler/request_factory.cpp b/receiver/src/request_handler/request_factory.cpp
index 4279f485adb563261fd899f9d32cf5fbdeee89db..39a2737076ab9819947bc1ecc7afb73417d46350 100644
--- a/receiver/src/request_handler/request_factory.cpp
+++ b/receiver/src/request_handler/request_factory.cpp
@@ -91,6 +91,10 @@ Error RequestFactory::AddHandlersToRequest(std::unique_ptr<Request>& request,
         request->AddHandler(&request_handler_db_last_stream_);
         break;
     }
+    case Opcode::kOpcodeGetMeta: {
+        request->AddHandler(&request_handler_db_get_meta_);
+        break;
+    }
     default:
         return ReceiverErrorTemplates::kInvalidOpCode.Generate();
     }
diff --git a/receiver/src/request_handler/request_factory.h b/receiver/src/request_handler/request_factory.h
index ee371d5aca70e23200079f9a6a3ce5072622cb34..a44d7efaff638a9abe2ecd633675481b6244baaf 100644
--- a/receiver/src/request_handler/request_factory.h
+++ b/receiver/src/request_handler/request_factory.h
@@ -7,6 +7,7 @@
 #include "request_handler_db_stream_info.h"
 #include "request_handler_db_last_stream.h"
 #include "request_handler_db_delete_stream.h"
+#include "request_handler_db_get_meta.h"
 
 namespace asapo {
 
@@ -29,6 +30,7 @@ class RequestFactory {
     RequestHandlerDbDeleteStream request_handler_delete_stream_{kDBDataCollectionNamePrefix};
     RequestHandlerDbLastStream request_handler_db_last_stream_{kDBDataCollectionNamePrefix};
     RequestHandlerDbMetaWrite request_handler_db_meta_write_{kDBMetaCollectionName};
+    RequestHandlerDbGetMeta request_handler_db_get_meta_{kDBMetaCollectionName};
     RequestHandlerAuthorize request_handler_authorize_;
     RequestHandlerDbCheckRequest request_handler_db_check_{kDBDataCollectionNamePrefix};;
     SharedCache cache_;
diff --git a/receiver/src/request_handler/request_handler_db_get_meta.cpp b/receiver/src/request_handler/request_handler_db_get_meta.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..b046aa1195d0eb7511983370266895c56b45245c
--- /dev/null
+++ b/receiver/src/request_handler/request_handler_db_get_meta.cpp
@@ -0,0 +1,38 @@
+#include "request_handler_db_get_meta.h"
+#include "../receiver_config.h"
+#include <asapo/database/db_error.h>
+
+namespace asapo {
+
+RequestHandlerDbGetMeta::RequestHandlerDbGetMeta(std::string collection_name_prefix) : RequestHandlerDb(
+        std::move(collection_name_prefix)) {
+}
+
+Error RequestHandlerDbGetMeta::ProcessRequest(Request* request) const {
+    if (auto err = RequestHandlerDb::ProcessRequest(request) ) {
+        return err;
+    }
+
+    auto stream_name = request->GetStream();
+
+    std::string metaid = stream_name.empty() ? "bt" : "st_" + stream_name;
+    std::string meta;
+    auto err =  db_client__->GetMetaFromDb(kDBMetaCollectionName, metaid, &meta);
+
+    bool no_error = err == nullptr;
+    if (err == DBErrorTemplates::kNoRecord) {
+        no_error = true;
+    }
+
+    if (no_error) {
+        log__->Debug(std::string{"get meta for "} + (stream_name.empty() ? "beamtime" : stream_name) + " in " +
+                     db_name_ + " at " + GetReceiverConfig()->database_uri);
+        request->SetResponseMessage(meta, ResponseMessageType::kInfo);
+        return nullptr;
+    }
+
+    return DBErrorToReceiverError(err);
+}
+
+
+}
\ No newline at end of file
diff --git a/receiver/src/request_handler/request_handler_db_get_meta.h b/receiver/src/request_handler/request_handler_db_get_meta.h
new file mode 100644
index 0000000000000000000000000000000000000000..faeddc5f6b942a05fcae8354e19533a1469c763b
--- /dev/null
+++ b/receiver/src/request_handler/request_handler_db_get_meta.h
@@ -0,0 +1,18 @@
+#include "request_handler_db.h"
+#include "../request.h"
+
+#ifndef ASAPO_RECEIVER_SRC_REQUEST_HANDLER_REQUEST_HANDLER_DB_GET_META_H_
+#define ASAPO_RECEIVER_SRC_REQUEST_HANDLER_REQUEST_HANDLER_DB_GET_META_H_
+
+namespace asapo {
+
+class RequestHandlerDbGetMeta final: public RequestHandlerDb {
+  public:
+    RequestHandlerDbGetMeta(std::string collection_name_prefix);
+    Error ProcessRequest(Request* request) const override;
+};
+
+}
+
+
+#endif //ASAPO_RECEIVER_SRC_REQUEST_HANDLER_REQUEST_HANDLER_DB_GET_META_H_
diff --git a/receiver/src/request_handler/request_handler_db_meta_write.cpp b/receiver/src/request_handler/request_handler_db_meta_write.cpp
index 3f1c89f0ff175626a3ad1a3cda47ece8ee32daf2..c1b096af12fcb93eeb2108f5b6b0a3602ef1672a 100644
--- a/receiver/src/request_handler/request_handler_db_meta_write.cpp
+++ b/receiver/src/request_handler/request_handler_db_meta_write.cpp
@@ -3,7 +3,7 @@
 #include "../receiver_config.h"
 #include "../receiver_logger.h"
 #include "asapo/io/io_factory.h"
-
+#include "asapo/common/internal/version.h"
 
 namespace asapo {
 
@@ -14,13 +14,31 @@ Error RequestHandlerDbMetaWrite::ProcessRequest(Request* request) const {
 
     auto size = request->GetDataSize();
     auto meta = (uint8_t*)request->GetData();
-    auto meta_id = request->GetDataID();
+    auto api_version = VersionToNumber(request->GetApiVersion());
+
+    std::string stream;
+    MetaIngestMode mode;
+    if (api_version < 3) {
+        // old approach, deprecated; to be removed after 01.07.2022
+        mode.op = MetaIngestOp::kReplace;
+        mode.upsert = true;
+    } else {
+        stream = request->GetStream();
+        mode.Decode(request->GetCustomData()[kPosMetaIngestMode]);
+    }
 
-    auto err =  db_client__->Upsert(collection_name_prefix_, meta_id, meta, size);
+    auto err =  db_client__->InsertMeta(collection_name_prefix_, stream.empty() ? "bt" : "st_" + stream, meta, size, mode);
     if (!err) {
-        log__->Debug(std::string{"insert beamtime meta"} + " to " + collection_name_prefix_ + " in " +
-                     db_name_ +
-                     " at " + GetReceiverConfig()->database_uri);
+        if (stream.empty()) {
+            log__->Debug(std::string{"insert beamtime meta"} + " to " + collection_name_prefix_ + " in " +
+                         db_name_ +
+                         " at " + GetReceiverConfig()->database_uri);
+        } else {
+            log__->Debug(std::string{"insert stream meta for "} +stream + " to " + collection_name_prefix_ + " in " +
+                         db_name_ +
+                         " at " + GetReceiverConfig()->database_uri);
+        }
+
     }
     return DBErrorToReceiverError(err);
 }
diff --git a/receiver/unittests/request_handler/test_request_factory.cpp b/receiver/unittests/request_handler/test_request_factory.cpp
index 03d8bf4495f279493a1c9b90874c4a79b0e73ae8..64029d2ee34c89dd6071bce0a13aff4730d4bc0d 100644
--- a/receiver/unittests/request_handler/test_request_factory.cpp
+++ b/receiver/unittests/request_handler/test_request_factory.cpp
@@ -16,6 +16,7 @@
 #include "../../src/request_handler/request_handler_db_stream_info.h"
 #include "../../src/request_handler/request_handler_db_last_stream.h"
 #include "../../src/request_handler/request_handler_db_delete_stream.h"
+#include "../../src/request_handler/request_handler_db_get_meta.h"
 
 #include "../../src/request_handler/request_handler_receive_data.h"
 #include "../../src/request_handler/request_handler_receive_metadata.h"
@@ -225,5 +226,16 @@ TEST_F(FactoryTests, DeleteStreamRequest) {
     ASSERT_THAT(dynamic_cast<const asapo::RequestHandlerDbDeleteStream*>(request->GetListHandlers()[1]), Ne(nullptr));
 }
 
+TEST_F(FactoryTests, GetMetaRequest) {
+    generic_request_header.op_code = asapo::Opcode::kOpcodeGetMeta;
+    auto request = factory.GenerateRequest(generic_request_header, 1, origin_uri, &err);
+    ASSERT_THAT(err, Eq(nullptr));
+    ASSERT_THAT(request->GetListHandlers().size(), Eq(2));
+    ASSERT_THAT(dynamic_cast<const asapo::RequestHandlerAuthorize*>(request->GetListHandlers()[0]), Ne(nullptr));
+    ASSERT_THAT(dynamic_cast<const asapo::RequestHandlerDbGetMeta*>(request->GetListHandlers()[1]), Ne(nullptr));
+}
+
+
+
 
 }
diff --git a/receiver/unittests/request_handler/test_request_handler_db_get_meta.cpp b/receiver/unittests/request_handler/test_request_handler_db_get_meta.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..bfb4a89e527069cdf111d72c815539d8be11b84c
--- /dev/null
+++ b/receiver/unittests/request_handler/test_request_handler_db_get_meta.cpp
@@ -0,0 +1,128 @@
+#include <gtest/gtest.h>
+#include <gmock/gmock.h>
+#include <asapo/database/db_error.h>
+
+#include "asapo/unittests/MockIO.h"
+#include "asapo/unittests/MockDatabase.h"
+#include "asapo/unittests/MockLogger.h"
+
+#include "../../src/receiver_error.h"
+#include "../../src/request.h"
+#include "../../src/request_handler/request_factory.h"
+#include "../../src/request_handler/request_handler.h"
+#include "../../src/request_handler/request_handler_db_get_meta.h"
+#include "../../../common/cpp/src/database/mongodb_client.h"
+
+#include "../mock_receiver_config.h"
+#include "asapo/common/data_structs.h"
+#include "asapo/common/networking.h"
+#include "../receiver_mocking.h"
+
+using asapo::MockRequest;
+using asapo::MessageMeta;
+using ::testing::Test;
+using ::testing::Return;
+using ::testing::ReturnRef;
+using ::testing::_;
+using ::testing::DoAll;
+using ::testing::SetArgReferee;
+using ::testing::Gt;
+using ::testing::Eq;
+using ::testing::Ne;
+using ::testing::Mock;
+using ::testing::NiceMock;
+using ::testing::InSequence;
+using ::testing::SetArgPointee;
+using ::testing::AllOf;
+using ::testing::HasSubstr;
+
+
+using ::asapo::Error;
+using ::asapo::ErrorInterface;
+using ::asapo::FileDescriptor;
+using ::asapo::SocketDescriptor;
+using ::asapo::MockIO;
+using asapo::Request;
+using asapo::RequestHandlerDbGetMeta;
+using ::asapo::GenericRequestHeader;
+
+using asapo::MockDatabase;
+using asapo::RequestFactory;
+using asapo::SetReceiverConfig;
+using asapo::ReceiverConfig;
+
+
+namespace {
+
+class DbMetaGetMetaTests : public Test {
+  public:
+    RequestHandlerDbGetMeta handler{asapo::kDBDataCollectionNamePrefix};
+    std::unique_ptr<NiceMock<MockRequest>> mock_request;
+    NiceMock<MockDatabase> mock_db;
+    NiceMock<asapo::MockLogger> mock_logger;
+    ReceiverConfig config;
+    std::string expected_beamtime_id = "beamtime_id";
+    std::string expected_data_source = "source";
+    std::string expected_stream = "stream";
+    std::string expected_meta = "meta";
+    void SetUp() override {
+        GenericRequestHeader request_header;
+        handler.db_client__ = std::unique_ptr<asapo::Database> {&mock_db};
+        handler.log__ = &mock_logger;
+        mock_request.reset(new NiceMock<MockRequest> {request_header, 1, "", nullptr});
+        ON_CALL(*mock_request, GetBeamtimeId()).WillByDefault(ReturnRef(expected_beamtime_id));
+    }
+    void TearDown() override {
+        handler.db_client__.release();
+    }
+    void ExpectGet(bool stream, const asapo::DBErrorTemplate* errorTemplate) {
+        SetReceiverConfig(config, "none");
+        EXPECT_CALL(*mock_request, GetDataSource()).WillOnce(ReturnRef(expected_data_source));
+        if (stream) {
+            EXPECT_CALL(*mock_request, GetStream()).WillOnce(Return(expected_stream));
+        }
+
+        EXPECT_CALL(mock_db, Connect_t(config.database_uri, expected_beamtime_id + "_" + expected_data_source)).
+        WillOnce(testing::Return(nullptr));
+        EXPECT_CALL(mock_db, GetMetaFromDb_t("meta", stream ? "st_" + expected_stream : "bt", _)).
+        WillOnce(DoAll(
+                     SetArgPointee<2>(expected_meta),
+                     testing::Return(errorTemplate == nullptr ? nullptr : errorTemplate->Generate().release())
+                 ));
+        if (errorTemplate == nullptr) {
+            EXPECT_CALL(*mock_request, SetResponseMessage(expected_meta, asapo::ResponseMessageType::kInfo));
+            EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("meta"),
+                                                 HasSubstr(config.database_uri),
+                                                 HasSubstr(expected_data_source),
+                                                 HasSubstr(stream ? expected_stream : "beamtime"),
+                                                 HasSubstr(expected_beamtime_id)
+                                                )
+                                          )
+                       );
+        }
+
+    }
+};
+
+
+
+TEST_F(DbMetaGetMetaTests, GetBeamtimeMetaOk) {
+    ExpectGet(false, nullptr);
+    auto err = handler.ProcessRequest(mock_request.get());
+    ASSERT_THAT(err, Eq(nullptr));
+}
+
+
+TEST_F(DbMetaGetMetaTests, GetStreamMetaOk) {
+    ExpectGet(true, nullptr);
+    auto err = handler.ProcessRequest(mock_request.get());
+    ASSERT_THAT(err, Eq(nullptr));
+}
+
+TEST_F(DbMetaGetMetaTests, GetStreamMetaError) {
+    ExpectGet(true, &asapo::DBErrorTemplates::kDBError);
+    auto err = handler.ProcessRequest(mock_request.get());
+    ASSERT_THAT(err, Ne(nullptr));
+}
+
+}
diff --git a/receiver/unittests/request_handler/test_request_handler_db_meta_writer.cpp b/receiver/unittests/request_handler/test_request_handler_db_meta_writer.cpp
index ef677673aa55abcf99bb13dee9b078c796652c4b..56d3ad7a8be794d60d3bc814bc39becda3ac63a2 100644
--- a/receiver/unittests/request_handler/test_request_handler_db_meta_writer.cpp
+++ b/receiver/unittests/request_handler/test_request_handler_db_meta_writer.cpp
@@ -15,6 +15,7 @@
 #include "../mock_receiver_config.h"
 #include "asapo/common/data_structs.h"
 #include "asapo/common/networking.h"
+#include "asapo/common/internal/version.h"
 #include "../receiver_mocking.h"
 
 using asapo::MockRequest;
@@ -61,13 +62,15 @@ class DbMetaWriterHandlerTests : public Test {
     NiceMock<MockDatabase> mock_db;
     NiceMock<asapo::MockLogger> mock_logger;
     ReceiverConfig config;
-    std::string expected_beamtime_id = "beamtime_id";
     std::string expected_data_source = "source";
+    std::string expected_beamtime_id = "beamtime";
+    std::string expected_stream = "stream";
     std::string meta_str =
         R"("info":"stat","int_data":0,"float_data":0.1,"bool":false)";
     const uint8_t* expected_meta = reinterpret_cast<const uint8_t*>(meta_str.c_str());
     uint64_t expected_meta_size = meta_str.size();
-    uint64_t expected_meta_id = 0;
+    std::string expected_meta_name = "bt";
+    uint64_t expected_custom_data[asapo::kNCustomParams] {0, 0, 0};
     void SetUp() override {
         GenericRequestHeader request_header;
         request_header.data_id = 0;
@@ -80,22 +83,21 @@ class DbMetaWriterHandlerTests : public Test {
         handler.db_client__.release();
     }
 
+    void ExpectRequestParams(const std::string& ver, uint64_t mode, const std::string& stream);
+
 
 };
 
+MATCHER_P(M_CheckIngestMode, mode, "") {
+    return arg.op ==  mode.op && arg.upsert == mode.upsert;
+}
 
-TEST_F(DbMetaWriterHandlerTests, CallsUpdate) {
-    SetReceiverConfig(config, "none");
-
-    EXPECT_CALL(*mock_request, GetBeamtimeId())
-    .WillOnce(ReturnRef(expected_beamtime_id))
-    ;
 
+void DbMetaWriterHandlerTests::ExpectRequestParams(const std::string& ver, uint64_t mode, const std::string& stream) {
     EXPECT_CALL(*mock_request, GetDataSource())
     .WillOnce(ReturnRef(expected_data_source))
     ;
 
-
     EXPECT_CALL(mock_db, Connect_t(config.database_uri, expected_beamtime_id + "_" + expected_data_source)).
     WillOnce(testing::Return(nullptr));
 
@@ -108,7 +110,28 @@ TEST_F(DbMetaWriterHandlerTests, CallsUpdate) {
     .WillOnce(Return((void*)expected_meta))
     ;
 
-    EXPECT_CALL(mock_db, Upsert_t(expected_collection_name, expected_meta_id, expected_meta, expected_meta_size)).
+    EXPECT_CALL(*mock_request, GetApiVersion())
+    .WillOnce(Return(ver))
+    ;
+
+    if (mode > 0) {
+        EXPECT_CALL(*mock_request, GetStream())
+        .WillOnce(Return(stream))
+        ;
+        expected_custom_data[asapo::kPosMetaIngestMode] = mode;
+        EXPECT_CALL(*mock_request, GetCustomData_t()).WillOnce(Return(expected_custom_data));
+    }
+}
+
+TEST_F(DbMetaWriterHandlerTests, CallsIngestBeamtimeMetaOldVersion) {
+    SetReceiverConfig(config, "none");
+
+    ExpectRequestParams("v0.2", 0, "");
+
+    asapo::MetaIngestMode expected_mode{asapo::MetaIngestOp::kReplace, true};
+
+    EXPECT_CALL(mock_db, InsertMeta_t(expected_collection_name, "bt", expected_meta, expected_meta_size,
+                                      M_CheckIngestMode(expected_mode))).
     WillOnce(testing::Return(nullptr));
 
     EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("insert beamtime meta"),
@@ -122,4 +145,50 @@ TEST_F(DbMetaWriterHandlerTests, CallsUpdate) {
     handler.ProcessRequest(mock_request.get());
 }
 
+TEST_F(DbMetaWriterHandlerTests, CallsIngestBeamtimeMeta) {
+    SetReceiverConfig(config, "none");
+
+    ExpectRequestParams(asapo::GetReceiverApiVersion(), 11, "");
+
+
+    asapo::MetaIngestMode expected_mode{asapo::MetaIngestOp::kInsert, true};
+    EXPECT_CALL(mock_db, InsertMeta_t(expected_collection_name, "bt", expected_meta, expected_meta_size,
+                                      M_CheckIngestMode(expected_mode))).
+    WillOnce(testing::Return(nullptr));
+
+    EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("insert beamtime meta"),
+                                         HasSubstr(expected_beamtime_id),
+                                         HasSubstr(config.database_uri),
+                                         HasSubstr(expected_collection_name)
+                                        )
+                                  )
+               );
+
+    handler.ProcessRequest(mock_request.get());
+}
+
+TEST_F(DbMetaWriterHandlerTests, CallsIngestStreamMeta) {
+    SetReceiverConfig(config, "none");
+
+    ExpectRequestParams(asapo::GetReceiverApiVersion(), 13, expected_stream);
+
+    asapo::MetaIngestMode expected_mode{asapo::MetaIngestOp::kUpdate, true};
+    EXPECT_CALL(mock_db, InsertMeta_t(expected_collection_name, "st_" + expected_stream, expected_meta, expected_meta_size,
+                                      M_CheckIngestMode(expected_mode))).
+    WillOnce(testing::Return(nullptr));
+
+    EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("insert stream meta"),
+                                         HasSubstr(expected_beamtime_id),
+                                         HasSubstr(expected_stream),
+                                         HasSubstr(config.database_uri),
+                                         HasSubstr(expected_collection_name)
+                                        )
+                                  )
+               );
+
+    handler.ProcessRequest(mock_request.get());
+}
+
+
+
 }
diff --git a/tests/automatic/broker/get_meta/check_linux.sh b/tests/automatic/broker/get_meta/check_linux.sh
index 100138a7a35b130877e7ce570fd7bd363e4b7c88..c1d9e4d45302696d854bf333747a1945f3c1f4d1 100644
--- a/tests/automatic/broker/get_meta/check_linux.sh
+++ b/tests/automatic/broker/get_meta/check_linux.sh
@@ -11,7 +11,8 @@ Cleanup() {
 	echo "db.dropDatabase()" | mongo ${database_name}
 }
 
-echo 'db.meta.insert({"_id":0,"data":"test"})' | mongo ${database_name}
+echo 'db.meta.insert({"_id":"bt","meta":{"data":"test_bt"}})' | mongo ${database_name}
+echo 'db.meta.insert({"_id":"st_test","meta":{"data":"test_st"}})' | mongo ${database_name}
 
 token=$BT_TEST_TOKEN
 
@@ -19,6 +20,7 @@ broker=`curl --silent 127.0.0.1:8400/asapo-discovery/v0.1/asapo-broker?protocol=
 echo found broker at $broker
 
 
-curl -v  --silent $broker/v0.2/beamtime/test/detector/default/0/meta/0?token=$token --stderr - | tee /dev/stderr | grep '"data":"test"'
+curl -v  --silent $broker/v0.2/beamtime/test/detector/default/0/meta/0?token=$token --stderr - | tee /dev/stderr | grep '"data":"test_bt"'
+curl -v  --silent $broker/v0.2/beamtime/test/detector/test/0/meta/1?token=$token --stderr - | tee /dev/stderr | grep '"data":"test_st"'
 curl -v  --silent $broker/v0.2/beamtime/test/detector/default/0/meta/1?token=$token --stderr - | tee /dev/stderr | grep 'no documents'
 
diff --git a/tests/automatic/broker/get_meta/check_windows.bat b/tests/automatic/broker/get_meta/check_windows.bat
index d9b79672ac65a61570bac60e1ade22bb3b9a8964..0631edfaec38a1bd5057408d94cd274826d3b378 100644
--- a/tests/automatic/broker/get_meta/check_windows.bat
+++ b/tests/automatic/broker/get_meta/check_windows.bat
@@ -1,7 +1,8 @@
 SET database_name=data_detector
 SET mongo_exe="c:\Program Files\MongoDB\Server\4.2\bin\mongo.exe"
 
-echo db.meta.insert({"_id":0}) | %mongo_exe% %database_name%  || goto :error
+echo db.meta.insert({"_id":"bt","meta":{"data":"test_bt"}}) | %mongo_exe% %database_name%  || goto :error
+echo db.meta.insert({"_id":"st_test","meta":{"data":"test_st"}}) | %mongo_exe% %database_name%  || goto :error
 
 curl --silent 127.0.0.1:8400/asapo-discovery/v0.1/asapo-broker?protocol=v0.3 > broker
 set /P broker=< broker
@@ -9,7 +10,8 @@ set /P broker=< broker
 set token=%BT_DATA_TOKEN%
 
 
-C:\Curl\curl.exe -v  --silent %broker%/v0.2/beamtime/data/detector/default/0/meta/0?token=%token% --stderr - | findstr /c:\"_id\":0  || goto :error
+C:\Curl\curl.exe -v  --silent %broker%/v0.2/beamtime/data/detector/default/0/meta/0?token=%token% --stderr - | findstr /c:\"data\":\"test_bt\"  || goto :error
+C:\Curl\curl.exe -v  --silent %broker%/v0.2/beamtime/data/detector/test/0/meta/1?token=%token% --stderr - | findstr /c:\"data\":\"test_st\"  || goto :error
 C:\Curl\curl.exe -v  --silent %broker%/v0.2/beamtime/data/detector/default/0/meta/1?token=%token% --stderr - | findstr /c:"no documents"  || goto :error
 
 
diff --git a/tests/automatic/consumer/consumer_api/CMakeLists.txt b/tests/automatic/consumer/consumer_api/CMakeLists.txt
index 8276b6c3efa9e59440a081bbad8a2de407a318d5..d9af8868605ed2e0868162df93fae94b06ef8600 100644
--- a/tests/automatic/consumer/consumer_api/CMakeLists.txt
+++ b/tests/automatic/consumer/consumer_api/CMakeLists.txt
@@ -21,5 +21,3 @@ target_link_libraries(${TARGET_NAME_C} test_common asapo-consumer)
 add_script_test("${TARGET_NAME}" "$<TARGET_FILE:${TARGET_NAME}>")
 add_script_test("${TARGET_NAME_C}" "$<TARGET_FILE:${TARGET_NAME_C}>")
 
-
-
diff --git a/tests/automatic/consumer/consumer_api_python/check_linux.sh b/tests/automatic/consumer/consumer_api_python/check_linux.sh
index 85fb840de7746600ec25e512a8af8dc50c0d2919..04ea2c9d197c2e75f3c2cf87fdd85dd14bd2c758 100644
--- a/tests/automatic/consumer/consumer_api_python/check_linux.sh
+++ b/tests/automatic/consumer/consumer_api_python/check_linux.sh
@@ -28,6 +28,10 @@ done
 
 echo 'db.data_streamfts.insert({"_id":'1',"size":0,"name":"'1'","timestamp":1000,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"test":10}})' | mongo ${database_name}
 
+echo 'db.meta.insert({"_id":"bt","meta":{"data":"test_bt"}})' | mongo ${database_name}
+echo 'db.meta.insert({"_id":"st_test","meta":{"data":"test_st"}})' | mongo ${database_name}
+
+
 for i in `seq 1 5`;
 do
 	echo 'db.data_stream1.insert({"_id":'$i',"size":6,"name":"'1$i'","timestamp":2000,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"test":10}})' | mongo ${database_name}
diff --git a/tests/automatic/consumer/consumer_api_python/check_windows.bat b/tests/automatic/consumer/consumer_api_python/check_windows.bat
index 68824528f2e442d9d63594785785a7e0b38ea9cf..5af06fdd7bb4a7f34d0602642760b2c41bfda75a 100644
--- a/tests/automatic/consumer/consumer_api_python/check_windows.bat
+++ b/tests/automatic/consumer/consumer_api_python/check_windows.bat
@@ -14,6 +14,10 @@ for /l %%x in (1, 1, 5) do echo db.data_default.insert({"_id":%%x,"size":6,"name
 
 echo db.data_streamfts.insert({"_id":1,"size":0,"name":"1","timestamp":1000,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"test":10}}) | %mongo_exe% %database_name%  || goto :error
 
+echo db.meta.insert({"_id":"bt","meta":{"data":"test_bt"}}) | %mongo_exe% %database_name%  || goto :error
+echo db.meta.insert({"_id":"st_test","meta":{"data":"test_st"}}) | %mongo_exe% %database_name%  || goto :error
+
+
 for /l %%x in (1, 1, 5) do echo db.data_stream1.insert({"_id":%%x,"size":6,"name":"1%%x","timestamp":2000,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"test":10}}) | %mongo_exe% %database_name%  || goto :error
 
 for /l %%x in (1, 1, 5) do echo db.data_stream2.insert({"_id":%%x,"size":6,"name":"2%%x","timestamp":3000,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"test":10}}) | %mongo_exe% %database_name%  || goto :error
diff --git a/tests/automatic/consumer/consumer_api_python/consumer_api.py b/tests/automatic/consumer/consumer_api_python/consumer_api.py
index 54baf5d799b45790d3ca803aeaa3f78927c29ed6..18a8dd3814210a20761dd626bf46325519fdb00c 100644
--- a/tests/automatic/consumer/consumer_api_python/consumer_api.py
+++ b/tests/automatic/consumer/consumer_api_python/consumer_api.py
@@ -232,7 +232,21 @@ def check_single(consumer, group_id):
     else:
         exit_on_noerr("wrong query")
 
-    # delete stream
+    # metadata
+    bt_meta = consumer.get_beamtime_meta()
+    assert_eq(bt_meta['data'], 'test_bt', "beamtime meta ")
+    st_meta = consumer.get_stream_meta("test")
+    assert_eq(st_meta['data'], 'test_st', "stream meta ")
+
+    try:
+        consumer.get_stream_meta("notexist")
+    except asapo_consumer.AsapoNoDataError as err:
+        print(err)
+        pass
+    else:
+        exit_on_noerr("should be wrong input on non existing stream")
+
+    # delete stream
 
     consumer.delete_stream(stream='default')
     try:
diff --git a/tests/automatic/mongo_db/CMakeLists.txt b/tests/automatic/mongo_db/CMakeLists.txt
index e024474149ced3beb3179fa80d60a1c13a89891f..9357532e7dd02570819d5839fc5426b9dbb327b5 100644
--- a/tests/automatic/mongo_db/CMakeLists.txt
+++ b/tests/automatic/mongo_db/CMakeLists.txt
@@ -3,5 +3,5 @@ CMAKE_MINIMUM_REQUIRED(VERSION 3.7) # needed for fixtures
 add_subdirectory(connect)
 add_subdirectory(insert_retrieve)
 add_subdirectory(insert_retrieve_dataset)
-add_subdirectory(upsert)
+add_subdirectory(meta)
 
diff --git a/tests/automatic/mongo_db/insert_retrieve/insert_retrieve_mongodb.cpp b/tests/automatic/mongo_db/insert_retrieve/insert_retrieve_mongodb.cpp
index c6596be84bff2e03b63d2a50c4e8b271c5cdd622..d21e7051fc7a4d70efb5aec0a0661e088a2187f1 100644
--- a/tests/automatic/mongo_db/insert_retrieve/insert_retrieve_mongodb.cpp
+++ b/tests/automatic/mongo_db/insert_retrieve/insert_retrieve_mongodb.cpp
@@ -98,7 +98,6 @@ int main(int argc, char* argv[]) {
         err = db_new.GetById(std::string("data_") + stream_name, 0, &fi_db);
         Assert(err, "No record");
 
-
         asapo::StreamInfo info;
 
         err = db.GetStreamInfo(std::string("data_") + stream_name, &info);
diff --git a/tests/automatic/mongo_db/upsert/CMakeLists.txt b/tests/automatic/mongo_db/meta/CMakeLists.txt
similarity index 61%
rename from tests/automatic/mongo_db/upsert/CMakeLists.txt
rename to tests/automatic/mongo_db/meta/CMakeLists.txt
index 67061b280c56ebcb325053298f2450c240551cb8..609cd1581b9f6d943d7599cbb43e2ff002aea0dd 100644
--- a/tests/automatic/mongo_db/upsert/CMakeLists.txt
+++ b/tests/automatic/mongo_db/meta/CMakeLists.txt
@@ -1,5 +1,5 @@
-set(TARGET_NAME upsert_mongodb)
-set(SOURCE_FILES upsert_mongodb.cpp)
+set(TARGET_NAME meta_mongodb)
+set(SOURCE_FILES meta_mongodb.cpp)
 
 
 ################################
@@ -12,9 +12,9 @@ target_include_directories(${TARGET_NAME} PUBLIC ${ASAPO_CXX_COMMON_INCLUDE_DIR}
 ################################
 # Testing
 ################################
-add_integration_test(${TARGET_NAME} upsertOK "OK" "OK")
-add_integration_test(${TARGET_NAME} upsertFailsWhenNotConnected
+add_integration_test(${TARGET_NAME} metaOK "OK" "OK")
+add_integration_test(${TARGET_NAME} metaFailsWhenNotConnected
         "Notconnected"
         "Notconnected")
-add_integration_test(${TARGET_NAME} upsertFailsOnWrongMeta "parseerror" "parseerror")
+add_integration_test(${TARGET_NAME} metaFailsOnWrongMeta "parseerror" "parseerror")
 add_test_cleanup(${TARGET_NAME})
diff --git a/tests/automatic/mongo_db/upsert/cleanup_linux.sh b/tests/automatic/mongo_db/meta/cleanup_linux.sh
similarity index 100%
rename from tests/automatic/mongo_db/upsert/cleanup_linux.sh
rename to tests/automatic/mongo_db/meta/cleanup_linux.sh
diff --git a/tests/automatic/mongo_db/upsert/cleanup_windows.bat b/tests/automatic/mongo_db/meta/cleanup_windows.bat
similarity index 100%
rename from tests/automatic/mongo_db/upsert/cleanup_windows.bat
rename to tests/automatic/mongo_db/meta/cleanup_windows.bat
diff --git a/tests/automatic/mongo_db/meta/meta_mongodb.cpp b/tests/automatic/mongo_db/meta/meta_mongodb.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..6a9f07b0c87eda088640d247e9c463698e989b78
--- /dev/null
+++ b/tests/automatic/mongo_db/meta/meta_mongodb.cpp
@@ -0,0 +1,101 @@
+#include <iostream>
+#include <chrono>
+
+#include "../../../common/cpp/src/database/mongodb_client.h"
+#include "testing.h"
+#include "asapo/database/db_error.h"
+
+using asapo::Error;
+
+void Assert(const Error& error, const std::string& expect) {
+    std::string result;
+    if (error == nullptr) {
+        result = "OK";
+    } else {
+        result = error->Explain();
+    }
+    M_AssertContains(result, expect);
+}
+
+struct Args {
+    std::string keyword;
+};
+
+Args GetArgs(int argc, char* argv[]) {
+    if (argc != 2) {
+        std::cout << "Wrong number of arguments" << std::endl;
+        exit(EXIT_FAILURE);
+    }
+    return Args{argv[1]};
+}
+
+
+int main(int argc, char* argv[]) {
+    auto args = GetArgs(argc, argv);
+    asapo::MongoDBClient db;
+
+    std::string json;
+    if (args.keyword == "parseerror") {
+        json = R"("id1":{"test1":2}})";
+    } else {
+        json = R"({"id1":{"test1":2}})";
+    }
+
+    auto stream_name = R"(stream/test_/\ ."$)";
+
+
+    if (args.keyword != "Notconnected") {
+        db.Connect("127.0.0.1", "test");
+    }
+
+    auto mode = asapo::MetaIngestMode{asapo::MetaIngestOp::kReplace, true};
+    auto err = db.InsertMeta("meta", "0", reinterpret_cast<const uint8_t*>(json.c_str()), json.size(), mode);
+    if (err) {
+        std::cout << err->Explain() << std::endl;
+    }
+    Assert(err, args.keyword);
+
+    err = db.InsertMeta("meta", "0", reinterpret_cast<const uint8_t*>(json.c_str()), json.size(), mode);
+    if (err) {
+        std::cout << err->Explain() << std::endl;
+    }
+    Assert(err, args.keyword);
+
+    if (args.keyword == "OK") {
+        asapo::MetaIngestMode mode{asapo::MetaIngestOp::kInsert, false};
+        std::string meta = R"({"data":"test","data1":"test1","embedded":{"edata":1}})";
+        err =
+            db.InsertMeta("meta", stream_name, reinterpret_cast<const uint8_t*>(meta.c_str()), meta.size(), mode);
+        M_AssertEq(nullptr, err);
+        err =
+            db.InsertMeta("meta", stream_name, reinterpret_cast<const uint8_t*>(meta.c_str()), meta.size(), mode);
+        M_AssertTrue(err == asapo::DBErrorTemplates::kDuplicateID);
+        mode.op = asapo::MetaIngestOp::kReplace;
+        err =
+            db.InsertMeta("meta", stream_name, reinterpret_cast<const uint8_t*>(meta.c_str()), meta.size(), mode);
+        M_AssertEq(nullptr, err);
+        err = db.InsertMeta("meta", "notexist", reinterpret_cast<const uint8_t*>(meta.c_str()), meta.size(), mode);
+        M_AssertTrue(err == asapo::DBErrorTemplates::kWrongInput);
+
+        std::string meta_res;
+        err = db.GetMetaFromDb("meta", "0", &meta_res);
+        M_AssertEq(meta_res, json);
+
+
+        std::string mod_meta = R"({"data":"newtest","embedded":{"edata":2}})";
+        std::string expected_meta = R"({"data":"newtest","data1":"test1","embedded":{"edata":2}})";
+        mode.op = asapo::MetaIngestOp::kUpdate;
+        err = db.InsertMeta("meta", stream_name, reinterpret_cast<const uint8_t*>(mod_meta.c_str()), mod_meta.size(), mode);
+        M_AssertEq(nullptr, err);
+        err = db.InsertMeta("meta", stream_name, reinterpret_cast<const uint8_t*>(mod_meta.c_str()), mod_meta.size(), mode);
+        M_AssertEq(nullptr, err);
+
+        err = db.GetMetaFromDb("meta", stream_name, &meta_res);
+        M_AssertEq(nullptr, err);
+        M_AssertEq(expected_meta, meta_res);
+
+        db.DeleteStream(stream_name);
+    }
+
+    return 0;
+}
diff --git a/tests/automatic/mongo_db/upsert/upsert_mongodb.cpp b/tests/automatic/mongo_db/upsert/upsert_mongodb.cpp
deleted file mode 100644
index f1420bcf66c8c230edacc789edd3b3645874e16e..0000000000000000000000000000000000000000
--- a/tests/automatic/mongo_db/upsert/upsert_mongodb.cpp
+++ /dev/null
@@ -1,64 +0,0 @@
-#include <iostream>
-#include <chrono>
-
-#include "../../../common/cpp/src/database/mongodb_client.h"
-#include "testing.h"
-
-using asapo::Error;
-
-void Assert(const Error& error, const std::string& expect) {
-    std::string result;
-    if (error == nullptr) {
-        result = "OK";
-    } else {
-        result = error->Explain();
-    }
-    M_AssertContains(result, expect);
-}
-
-struct Args {
-    std::string keyword;
-};
-
-Args GetArgs(int argc, char* argv[]) {
-    if (argc != 2) {
-        std::cout << "Wrong number of arguments" << std::endl;
-        exit(EXIT_FAILURE);
-    }
-    return Args{argv[1]};
-}
-
-
-int main(int argc, char* argv[]) {
-    auto args = GetArgs(argc, argv);
-    asapo::MongoDBClient db;
-
-    std::string json;
-    if (args.keyword == "parseerror") {
-        json = R"("id1":{"test1":2}})";
-    } else {
-        json = R"({"id1":{"test1":2}})";
-    }
-
-
-    if (args.keyword != "Notconnected") {
-        db.Connect("127.0.0.1", "test");
-    }
-
-    auto err = db.Upsert("meta", 0, reinterpret_cast<const uint8_t*>(json.c_str()), json.size());
-    if (err) {
-        std::cout << err->Explain() << std::endl;
-    }
-
-    Assert(err, args.keyword);
-
-    err = db.Upsert("meta", 0, reinterpret_cast<const uint8_t*>(json.c_str()), json.size());
-    if (err) {
-        std::cout << err->Explain() << std::endl;
-    }
-
-    Assert(err, args.keyword);
-
-
-    return 0;
-}
diff --git a/tests/automatic/producer/CMakeLists.txt b/tests/automatic/producer/CMakeLists.txt
index 3f9a657565fab57faf4791b1a933a61366e0e63a..38bd45932b4a517d8ef088d9662a4452d3dc1cd1 100644
--- a/tests/automatic/producer/CMakeLists.txt
+++ b/tests/automatic/producer/CMakeLists.txt
@@ -1,6 +1,8 @@
 add_subdirectory(file_monitor_producer)
 add_subdirectory(beamtime_metadata)
 
+add_subdirectory(cpp_api)
+
 if (BUILD_PYTHON)
     add_subdirectory(python_api)
     add_subdirectory(aai)
diff --git a/tests/automatic/producer/cpp_api/CMakeLists.txt b/tests/automatic/producer/cpp_api/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e4143f91e86755a0531e3678491c4a5f5e3b35d8
--- /dev/null
+++ b/tests/automatic/producer/cpp_api/CMakeLists.txt
@@ -0,0 +1,15 @@
+set(TARGET_NAME producer_api)
+set(SOURCE_FILES producer_api.cpp)
+
+
+################################
+# Executable and link
+################################
+add_executable(${TARGET_NAME} ${SOURCE_FILES})
+target_link_libraries(${TARGET_NAME} test_common asapo-producer)
+
+################################
+# Testing
+################################
+add_script_test("${TARGET_NAME}" "$<TARGET_FILE:${TARGET_NAME}>")
+
diff --git a/tests/automatic/producer/cpp_api/check_linux.sh b/tests/automatic/producer/cpp_api/check_linux.sh
new file mode 100644
index 0000000000000000000000000000000000000000..7d6d7dbc2b4daf4f4890fddbc62e2c69b36711f5
--- /dev/null
+++ b/tests/automatic/producer/cpp_api/check_linux.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+set -e
+
+trap Cleanup EXIT
+
+beamtime_id=asapo_test
+data_source=cpp
+beamline=test
+receiver_root_folder=/tmp/asapo/receiver/files
+facility=test_facility
+year=2019
+receiver_folder=${receiver_root_folder}/${facility}/gpfs/${beamline}/${year}/data/${beamtime_id}
+
+# Remove the receiver data folder, the temporary input file and the test database on exit.
+Cleanup() {
+  echo cleanup
+  rm -rf ${receiver_root_folder} file1
+  echo "db.dropDatabase()" | mongo ${beamtime_id}_${data_source} >/dev/null
+}
+
+mkdir -p ${receiver_folder}
+
+echo test > file1
+
+$@ "127.0.0.1:8400" $data_source $beamtime_id
diff --git a/tests/automatic/producer/cpp_api/check_windows.bat b/tests/automatic/producer/cpp_api/check_windows.bat
new file mode 100644
index 0000000000000000000000000000000000000000..7e992f19d987b049ff4a238090d209244948ba1e
--- /dev/null
+++ b/tests/automatic/producer/cpp_api/check_windows.bat
@@ -0,0 +1,28 @@
+SET mongo_exe="c:\Program Files\MongoDB\Server\4.2\bin\mongo.exe"
+SET beamtime_id=asapo_test
+SET beamline=test
+SET data_source=cpp
+SET receiver_root_folder=c:\tmp\asapo\receiver\files
+SET receiver_folder="%receiver_root_folder%\test_facility\gpfs\%beamline%\2019\data\%beamtime_id%"
+SET dbname=%beamtime_id%_%data_source%
+
+mkdir %receiver_folder%
+
+echo test > file1
+
+ping 192.0.2.1 -n 1 -w 1000 > nul
+
+REM remember the producer exit status so its output is shown before failing
+"%1" "127.0.0.1:8400" %data_source% %beamtime_id%   > out || set failed=1
+type out
+if "%failed%"=="1" goto :error
+goto :clean
+
+:error
+call :clean
+exit /b 1
+
+:clean
+rmdir /S /Q %receiver_root_folder%
+echo db.dropDatabase() | %mongo_exe% %dbname%
+del /Q file1 out
diff --git a/tests/automatic/producer/cpp_api/producer_api.cpp b/tests/automatic/producer/cpp_api/producer_api.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..d5e5982ab635c69bbbd2a55283465ab48396e0bd
--- /dev/null
+++ b/tests/automatic/producer/cpp_api/producer_api.cpp
@@ -0,0 +1,99 @@
+#include <iostream>
+#include <vector>
+#include <thread>
+#include <algorithm>
+#include <asapo/asapo_producer.h>
+#include "testing.h"
+
+struct Args {
+    std::string server;    // ASAPO endpoint, e.g. "127.0.0.1:8400" (see check scripts)
+    std::string source;    // data source name
+    std::string beamtime;  // beamtime id
+};
+
+// Parse the command line: <endpoint> <data source> <beamtime id>.
+// Prints a message and exits when the argument count is wrong.
+Args GetArgs(int argc, char* argv[]) {
+    if (argc != 4) {
+        std::cout << "Wrong number of arguments" << std::endl;
+        exit(EXIT_FAILURE);
+    }
+
+    // std::string members are constructed directly from the argv pointers
+    return Args{argv[1], argv[2], argv[3]};
+}
+
+void TestMeta(const std::unique_ptr<asapo::Producer>& producer) {  // beamtime meta insert + partial update round-trip
+    asapo::Error err;
+    std::string meta = R"({"data":"test","embedded":{"edata":2}})";
+    producer->SendBeamtimeMetadata(meta, asapo::MetaIngestMode{asapo::MetaIngestOp::kInsert, true}, nullptr);
+    producer->WaitRequestsFinished(5000);
+    auto meta_received = producer->GetBeamtimeMeta(5000, &err);
+    M_AssertTrue(err == nullptr && meta_received == meta, "insert + get beamtime meta");  // was: err ignored
+    std::string meta_update = R"({"embedded":{"edata":3}})";
+    std::string meta_updated = R"({"data":"test","embedded":{"edata":3}})";  // expected server-side merge result
+    producer->SendBeamtimeMetadata(meta_update, asapo::MetaIngestMode{asapo::MetaIngestOp::kUpdate, false}, nullptr);
+    producer->WaitRequestsFinished(5000);
+    meta_received = producer->GetBeamtimeMeta(5000, &err);
+    M_AssertTrue(err == nullptr && meta_received == meta_updated, "update + get beamtime meta");
+}
+
+
+// Check client/server version info, beamtime metadata handling and
+// stream info retrieval against a running ASAPO instance.
+void Test(const std::unique_ptr<asapo::Producer>& producer) {
+    asapo::Error err;
+
+    std::string client, server;
+    bool supported;
+    err = producer->GetVersionInfo(&client, &server, &supported);
+    M_AssertTrue(err == nullptr, "Version OK");
+    M_AssertTrue(supported, "client supported by server");
+    M_AssertTrue(!client.empty(), "client version");
+    M_AssertTrue(!server.empty(), "server version");
+
+    TestMeta(producer);
+
+    // print error details on failure to ease debugging from CI logs
+    producer->GetStreamInfo("default", 5000, &err);
+    if (err) {
+        printf("%s\n", err->Explain().c_str());
+    }
+    M_AssertTrue(err == nullptr, "stream info");
+}
+
+
+std::unique_ptr<asapo::Producer> CreateProducer(const Args& args) {  // builds a TCP producer; exits the process on failure
+    asapo::Error err;
+    auto producer = asapo::Producer::Create(args.server, 2,
+                                            asapo::RequestHandlerType::kTcp,
+                                            asapo::SourceCredentials{asapo::SourceType::kProcessed, args.beamtime,
+                                                    "", args.source, ""}, 60000, &err);  // 60000 — presumably ms timeout; confirm against API docs
+    if (err) {
+        std::cerr << "Cannot start producer. ProducerError: " << err << std::endl;
+        exit(EXIT_FAILURE);
+    }
+
+    producer->EnableLocalLog(true);
+    producer->SetLogLevel(asapo::LogLevel::Debug);
+    return producer;
+}
+
+
+// Create the producer and run all tests.
+void TestAll(const Args& args) {
+    auto producer = CreateProducer(args);
+    if (producer == nullptr) {  // defensive: CreateProducer already exits on error
+        std::cout << "Error CreateProducer" << std::endl;
+        exit(EXIT_FAILURE);
+    }
+    Test(producer);
+}
+
+int main(int argc, char* argv[]) {  // usage: producer_api <endpoint> <data_source> <beamtime_id>
+
+    auto args = GetArgs(argc, argv);
+
+    TestAll(args);
+    return 0;
+}
diff --git a/tests/automatic/producer/python_api/check_linux.sh b/tests/automatic/producer/python_api/check_linux.sh
index c86f6bd6ae66428eb24dcc37703577a70953d1a3..51552b6eb0ce43d9eb1fd742872b1ab1859d0c95 100644
--- a/tests/automatic/producer/python_api/check_linux.sh
+++ b/tests/automatic/producer/python_api/check_linux.sh
@@ -27,14 +27,19 @@ echo test > file1
 
 $1 $3 $data_source $beamtime_id  "127.0.0.1:8400" &> out || cat out
 cat out
-echo count successfully send, expect 15
-cat out | grep "successfuly sent" | wc -l | tee /dev/stderr | grep 15
+echo count successfully send, expect 17
+cat out | grep "successfuly sent" | wc -l | tee /dev/stderr | grep 17
+echo count wrong input, expect 11
+cat out | grep "wrong input" | wc -l | tee /dev/stderr | grep 11
+
+echo count wrong json, expect 2
+cat out | grep "JSON parse error" | wc -l | tee /dev/stderr | grep 2
 echo count same id, expect 4
 cat out | grep "already have record with same id" | wc -l | tee /dev/stderr | grep 4
 echo count duplicates, expect 6
 cat out | grep "duplicate" | wc -l | tee /dev/stderr | grep 6
-echo count data in callback, expect 3
-cat out | grep "'data':" | wc -l  | tee /dev/stderr | grep 3
+echo count data in callback, expect 6
+cat out | grep "'data':" | wc -l  | tee /dev/stderr | grep 6
 echo check found local io error
 cat out | grep "local i/o error"
 cat out | grep "Finished successfully"
diff --git a/tests/automatic/producer/python_api/check_windows.bat b/tests/automatic/producer/python_api/check_windows.bat
index 6dbbf441a3e80626d3e7405915e3bf21a49b7fcb..c854be8786882fa78123896faabca55d1054942c 100644
--- a/tests/automatic/producer/python_api/check_windows.bat
+++ b/tests/automatic/producer/python_api/check_windows.bat
@@ -18,7 +18,7 @@ set PYTHONPATH=%2
 type out
 set NUM=0
 for /F %%N in ('find /C "successfuly sent" ^< "out"') do set NUM=%%N
-echo %NUM% | findstr 15 || goto error
+echo %NUM% | findstr 17 || goto error
 
 for /F %%N in ('find /C "} wrong input: Bad request: already have record with same id" ^< "out"') do set NUM=%%N
 echo %NUM% | findstr 2 || goto error
diff --git a/tests/automatic/producer/python_api/producer_api.py b/tests/automatic/producer/python_api/producer_api.py
index 64e30bc439739f6e32d9481271c002739d9f717a..6f4bad1586c4f404f4c98afa3ba1157fa5efe18e 100644
--- a/tests/automatic/producer/python_api/producer_api.py
+++ b/tests/automatic/producer/python_api/producer_api.py
@@ -143,6 +143,11 @@ producer.send_file(1, local_path="./file1", exposed_path="processed/" + data_sou
                    user_meta='{"test_key1":"test_val"}',
                    ingest_mode=asapo_producer.INGEST_MODE_TRANSFER_DATA | asapo_producer.INGEST_MODE_STORE_IN_FILESYSTEM,callback=callback)
 
+producer.send_beamtime_meta('{"data":"bt_meta"}', callback = callback)
+producer.send_stream_meta('{"data":"st_meta"}',stream = 'stream', callback = callback)
+producer.send_stream_meta('bla',stream = 'stream', callback = callback)
+
+
 producer.wait_requests_finished(50000)
 n = producer.get_requests_queue_size()
 assert_eq(n, 0, "requests in queue")
@@ -174,6 +179,14 @@ producer.send_stream_finished_flag("stream/test $", 2, next_stream = "next_strea
 producer.wait_requests_finished(10000)
 
 
+#check meta
+bt_meta = producer.get_beamtime_meta()
+stream_meta = producer.get_stream_meta(stream = 'stream')
+assert_eq(bt_meta['data'], 'bt_meta', "beamtime meta")
+assert_eq(stream_meta['data'], 'st_meta', "stream meta")
+no_meta = producer.get_stream_meta(stream = 'notexist')
+assert_eq(no_meta, None, "no meta")
+
 #stream infos
 info = producer.stream_info()
 assert_eq(info['lastId'], 10, "stream_info last id")