diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1b04c08a883546bb55ed6daf6289f6bb9da1ea14..54c94a909348f15b62a209218140b7e5a319dbd5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,13 +1,20 @@
-## 21.03.1
+## 21.03.2
 
-IMPROVEMENTS
-* Consumer API - retry file delivery/reading with timeout (can be useful for the case file arrives after metadta ingested asapo, e.g. for slow NFS transfer,...)      
+FEATURES
+* implemented possibility to delete stream (only metadata, not files yet) 
 
+IMPROVEMENTS
+* Consumer API - retry file delivery/reading with timeout (can be useful for the case the file arrives after metadata was ingested, e.g. for slow NFS transfer,...)
+      
 BUG FIXES
-* Core services: fix LDAP authorization for raw data type Producers
 * Consumer API: fix race condition in GetStreamList/get_stream_list
 * Producer API: fix segfault in send_stream_finished_flag
+* Producer API: fix deadlock in producer timeout
 
+## 21.03.1
+
+BUG FIXES
+* Core services: fix LDAP authorization for raw data type Producers
 
 ## 21.03.0
 
diff --git a/CMakeLists.txt b/CMakeLists.txt
index bb7d55f616dcf2d1100f4590d376fc893221b40c..7d1f436916ddd170c2197f266ad608dfd936e7a6 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -2,14 +2,14 @@ cmake_minimum_required(VERSION 3.7)
 project(ASAPO)
 
 #protocol version changes if one of the microservice API's change
-set (ASAPO_CONSUMER_PROTOCOL "v0.1")
-set (ASAPO_PRODUCER_PROTOCOL "v0.1")
+set (ASAPO_CONSUMER_PROTOCOL "v0.2")
+set (ASAPO_PRODUCER_PROTOCOL "v0.2")
 
 set (ASAPO_DISCOVERY_API_VER "v0.1")
 set (ASAPO_AUTHORIZER_API_VER "v0.1")
-set (ASAPO_BROKER_API_VER "v0.1")
+set (ASAPO_BROKER_API_VER "v0.2")
 set (ASAPO_FILE_TRANSFER_SERVICE_API_VER "v0.1")
-set (ASAPO_RECEIVER_API_VER "v0.1")
+set (ASAPO_RECEIVER_API_VER "v0.2")
 set (ASAPO_RDS_API_VER "v0.1")
 
 set(CMAKE_CXX_STANDARD 11)
diff --git a/CMakeModules/testing_cpp.cmake b/CMakeModules/testing_cpp.cmake
index cd62e14b3178ba319c894c06ca8e124db9ddc066..95364659d91c2e92923eee675303f57f51e86506 100644
--- a/CMakeModules/testing_cpp.cmake
+++ b/CMakeModules/testing_cpp.cmake
@@ -13,6 +13,7 @@ set (TOKENS "${TOKENS};BT_DATA_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdG
 set (TOKENS "${TOKENS};BT_TEST_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiJjMTkybnViaXB0MzBsMjlpcXNxMCIsInN1YiI6ImJ0X3Rlc3QiLCJFeHRyYUNsYWltcyI6eyJBY2Nlc3NUeXBlcyI6WyJyZWFkIl19fQ.8dh4KIusIVk75MGiWjoj23_cesLLWSMDjU8vb0RHVtU")
 set (TOKENS "${TOKENS};BT_AAA_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiJjMTkycDFiaXB0MzBub3AwcTNlZyIsInN1YiI6ImJ0X2FhYSIsIkV4dHJhQ2xhaW1zIjp7IkFjY2Vzc1R5cGVzIjpbInJlYWQiXX19.dt3ifrG3zqQP4uM2kaoe7ydDjUdFeasOB07fVRfFApE")
 set (TOKENS "${TOKENS};BT_TEST_RUN_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiJjMTk0NjYzaXB0Mzdma2w0YmVrMCIsInN1YiI6ImJ0X3Rlc3RfcnVuIiwiRXh0cmFDbGFpbXMiOnsiQWNjZXNzVHlwZXMiOlsicmVhZCJdfX0.QJjoGOlzMvOUk7dK2bbDgSEM5-1mO6wmpmESYL6McdU")
+set (TOKENS "${TOKENS};BT_TEST_RUN_RW_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiJjMjlzNDByaXB0M2JuczJtaTA1MCIsInN1YiI6ImJ0X3Rlc3RfcnVuIiwiRXh0cmFDbGFpbXMiOnsiQWNjZXNzVHlwZXMiOlsicmVhZCIsIndyaXRlIl19fQ.vwOak8smIF23TaRo5KzRgvA8d_fPLKnd9dXglMsm7d8")
 
 if (BUILD_TESTS)
     set(ASAPO_MINIMUM_COVERAGE 70)
diff --git a/PROTOCOL-VERSIONS.md b/PROTOCOL-VERSIONS.md
index 8118cdb245b13c61b77c37a33f619bf864e5f551..132bf4171cf1dc442327be4aad15f8975182e54e 100644
--- a/PROTOCOL-VERSIONS.md
+++ b/PROTOCOL-VERSIONS.md
@@ -1,10 +1,12 @@
 ### Producer Protocol
-| Release      | Supported by client | Supported by server  | Status           |
+| Release      | Used by client      | Supported by server  | Status           |
 | ------------ | ------------------- | -------------------- | ---------------- |
-| v0.1         | 21.03.0 - 21.03.1   | 21.03.0  - 21.03.1   | Current version  |
+| v0.2         | 21.03.2 - 21.03.2   | 21.03.2  - 21.03.2   | Current version  |
+| v0.1         | 21.03.0 - 21.03.1   | 21.03.0  - 21.03.2   | Deprecated from 01.06.2022  |
 
 
 ### Consumer Protocol
-| Release      | Supported by client | Supported by server  | Status           |
+| Release      | Used by client      | Supported by server  | Status           |
 | ------------ | ------------------- | -------------------- | ---------------- |
-| v0.1         | 21.03.0 - 21.03.1   | 21.03.0  - 21.03.1   | Current version  |
+| v0.2         | 21.03.2 - 21.03.2   | 21.03.2  - 21.03.2   | Current version  |
+| v0.1         | 21.03.0 - 21.03.1   | 21.03.0  - 21.03.2   | Deprecated from 01.06.2022  |
diff --git a/VERSIONS.md b/VERSIONS.md
index a5326685f3d8a74bdc33b9232ab326a686adb589..69c95ab10099c9f35ab8471492157c536a4f350f 100644
--- a/VERSIONS.md
+++ b/VERSIONS.md
@@ -1,13 +1,15 @@
 ### Producer API
 
-| Release      | API changed | Breaking changes | Protocol | Supported by server from/to | Status          |Comment|
-| ------------ | ----------- |----------------- | -------- | ------------------------- | ----------------- | ------- |
-| 21.03.1      | No          | No               | v0.1     | 21.03.0/21.03.1           | current version  |bugfix in server|
-| 21.03.0      | Yes         | Yes              | v0.1     | 21.03.0/21.03.1            |                 |          |
+| Release      | API changed | Breaking changes | Protocol | Supported by server from/to | Status              |Comment|
+| ------------ | ----------- |----------------- | -------- | ------------------------- | --------------------- | ------- |
+| 21.03.2      | Yes         | No               | v0.2     | 21.03.2/21.03.2           | current version         |bugfixes, add delete_stream|
+| 21.03.1      | No          | No               | v0.1     | 21.03.0/21.03.2           | deprecated from 01.06.2022    |bugfix in server|
+| 21.03.0      | Yes         | Yes              | v0.1     | 21.03.0/21.03.2           |                 |          |
 
 ### Consumer API
 
-| Release      | API changed | Breaking changes | Protocol | Supported by server from/to | Status |Comment|
-| ------------ | ----------- |----------------- | -------- | ------------------------- | ------- | ------- |
-| 21.03.1      | No          | No               | v0.1     | 21.03.0/21.03.1           | current version  |bugfix in server|
-| 21.03.0      | Yes         | Yes              | v0.1     | 21.03.0/21.03.1           |         |        |
+| Release      | API changed | Breaking changes | Protocol | Supported by server from/to | Status         |Comment|
+| ------------ | ----------- |----------------- | -------- | ------------------------- | ---------------- | ------- |
+| 21.03.2      | Yes          | No              | v0.2     | 21.03.2/21.03.2           | current version  |bugfixes, add delete_stream|
+| 21.03.1      | No          | No               | v0.1     | 21.03.0/21.03.2           | deprecated from 01.06.2022       |bugfix in server|
+| 21.03.0      | Yes         | Yes              | v0.1     | 21.03.0/21.03.2           |                  |        |
diff --git a/broker/src/asapo_broker/database/mongodb.go b/broker/src/asapo_broker/database/mongodb.go
index 5e63509e5a687a2bc95bd77df8ae6a6e13bba017..24c7dde96cad848bc8cad0c98c0ea4f96cc169b1 100644
--- a/broker/src/asapo_broker/database/mongodb.go
+++ b/broker/src/asapo_broker/database/mongodb.go
@@ -16,7 +16,6 @@ import (
 	"strconv"
 	"strings"
 	"sync"
-	"sync/atomic"
 	"time"
 )
 
@@ -76,7 +75,6 @@ const stream_filter_all = "all"
 const stream_filter_finished = "finished"
 const stream_filter_unfinished = "unfinished"
 
-
 var dbSessionLock sync.Mutex
 
 type SizeRecord struct {
@@ -87,7 +85,7 @@ type Mongodb struct {
 	client                *mongo.Client
 	timeout               time.Duration
 	settings              DBSettings
-	lastReadFromInprocess int64
+	lastReadFromInprocess map[string]int64
 }
 
 func (db *Mongodb) SetSettings(settings DBSettings) {
@@ -104,6 +102,9 @@ func (db *Mongodb) Ping() (err error) {
 }
 
 func (db *Mongodb) Connect(address string) (err error) {
+	dbSessionLock.Lock()
+	defer dbSessionLock.Unlock()
+
 	if db.client != nil {
 		return &DBError{utils.StatusServiceUnavailable, already_connected_msg}
 	}
@@ -119,19 +120,21 @@ func (db *Mongodb) Connect(address string) (err error) {
 		return err
 	}
 
-	atomic.StoreInt64(&db.lastReadFromInprocess, time.Now().Unix())
+	if db.lastReadFromInprocess == nil {
+		db.lastReadFromInprocess = make(map[string]int64, 100)
+	}
 
 	return db.Ping()
 }
 
 func (db *Mongodb) Close() {
+	dbSessionLock.Lock()
+	defer dbSessionLock.Unlock()
 	if db.client != nil {
-		dbSessionLock.Lock()
 		ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
 		defer cancel()
 		db.client.Disconnect(ctx)
 		db.client = nil
-		dbSessionLock.Unlock()
 	}
 }
 
@@ -358,7 +361,7 @@ func (db *Mongodb) negAckRecord(request Request) ([]byte, error) {
 		return nil, &DBError{utils.StatusWrongInput, err.Error()}
 	}
 
-	err = db.InsertRecordToInprocess(request.DbName, inprocess_collection_name_prefix+request.GroupId, input.Id, input.Params.DelayMs, 1)
+	err = db.InsertRecordToInprocess(request.DbName, inprocess_collection_name_prefix+request.DbCollectionName+"_"+request.GroupId, input.Id, input.Params.DelayMs, 1, true)
 	return []byte(""), err
 }
 
@@ -372,7 +375,7 @@ func (db *Mongodb) ackRecord(request Request) ([]byte, error) {
 	_, err = c.InsertOne(context.Background(), &record)
 
 	if err == nil {
-		c = db.client.Database(request.DbName).Collection(inprocess_collection_name_prefix + request.GroupId)
+		c = db.client.Database(request.DbName).Collection(inprocess_collection_name_prefix + request.DbCollectionName + "_" + request.GroupId)
 		_, err_del := c.DeleteOne(context.Background(), bson.M{"_id": record.ID})
 		if err_del != nil {
 			return nil, &DBError{utils.StatusWrongInput, err.Error()}
@@ -439,7 +442,7 @@ func (db *Mongodb) getUnProcessedId(dbname string, collection_name string, delay
 	return res.ID, nil
 }
 
-func (db *Mongodb) InsertRecordToInprocess(db_name string, collection_name string, id int, delayMs int, nResendAttempts int) error {
+func (db *Mongodb) InsertRecordToInprocess(db_name string, collection_name string, id int, delayMs int, nResendAttempts int, replaceIfExist bool) error {
 	record := InProcessingRecord{
 		id, nResendAttempts, 0, time.Now().UnixNano() + int64(delayMs*1e6),
 	}
@@ -447,7 +450,11 @@ func (db *Mongodb) InsertRecordToInprocess(db_name string, collection_name strin
 	c := db.client.Database(db_name).Collection(collection_name)
 	_, err := c.InsertOne(context.TODO(), &record)
 	if duplicateError(err) {
-		return nil
+		if !replaceIfExist {
+			return nil
+		}
+		_, err := c.ReplaceOne(context.TODO(), bson.M{"_id": id}, &record)
+		return err
 	}
 	return err
 }
@@ -461,7 +468,7 @@ func (db *Mongodb) InsertToInprocessIfNeeded(db_name string, collection_name str
 		return err
 	}
 
-	return db.InsertRecordToInprocess(db_name, collection_name, id, delayMs, nResendAttempts)
+	return db.InsertRecordToInprocess(db_name, collection_name, id, delayMs, nResendAttempts, false)
 
 }
 
@@ -477,8 +484,11 @@ func (db *Mongodb) getNextAndMaxIndexesFromInprocessed(request Request, ignoreTi
 		nResendAttempts = -1
 	}
 	tNow := time.Now().Unix()
-	if (atomic.LoadInt64(&db.lastReadFromInprocess) <= tNow-int64(db.settings.ReadFromInprocessPeriod)) || ignoreTimeout {
-		record_ind, err = db.getUnProcessedId(request.DbName, inprocess_collection_name_prefix+request.GroupId, delayMs, nResendAttempts)
+	dbSessionLock.Lock()
+	t := db.lastReadFromInprocess[request.DbCollectionName+"_"+request.GroupId]
+	dbSessionLock.Unlock()
+	if (t <= tNow-int64(db.settings.ReadFromInprocessPeriod)) || ignoreTimeout {
+		record_ind, err = db.getUnProcessedId(request.DbName, inprocess_collection_name_prefix+request.DbCollectionName+"_"+request.GroupId, delayMs, nResendAttempts)
 		if err != nil {
 			log_str := "error getting unprocessed id " + request.DbName + ", groupid: " + request.GroupId + ":" + err.Error()
 			logger.Debug(log_str)
@@ -491,7 +501,9 @@ func (db *Mongodb) getNextAndMaxIndexesFromInprocessed(request Request, ignoreTi
 			return 0, 0, err
 		}
 	} else {
-		atomic.StoreInt64(&db.lastReadFromInprocess, time.Now().Unix())
+		dbSessionLock.Lock()
+		db.lastReadFromInprocess[request.DbCollectionName+"_"+request.GroupId] = time.Now().Unix()
+		dbSessionLock.Unlock()
 	}
 
 	return record_ind, max_ind, nil
@@ -590,7 +602,7 @@ func (db *Mongodb) getNextRecord(request Request) ([]byte, error) {
 	}
 
 	if err == nil {
-		err_update := db.InsertToInprocessIfNeeded(request.DbName, inprocess_collection_name_prefix+request.GroupId, nextInd, request.ExtraParam)
+		err_update := db.InsertToInprocessIfNeeded(request.DbName, inprocess_collection_name_prefix+request.DbCollectionName+"_"+request.GroupId, nextInd, request.ExtraParam)
 		if err_update != nil {
 			return nil, err_update
 		}
@@ -645,7 +657,7 @@ func (db *Mongodb) resetCounter(request Request) ([]byte, error) {
 		return []byte(""), err
 	}
 
-	c := db.client.Database(request.DbName).Collection(inprocess_collection_name_prefix + request.GroupId)
+	c := db.client.Database(request.DbName).Collection(inprocess_collection_name_prefix + request.DbCollectionName + "_" + request.GroupId)
 	_, err_del := c.DeleteMany(context.Background(), bson.M{"_id": bson.M{"$gte": id}})
 	if err_del != nil {
 		return nil, &DBError{utils.StatusWrongInput, err.Error()}
@@ -792,6 +804,98 @@ func (db *Mongodb) nacks(request Request) ([]byte, error) {
 	return utils.MapToJson(&res)
 }
 
+func (db *Mongodb) deleteCollection(request Request, name string) error {
+	return db.client.Database(request.DbName).Collection(name).Drop(context.Background())
+}
+
+func (db *Mongodb) collectionExist(request Request, name string) (bool, error) {
+	result, err := db.client.Database(request.DbName).ListCollectionNames(context.TODO(), bson.M{"name": name})
+	if err != nil {
+		return false, err
+	}
+	if len(result) == 1 {
+		return true, nil
+	}
+	return false, nil
+}
+
+func (db *Mongodb) deleteDataCollection(errorOnNotexist bool, request Request) error {
+	dataCol := data_collection_name_prefix + request.DbCollectionName
+	if errorOnNotexist {
+		exist, err := db.collectionExist(request, dataCol)
+		if err != nil {
+			return err
+		}
+		if !exist {
+			return &DBError{utils.StatusWrongInput, "stream " + request.DbCollectionName + " does not exist"}
+		}
+	}
+	return db.deleteCollection(request, dataCol)
+}
+
+func (db *Mongodb) deleteDocumentsInCollection(request Request, collection string, field string, pattern string) error {
+	filter := bson.M{field: bson.D{{"$regex", primitive.Regex{Pattern: pattern, Options: "i"}}}}
+	_, err := db.client.Database(request.DbName).Collection(collection).DeleteMany(context.TODO(), filter)
+	return err
+}
+
+func (db *Mongodb) deleteCollectionsWithPrefix(request Request, prefix string) error {
+	cols, err := db.client.Database(request.DbName).ListCollectionNames(context.TODO(), bson.M{"name": bson.D{
+		{"$regex", primitive.Regex{Pattern: "^" + prefix, Options: "i"}}}})
+	if err != nil {
+		return err
+	}
+
+	for _, col := range cols {
+		err := db.deleteCollection(request, col)
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+func (db *Mongodb) deleteServiceMeta(request Request) error {
+	err := db.deleteCollectionsWithPrefix(request, acks_collection_name_prefix+request.DbCollectionName)
+	if err != nil {
+		return err
+	}
+	err = db.deleteCollectionsWithPrefix(request, inprocess_collection_name_prefix+request.DbCollectionName)
+	if err != nil {
+		return err
+	}
+	return db.deleteDocumentsInCollection(request, pointer_collection_name, "_id", ".*_"+request.DbCollectionName+"$")
+}
+
+func (db *Mongodb) deleteStream(request Request) ([]byte, error) {
+	params := struct {
+		ErrorOnNotExist *bool
+		DeleteMeta      *bool
+	}{}
+
+	err := json.Unmarshal([]byte(request.ExtraParam), &params)
+	if err != nil {
+		return nil, err
+	}
+
+	if params.DeleteMeta == nil || params.ErrorOnNotExist == nil {
+		return nil, &DBError{utils.StatusWrongInput, "wrong params: " + request.ExtraParam}
+	}
+	if !*params.DeleteMeta {
+		logger.Debug("skipping delete stream meta for " + request.DbCollectionName + " in " + request.DbName)
+		return nil, nil
+	}
+
+	err = db.deleteDataCollection(*params.ErrorOnNotExist, request)
+	if err != nil {
+		return nil, err
+	}
+
+	err = db.deleteServiceMeta(request)
+	return nil, err
+}
+
 func (db *Mongodb) lastAck(request Request) ([]byte, error) {
 	c := db.client.Database(request.DbName).Collection(acks_collection_name_prefix + request.DbCollectionName + "_" + request.GroupId)
 	opts := options.FindOne().SetSort(bson.M{"_id": -1}).SetReturnKey(true)
@@ -910,6 +1014,8 @@ func (db *Mongodb) ProcessRequest(request Request) (answer []byte, err error) {
 		return db.nacks(request)
 	case "lastack":
 		return db.lastAck(request)
+	case "delete_stream":
+		return db.deleteStream(request)
 	}
 
 	return nil, errors.New("Wrong db operation: " + request.Op)
diff --git a/broker/src/asapo_broker/database/mongodb_streams.go b/broker/src/asapo_broker/database/mongodb_streams.go
index 4d32341c6044570d9c3028f5770e7f08e8c72c63..278ef3c57062196067b1d78c7814b0ecfcfba70e 100644
--- a/broker/src/asapo_broker/database/mongodb_streams.go
+++ b/broker/src/asapo_broker/database/mongodb_streams.go
@@ -28,14 +28,15 @@ type StreamsRecord struct {
 
 type Streams struct {
 	records     map[string]StreamsRecord
-	lastUpdated map[string]int64
+	lastUpdated map[string]time.Time
+	lastSynced  map[string]time.Time
 }
 
-var streams = Streams{lastUpdated: make(map[string]int64, 0), records: make(map[string]StreamsRecord, 0)}
+var streams = Streams{lastSynced: make(map[string]time.Time, 0),lastUpdated: make(map[string]time.Time, 0), records: make(map[string]StreamsRecord, 0)}
 var streamsLock sync.Mutex
 
 func (ss *Streams) tryGetFromCache(db_name string, updatePeriodMs int) (StreamsRecord, error) {
-	if ss.lastUpdated[db_name] < time.Now().UnixNano()-int64(updatePeriodMs*1000000) {
+	if time.Now().Sub(ss.lastUpdated[db_name]).Milliseconds() > int64(updatePeriodMs) {
 		return StreamsRecord{}, errors.New("cache expired")
 	}
 	rec, ok := ss.records[db_name]
@@ -133,19 +134,19 @@ func updateStreamInfofromCurrent(currentStreams []StreamInfo, record StreamInfo,
 	return found, false
 }
 
-func updateStreamInfos(db *Mongodb, db_name string, rec *StreamsRecord) error {
+func updateStreamInfos(db *Mongodb, db_name string, rec *StreamsRecord,forceSync bool) error {
 	currentStreams := getCurrentStreams(db_name)
 	for i, record := range rec.Streams {
 		found, mayContinue := updateStreamInfofromCurrent(currentStreams, record, &rec.Streams[i])
-		if mayContinue {
+		if mayContinue && !forceSync {
 			continue
 		}
-		if !found { // set timestamp
+		if !found || forceSync { // set timestamp
 			if err := fillInfoFromEarliestRecord(db, db_name, rec, record, i); err != nil {
 				return err
 			}
 		}
-		if err := fillInfoFromLastRecord(db, db_name, rec, record, i); err != nil { // update firstStream last record (timestamp, stream finished flag)
+		if err := fillInfoFromLastRecord(db, db_name, rec, record, i); err != nil { // update last record (timestamp, stream finished flag)
 			return err
 		}
 	}
@@ -163,17 +164,26 @@ func (ss *Streams) updateFromDb(db *Mongodb, db_name string) (StreamsRecord, err
 	if err != nil {
 		return StreamsRecord{}, err
 	}
-	err = updateStreamInfos(db, db_name, &rec)
+
+	forceSync:= false
+	if time.Now().Sub(ss.lastSynced[db_name]).Seconds() > 5 {
+		forceSync = true
+	}
+	err = updateStreamInfos(db, db_name, &rec,forceSync)
 	if err != nil {
 		return StreamsRecord{}, err
 	}
 
+	if forceSync {
+		ss.lastSynced[db_name] = time.Now()
+	}
+
 	sortRecords(&rec)
 	if len(rec.Streams) > 0 {
 		res :=StreamsRecord{}
 		utils.DeepCopy(rec,&res)
 		ss.records[db_name] = res
-		ss.lastUpdated[db_name] = time.Now().UnixNano()
+		ss.lastUpdated[db_name] = time.Now()
 	}
 	return rec, nil
 }
diff --git a/broker/src/asapo_broker/database/mongodb_test.go b/broker/src/asapo_broker/database/mongodb_test.go
index b0f2e97a38901e168cb1964a44beacb2de73593a..d6e7b0d717eeb69edd8699444d4231c72dcf5b73 100644
--- a/broker/src/asapo_broker/database/mongodb_test.go
+++ b/broker/src/asapo_broker/database/mongodb_test.go
@@ -1122,10 +1122,14 @@ func TestMongoDBNegAck(t *testing.T) {
 	db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "negackmessage", ExtraParam: string(bparam)})
 	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"}) // first time message from negack
 	_, err1 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})  // second time nothing
+	db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "negackmessage", ExtraParam: string(bparam)})
+	_, err2 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})  // second time nothing
 
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec1_expect), string(res))
 	assert.NotNil(t, err1)
+	assert.Nil(t, err2)
+
 	if err1 != nil {
 		assert.Equal(t, utils.StatusNoData, err1.(*DBError).Code)
 		assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1,\"next_stream\":\"\"}", err1.Error())
@@ -1154,3 +1158,40 @@ func TestMongoDBGetNextClearsInprocessAfterReset(t *testing.T) {
 	assert.Equal(t, string(rec1_expect), string(res2))
 	assert.Equal(t, string(rec1_expect), string(res3))
 }
+
+var testsDeleteStream = []struct {
+	stream  string
+	params  string
+	ok      bool
+	ok2 bool
+	message string
+}{
+	{"test", "{\"ErrorOnNotExist\":true,\"DeleteMeta\":true}", true,false, "delete stream"},
+	{"test", "{\"ErrorOnNotExist\":false,\"DeleteMeta\":true}", true, true,"delete stream"},
+}
+
+func TestDeleteStreams(t *testing.T) {
+	for _, test := range testsDeleteStream {
+		db.Connect(dbaddress)
+		db.insertRecord(dbname, test.stream, &rec_finished11)
+
+		_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: test.stream, GroupId: "", Op: "delete_stream", ExtraParam: test.params})
+		if test.ok {
+			rec, err := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
+			acks_exist,_:= db.collectionExist(Request{DbName: dbname, ExtraParam: ""},acks_collection_name_prefix+test.stream)
+			inprocess_exist,_:= db.collectionExist(Request{DbName: dbname, ExtraParam: ""},inprocess_collection_name_prefix+test.stream)
+			assert.Equal(t,0,len(rec.Streams),test.message)
+			assert.Equal(t,false,acks_exist,test.message)
+			assert.Equal(t,false,inprocess_exist,test.message)
+			assert.Nil(t, err, test.message)
+		} else {
+			assert.NotNil(t, err, test.message)
+		}
+		_, err = db.ProcessRequest(Request{DbName: dbname, DbCollectionName: test.stream, GroupId: "", Op: "delete_stream", ExtraParam: test.params})
+		if test.ok2 {
+			assert.Nil(t, err, test.message+" 2")
+		} else {
+			assert.Equal(t, utils.StatusWrongInput, err.(*DBError).Code, test.message+" 2")
+		}
+	}
+}
diff --git a/broker/src/asapo_broker/server/get_streams.go b/broker/src/asapo_broker/server/get_streams.go
index a22274553f58663c2bdbd830c246344b48f0dea9..7cc8d0875af1a6f6f0ca335c8707b928457639ad 100644
--- a/broker/src/asapo_broker/server/get_streams.go
+++ b/broker/src/asapo_broker/server/get_streams.go
@@ -1,10 +1,12 @@
 package server
 
 import (
+	"fmt"
 	"net/http"
 )
 
 func routeGetStreams(w http.ResponseWriter, r *http.Request) {
+	fmt.Println(r.RequestURI)
 	keys := r.URL.Query()
 	from := keys.Get("from")
 	filter := keys.Get("filter")
diff --git a/broker/src/asapo_broker/server/listroutes.go b/broker/src/asapo_broker/server/listroutes.go
index b2d87ccf2d9a1d2f1bad08ba88c567658676c6ef..c6064e117787ebc6a1f97e69a2a4734e34db69c5 100644
--- a/broker/src/asapo_broker/server/listroutes.go
+++ b/broker/src/asapo_broker/server/listroutes.go
@@ -23,6 +23,12 @@ var listRoutes = utils.Routes{
 		"/{apiver}/beamtime/{beamtime}/{datasource}/{stream}/streams",
 		routeGetStreams,
 	},
+	utils.Route{
+		"DeleteStream",
+		"Post",
+		"/{apiver}/beamtime/{beamtime}/{datasource}/{stream}/delete",
+		routeDeleteStream,
+	},
 	utils.Route{
 		"GetLast",
 		"Get",
diff --git a/broker/src/asapo_broker/server/post_create_group_test.go b/broker/src/asapo_broker/server/post_create_group_test.go
index 46f6fb09edd9241e5f54aad5dc7e89192db03152..dcef0d009e109426d8cb95e9fc5dabd31a0b7692 100644
--- a/broker/src/asapo_broker/server/post_create_group_test.go
+++ b/broker/src/asapo_broker/server/post_create_group_test.go
@@ -34,6 +34,6 @@ func TestGetNewGroup(t *testing.T) {
 }
 
 func TestGetNewGroupWrongProtocol(t *testing.T) {
-	w := doRequest("/creategroup", "POST","","/v0.2")
+	w := doRequest("/creategroup", "POST","","/v1.2")
 	assert.Equal(t, http.StatusUnsupportedMediaType, w.Code, "wrong request")
 }
diff --git a/broker/src/asapo_broker/server/post_delete_stream.go b/broker/src/asapo_broker/server/post_delete_stream.go
new file mode 100644
index 0000000000000000000000000000000000000000..f50ad0d1816bdf6da5c178919cc11cf4843d10d7
--- /dev/null
+++ b/broker/src/asapo_broker/server/post_delete_stream.go
@@ -0,0 +1,15 @@
+package server
+
+import (
+	"io/ioutil"
+	"net/http"
+)
+
+func routeDeleteStream(w http.ResponseWriter, r *http.Request) {
+	body, err := ioutil.ReadAll(r.Body)
+	if err != nil {
+		http.Error(w, err.Error(), 500)
+		return
+	}
+	processRequest(w, r, "delete_stream", string(body), false)
+}
diff --git a/broker/src/asapo_broker/server/post_op_image.go b/broker/src/asapo_broker/server/post_op_image.go
index 1440812f56e1a7f2f915233181a1cf9bdcdd176e..4993ea42ac96c50ed3817c107b1f486547dd574c 100644
--- a/broker/src/asapo_broker/server/post_op_image.go
+++ b/broker/src/asapo_broker/server/post_op_image.go
@@ -12,6 +12,7 @@ type MessageOp struct {
 	Op string
 	Params map[string]interface{} `json:",omitempty"`
 }
+
 func routeMessageOp(w http.ResponseWriter, r *http.Request) {
 	body, err := ioutil.ReadAll(r.Body)
 	if err != nil {
diff --git a/broker/src/asapo_broker/server/process_request.go b/broker/src/asapo_broker/server/process_request.go
index 87b6a5075c842d859a9d40b47666f2b81f96cd8f..7e001f3397feef9dc0384da6d38aa21078caa5e3 100644
--- a/broker/src/asapo_broker/server/process_request.go
+++ b/broker/src/asapo_broker/server/process_request.go
@@ -42,7 +42,7 @@ func checkGroupID(w http.ResponseWriter, needGroupID bool, group_id string, db_n
 	if  len(group_id) > 0 && len (group_id) < 100 && IsLetterOrNumbers(group_id) {
 		return true
 	}
-	err_str := "wrong groupid " + group_id
+	err_str := "wrong groupid name, check length or allowed charecters in " + group_id
 	log_str := "processing get " + op + " request in " + db_name + " at " + settings.GetDatabaseServer() + ": " + err_str
 	logger.Error(log_str)
 	w.WriteHeader(http.StatusBadRequest)
@@ -55,6 +55,10 @@ func checkBrokerApiVersion(w http.ResponseWriter, r *http.Request) bool {
 	return ok
 }
 
+func needWriteAccess(op string) bool {
+	return op=="delete_stream";
+}
+
 func processRequest(w http.ResponseWriter, r *http.Request, op string, extra_param string, needGroupID bool) {
 	if ok := checkBrokerApiVersion(w, r); !ok {
 		return
@@ -68,7 +72,7 @@ func processRequest(w http.ResponseWriter, r *http.Request, op string, extra_par
 		return
 	}
 
-	if err := authorize(r, db_name); err != nil {
+	if err := authorize(r, db_name, needWriteAccess(op)); err != nil {
 		writeAuthAnswer(w, "get "+op, db_name, err)
 		return
 	}
diff --git a/broker/src/asapo_broker/server/process_request_test.go b/broker/src/asapo_broker/server/process_request_test.go
index 5ce3aa70bb26c91c332a0733d11d17c5b64fc1dd..97248769155270c0bd33a7b194b25c2721169576 100644
--- a/broker/src/asapo_broker/server/process_request_test.go
+++ b/broker/src/asapo_broker/server/process_request_test.go
@@ -17,7 +17,7 @@ import (
 	"time"
 )
 
-var correctTokenSuffix, wrongTokenSuffix, suffixWithWrongToken, expectedBeamtimeId, expectedDBName string
+var correctTokenSuffix, correctTokenSuffixWrite, wrongTokenSuffix, suffixWithWrongToken, expectedBeamtimeId, expectedDBName string
 
 const expectedGroupID = "bid2a5auidddp1vl71d0"
 const wrongGroupID = "_bid2a5auidddp1vl71"
@@ -27,19 +27,26 @@ const expectedStream = "stream"
 type MockAuthServer struct {
 }
 
-func (a * MockAuthServer) AuthorizeToken(tokenJWT string) (token Token, err error) {
-	if tokenJWT =="ok" {
+func (a *MockAuthServer) AuthorizeToken(tokenJWT string) (token Token, err error) {
+	if tokenJWT == "ok" {
 		return Token{
 			structs.IntrospectTokenResponse{
-			Sub:        "bt_"+expectedBeamtimeId,
-			AccessTypes: []string{"read"},
+				Sub:         "bt_" + expectedBeamtimeId,
+				AccessTypes: []string{"read"},
 			},
-		},nil
-	} else {
-		return Token{},errors.New("wrong JWT token")
+		}, nil
+	}
+	if tokenJWT == "ok_write" {
+		return Token{
+			structs.IntrospectTokenResponse{
+				Sub:         "bt_" + expectedBeamtimeId,
+				AccessTypes: []string{"read", "write"},
+			},
+		}, nil
 	}
-}
 
+	return Token{}, errors.New("wrong JWT token")
+}
 
 func prepareTestAuth() {
 	expectedBeamtimeId = "beamtime_id"
@@ -47,6 +54,7 @@ func prepareTestAuth() {
 
 	auth = &MockAuthServer{}
 	correctTokenSuffix = "?token=ok"
+	correctTokenSuffixWrite = "?token=ok_write"
 	wrongTokenSuffix = "?blablabla=aa"
 	suffixWithWrongToken = "?token=wrong"
 }
@@ -143,7 +151,7 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestWithNoToken() {
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestWithWrongDatabaseName() {
 
-	expectedRequest := database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId:expectedGroupID, Op: "next"}
+	expectedRequest := database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId: expectedGroupID, Op: "next"}
 
 	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte(""),
 		&database.DBError{utils.StatusNoData, ""})
@@ -157,7 +165,7 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestWithWrongDatabaseName()
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestWithConnectionError() {
 
-	expectedRequest := database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId:expectedGroupID, Op: "next"}
+	expectedRequest := database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId: expectedGroupID, Op: "next"}
 
 	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte(""),
 		&database.DBError{utils.StatusServiceUnavailable, ""})
@@ -173,8 +181,7 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestWithConnectionError() {
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestWithInternalDBError() {
 
-	expectedRequest := database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId:expectedGroupID, Op: "next"}
-
+	expectedRequest := database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId: expectedGroupID, Op: "next"}
 
 	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte(""), errors.New(""))
 	logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("processing request next")))
@@ -189,10 +196,9 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestWithInternalDBError() {
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestAddsCounter() {
 
-	expectedRequest := database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId:expectedGroupID, Op: "next"}
+	expectedRequest := database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId: expectedGroupID, Op: "next"}
 	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte("Hello"), nil)
 
-
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request next in "+expectedDBName)))
 
 	doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + correctTokenSuffix)
@@ -207,7 +213,7 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestWrongGroupID() {
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestAddsDataset() {
 
-	expectedRequest := database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId:expectedGroupID, DatasetOp:true, Op: "next"}
+	expectedRequest := database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId: expectedGroupID, DatasetOp: true, Op: "next"}
 	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte("Hello"), nil)
 
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request next in "+expectedDBName)))
@@ -215,8 +221,25 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestAddsDataset() {
 	doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + correctTokenSuffix + "&dataset=true")
 }
 
-
 func (suite *ProcessRequestTestSuite) TestProcessRequestErrorOnWrongProtocol() {
-	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + correctTokenSuffix,"GET","","/v0.2")
+	w := doRequest("/beamtime/"+expectedBeamtimeId+"/"+expectedSource+"/"+expectedStream+"/"+expectedGroupID+"/next"+correctTokenSuffix, "GET", "", "/v1.2")
 	suite.Equal(http.StatusUnsupportedMediaType, w.Code, "wrong protocol")
 }
+
+func (suite *ProcessRequestTestSuite) TestProcessRequestDeleteStreamReadToken() {
+	query_str := "query_string"
+	logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("wrong token access")))
+	w := doRequest("/beamtime/"+expectedBeamtimeId+"/"+expectedSource+"/"+expectedStream+"/delete"+correctTokenSuffix, "POST", query_str)
+	suite.Equal(http.StatusUnauthorized, w.Code, "wrong token type")
+
+}
+
+func (suite *ProcessRequestTestSuite) TestProcessRequestDeleteStreamWriteToken() {
+	query_str := "query_string"
+
+	expectedRequest := database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId: "", Op: "delete_stream", ExtraParam: query_str}
+	suite.mock_db.On("ProcessRequest", expectedRequest).Return([]byte("Hello"), nil)
+
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request delete_stream in "+expectedDBName)))
+	doRequest("/beamtime/"+expectedBeamtimeId+"/"+expectedSource+"/"+expectedStream+"/delete"+correctTokenSuffixWrite, "POST", query_str)
+}
diff --git a/broker/src/asapo_broker/server/request_common.go b/broker/src/asapo_broker/server/request_common.go
index 9476a5a7c0ced30369aa3f05be93360188c727a7..49b65f86308c8ddb05e4b09f2f44cf1b206c84ef 100644
--- a/broker/src/asapo_broker/server/request_common.go
+++ b/broker/src/asapo_broker/server/request_common.go
@@ -50,7 +50,7 @@ func datasetRequested(r *http.Request) (bool, int) {
 	return valueTrue(r, "dataset"), valueInt(r, "minsize")
 }
 
-func authorize(r *http.Request, beamtime_id string) error {
+func authorize(r *http.Request, beamtime_id string, needWriteAccess bool) error {
 	tokenJWT := r.URL.Query().Get("token")
 
 	if len(tokenJWT) == 0 {
@@ -67,7 +67,7 @@ func authorize(r *http.Request, beamtime_id string) error {
 		return err
 	}
 
-	return checkAccessType(token.AccessTypes)
+	return checkAccessType(token.AccessTypes,needWriteAccess)
 }
 
 func checkSubject(subject string, beamtime_id string) error {
@@ -77,7 +77,11 @@ func checkSubject(subject string, beamtime_id string) error {
 	return nil
 }
 
-func checkAccessType(accessTypes []string) error {
+func checkAccessType(accessTypes []string, needWriteAccess bool) error {
+	if needWriteAccess && !utils.StringInSlice("write",accessTypes) {
+		return errors.New("wrong token access type")
+	}
+
 	if !utils.StringInSlice("read",accessTypes) {
 		return errors.New("wrong token access type")
 	}
diff --git a/common/cpp/include/asapo/common/data_structs.h b/common/cpp/include/asapo/common/data_structs.h
index 96cdaf8c81ef046c05d7699d6e90004a947d149c..546203f29672d1e14b0614ed27b7152f9edb918f 100644
--- a/common/cpp/include/asapo/common/data_structs.h
+++ b/common/cpp/include/asapo/common/data_structs.h
@@ -95,7 +95,7 @@ struct SourceCredentials {
       user_token{std::move(token)},
       type{type} {};
   SourceCredentials() {};
-  static const std::string kDefaultStream;
+  static const std::string kDefaultDataSource;
   static const std::string kDefaultBeamline;
   static const std::string kDefaultBeamtimeId;
   std::string beamtime_id;
@@ -109,6 +109,33 @@ struct SourceCredentials {
   };
 };
 
+struct DeleteStreamOptions {
+ private:
+  enum DeleteStreamFlags : uint64_t {
+    kDeleteMeta = 1 << 0,
+    kErrorOnNotFound = 1 << 1,
+  };
+ public:
+  DeleteStreamOptions() = default;
+  DeleteStreamOptions(bool delete_meta,bool error_on_not_exist):delete_meta{delete_meta},error_on_not_exist{error_on_not_exist}{};
+  bool delete_meta{true};
+  bool error_on_not_exist{true};
+  uint64_t Encode() {
+      uint64_t flag = 0;
+      flag = delete_meta ? flag | DeleteStreamFlags::kDeleteMeta:flag;
+      flag = error_on_not_exist ? flag | DeleteStreamFlags::kErrorOnNotFound:flag;
+      return flag;
+  };
+  void Decode(uint64_t flag) {
+      delete_meta = (flag & DeleteStreamFlags::kDeleteMeta) > 0;
+      error_on_not_exist = (flag & DeleteStreamFlags::kErrorOnNotFound) > 0;
+  };
+  std::string Json() {
+      return std::string("{\"ErrorOnNotExist\":")+(error_on_not_exist?"true":"false")+",\"DeleteMeta\":"
+      +(delete_meta?"true":"false")+"}";
+  }
+};
+
 enum IngestModeFlags : uint64_t {
   kTransferData = 1 << 0,
   kTransferMetaDataOnly = 1 << 1,
@@ -124,7 +151,8 @@ class ClientProtocol {
   std::string discovery_version_;
   std::string name_;
  public:
-  ClientProtocol(std::string version, std::string name,std::string discovery_version) : version_{version}, name_{name} {
+  ClientProtocol(std::string version, std::string name, std::string discovery_version) : version_{version},
+                                                                                         name_{name} {
       discovery_version_ = discovery_version;
   };
   ClientProtocol() = delete;
@@ -153,7 +181,7 @@ class ConsumerProtocol final : public ClientProtocol {
                    std::string file_transfer_service_version,
                    std::string broker_version,
                    std::string rds_version)
-      : ClientProtocol(version, "consumer protocol",discovery_version) {
+      : ClientProtocol(version, "consumer protocol", discovery_version) {
       authorizer_version_ = authorizer_version;
       file_transfer_service_version_ = file_transfer_service_version;
       broker_version_ = broker_version;
@@ -184,7 +212,7 @@ class ProducerProtocol final : public ClientProtocol {
   ProducerProtocol(std::string version,
                    std::string discovery_version,
                    std::string receiver_version)
-      : ClientProtocol(version, "producer protocol",discovery_version) {
+      : ClientProtocol(version, "producer protocol", discovery_version) {
       receiver_version_ = receiver_version;
   };
   const std::string &GetReceiverVersion() const {
diff --git a/common/cpp/include/asapo/common/error.h b/common/cpp/include/asapo/common/error.h
index 24d78d5399cc7fa64307e44b63692596883dc2d1..d99b3aead862ff412c0ea59480405f359519df24 100644
--- a/common/cpp/include/asapo/common/error.h
+++ b/common/cpp/include/asapo/common/error.h
@@ -38,6 +38,7 @@ class ErrorInterface {
   public:
     virtual std::string Explain() const noexcept = 0;
     virtual void Append(const std::string& value) noexcept = 0;
+    virtual void Prepend(const std::string& value) noexcept = 0;
     virtual ErrorType GetErrorType() const noexcept = 0;
     virtual CustomErrorData* GetCustomData() = 0;
     virtual void SetCustomData(std::unique_ptr<CustomErrorData> data) = 0;
@@ -112,6 +113,10 @@ class SimpleError: public ErrorInterface {
         error_ += ": " + value;
     }
 
+    void Prepend(const std::string& value) noexcept override {
+        error_ = value+": "+error_;
+    }
+
     std::string Explain() const noexcept override  {
         return error_;
     }
diff --git a/common/cpp/include/asapo/common/internal/version.h.in b/common/cpp/include/asapo/common/internal/version.h.in
index 79fcea4136e61555e17ee7fc03f5d14d7f66a54a..5ac6f40174d868d9201e39e139c43fcbd08f9b1f 100644
--- a/common/cpp/include/asapo/common/internal/version.h.in
+++ b/common/cpp/include/asapo/common/internal/version.h.in
@@ -33,9 +33,17 @@ inline std::string GetRdsApiVersion() {
 }
 
 inline int VersionToNumber(const std::string& version) {
-    return int(atof(version.c_str()+2)*1000);
+    auto found = version.find(".");
+    if (found != std::string::npos)
+    {
+        auto maj = version.substr(1,found);
+        auto min = version.substr(found+1, std::string::npos);
+        return atoi(maj.c_str())*1000+atoi(min.c_str());
+    }
+    return 0;
 }
 
+
 Error ExtractVersionFromResponse(const std::string &response,
                                  const std::string &client,
                                  std::string* server_info,
diff --git a/common/cpp/include/asapo/common/networking.h b/common/cpp/include/asapo/common/networking.h
index 79f29abb4cefe2095cb06859f11611d0b229d142..bc8cae313624397fe059db3f01b8adb172a534a0 100644
--- a/common/cpp/include/asapo/common/networking.h
+++ b/common/cpp/include/asapo/common/networking.h
@@ -23,6 +23,7 @@ enum Opcode : uint8_t {
     kOpcodeTransferData,
     kOpcodeTransferDatasetData,
     kOpcodeStreamInfo,
+    kOpcodeDeleteStream,
     kOpcodeLastStream,
     kOpcodeGetBufferData,
     kOpcodeAuthorize,
diff --git a/common/cpp/include/asapo/database/database.h b/common/cpp/include/asapo/database/database.h
index d36b95322ffac117825b6fc21d81099ce5238aac..1fe87a7c6f5bc48c2317e816c2be34fdb3a9efcc 100644
--- a/common/cpp/include/asapo/database/database.h
+++ b/common/cpp/include/asapo/database/database.h
@@ -25,6 +25,7 @@ class Database {
     virtual Error GetDataSetById(const std::string& collection, uint64_t set_id, uint64_t id, MessageMeta* file) const = 0;
     virtual Error GetStreamInfo(const std::string& collection, StreamInfo* info) const  = 0;
     virtual Error GetLastStream(StreamInfo* info) const  = 0;
+    virtual Error DeleteStream(const std::string &stream) const = 0;
     virtual ~Database() = default;
 };
 
diff --git a/common/cpp/include/asapo/request/request_handler.h b/common/cpp/include/asapo/request/request_handler.h
index 6a5289bd1b7059d79c229fe324ee7ed3bcec10d6..9257f3b9600901fbac3c9c7ab13d3a6d660320f4 100644
--- a/common/cpp/include/asapo/request/request_handler.h
+++ b/common/cpp/include/asapo/request/request_handler.h
@@ -14,7 +14,7 @@ class RequestHandler {
     virtual void PrepareProcessingRequestLocked()  = 0;
     virtual void TearDownProcessingRequestLocked(bool success)  = 0;
     virtual bool ProcessRequestUnlocked(GenericRequest* request, bool* retry)  = 0;
-    virtual void ProcessRequestTimeout(GenericRequest* request)  = 0;
+    virtual void ProcessRequestTimeoutUnlocked(GenericRequest* request)  = 0;
     virtual bool ReadyProcessRequest() = 0;
     virtual ~RequestHandler() = default;
 };
diff --git a/common/cpp/include/asapo/unittests/MockDatabase.h b/common/cpp/include/asapo/unittests/MockDatabase.h
index 691e39af8f4b24c25216f6b5f99180dc8de4ab4d..e582496dfc93bc0352a3a64724851a749efe0a6e 100644
--- a/common/cpp/include/asapo/unittests/MockDatabase.h
+++ b/common/cpp/include/asapo/unittests/MockDatabase.h
@@ -10,68 +10,70 @@
 namespace asapo {
 
 class MockDatabase : public Database {
-  public:
-    Error Connect(const std::string& address, const std::string& database) override {
-        return Error{Connect_t(address, database)};
+ public:
+  Error Connect(const std::string &address, const std::string &database) override {
+      return Error{Connect_t(address, database)};
 
-    }
-    Error Insert(const std::string& collection, const MessageMeta& file, bool ignore_duplicates) const override {
-        return Error{Insert_t(collection, file, ignore_duplicates)};
-    }
+  }
+  Error Insert(const std::string &collection, const MessageMeta &file, bool ignore_duplicates) const override {
+      return Error{Insert_t(collection, file, ignore_duplicates)};
+  }
 
-    Error InsertAsDatasetMessage(const std::string& collection, const MessageMeta& file,
-                         uint64_t dataset_size, bool ignore_duplicates) const override {
-        return Error{InsertAsDatasetMessage_t(collection, file, dataset_size, ignore_duplicates)};
-    }
+  Error InsertAsDatasetMessage(const std::string &collection, const MessageMeta &file,
+                               uint64_t dataset_size, bool ignore_duplicates) const override {
+      return Error{InsertAsDatasetMessage_t(collection, file, dataset_size, ignore_duplicates)};
+  }
 
+  MOCK_METHOD2(Connect_t, ErrorInterface * (const std::string&, const std::string&));
+  MOCK_CONST_METHOD3(Insert_t, ErrorInterface * (const std::string&, const MessageMeta&, bool));
 
-    MOCK_METHOD2(Connect_t, ErrorInterface * (const std::string&, const std::string&));
-    MOCK_CONST_METHOD3(Insert_t, ErrorInterface * (const std::string&, const MessageMeta&, bool));
+  MOCK_CONST_METHOD4(InsertAsDatasetMessage_t,
+                     ErrorInterface * (const std::string&, const MessageMeta&, uint64_t, bool));
 
+  Error Upsert(const std::string &collection, uint64_t id, const uint8_t* data, uint64_t size) const override {
+      return Error{Upsert_t(collection, id, data, size)};
 
-    MOCK_CONST_METHOD4(InsertAsDatasetMessage_t, ErrorInterface * (const std::string&, const MessageMeta&, uint64_t, bool));
+  }
+  MOCK_CONST_METHOD4(Upsert_t, ErrorInterface * (const std::string&, uint64_t id, const uint8_t* data, uint64_t size));
 
+  Error GetById(const std::string &collection, uint64_t id, MessageMeta* file) const override {
+      return Error{GetById_t(collection, id, file)};
+  }
 
-    Error Upsert(const std::string& collection, uint64_t id, const uint8_t* data, uint64_t size) const override {
-        return Error{Upsert_t(collection, id, data, size)};
+  MOCK_CONST_METHOD3(GetById_t, ErrorInterface * (const std::string&, uint64_t id, MessageMeta*));
 
-    }
-    MOCK_CONST_METHOD4(Upsert_t, ErrorInterface * (const std::string&, uint64_t id, const uint8_t* data, uint64_t size));
+  Error GetDataSetById(const std::string &collection, uint64_t set_id, uint64_t id, MessageMeta* file) const override {
+      return Error{GetSetById_t(collection, set_id, id, file)};
+  }
 
-    Error GetById(const std::string& collection, uint64_t id, MessageMeta* file) const override {
-        return Error{GetById_t(collection, id, file)};
-    }
+  MOCK_CONST_METHOD4(GetSetById_t, ErrorInterface * (const std::string&, uint64_t set_id, uint64_t id, MessageMeta*));
 
-    MOCK_CONST_METHOD3(GetById_t, ErrorInterface * (const std::string&, uint64_t id, MessageMeta*));
+  Error GetStreamInfo(const std::string &collection, StreamInfo* info) const override {
+      return Error{GetStreamInfo_t(collection, info)};
+  }
 
+  MOCK_CONST_METHOD2(GetStreamInfo_t, ErrorInterface * (const std::string&, StreamInfo*));
 
-    Error GetDataSetById(const std::string& collection, uint64_t set_id, uint64_t id, MessageMeta* file) const override {
-        return Error{GetSetById_t(collection, set_id, id, file)};
-    }
+  Error GetLastStream(StreamInfo* info) const override {
+      return Error{GetLastStream_t(info)};
+  }
 
-    MOCK_CONST_METHOD4(GetSetById_t, ErrorInterface * (const std::string&, uint64_t set_id, uint64_t id, MessageMeta*));
+  MOCK_CONST_METHOD1(DeleteStream_t, ErrorInterface * (const std::string&));
 
+  Error DeleteStream(const std::string &stream) const override {
+      return Error{DeleteStream_t(stream)};
+  }
 
-    Error GetStreamInfo(const std::string& collection, StreamInfo* info) const override {
-        return Error{GetStreamInfo_t(collection, info)};
-    }
+  MOCK_CONST_METHOD1(GetLastStream_t, ErrorInterface* (StreamInfo*));
 
-    MOCK_CONST_METHOD2(GetStreamInfo_t, ErrorInterface * (const std::string&, StreamInfo*));
 
-    Error GetLastStream(StreamInfo* info) const override {
-        return Error{GetLastStream_t(info)};
-    }
-
-    MOCK_CONST_METHOD1(GetLastStream_t, ErrorInterface * (StreamInfo*));
-
-
-    // stuff to test db destructor is called and avoid "uninteresting call" messages
-    MOCK_METHOD0(Die, void());
-    virtual ~MockDatabase() override {
-        if (check_destructor)
-            Die();
-    }
-    bool check_destructor{false};
+  // stuff to test db destructor is called and avoid "uninteresting call" messages
+  MOCK_METHOD0(Die, void());
+  virtual ~MockDatabase() override {
+      if (check_destructor)
+          Die();
+  }
+  bool check_destructor{false};
 };
 
 }
diff --git a/common/cpp/src/data_structs/data_structs.cpp b/common/cpp/src/data_structs/data_structs.cpp
index 9dea46940fe52b975f7047cac6cf8615da17d253..16b960a7c005c21a443aee52210526efb289dbbc 100644
--- a/common/cpp/src/data_structs/data_structs.cpp
+++ b/common/cpp/src/data_structs/data_structs.cpp
@@ -17,7 +17,7 @@ using std::chrono::system_clock;
 
 namespace asapo {
 
-const std::string SourceCredentials::kDefaultStream = "detector";
+const std::string SourceCredentials::kDefaultDataSource = "detector";
 const std::string SourceCredentials::kDefaultBeamline = "auto";
 const std::string SourceCredentials::kDefaultBeamtimeId = "auto";
 
diff --git a/common/cpp/src/database/mongodb_client.cpp b/common/cpp/src/database/mongodb_client.cpp
index 8a35d66cff0345d372b42ea078933351860e6256..43a97531831d3dc097c7256fe7a45fca003297b0 100644
--- a/common/cpp/src/database/mongodb_client.cpp
+++ b/common/cpp/src/database/mongodb_client.cpp
@@ -238,8 +238,8 @@ Error MongoDBClient::AddBsonDocumentToArray(bson_t* query, bson_t* update, bool
 }
 
 Error MongoDBClient::InsertAsDatasetMessage(const std::string &collection, const MessageMeta &file,
-                                    uint64_t dataset_size,
-                                    bool ignore_duplicates) const {
+                                            uint64_t dataset_size,
+                                            bool ignore_duplicates) const {
     if (!connected_) {
         return DBErrorTemplates::kNotConnected.Generate();
     }
@@ -251,8 +251,9 @@ Error MongoDBClient::InsertAsDatasetMessage(const std::string &collection, const
     if (err) {
         return err;
     }
-    auto query = BCON_NEW ("$and", "[", "{", "_id", BCON_INT64(file.id), "}", "{", "messages.dataset_substream", "{", "$ne",
-                           BCON_INT64(file.dataset_substream), "}", "}", "]");
+    auto query =
+        BCON_NEW ("$and", "[", "{", "_id", BCON_INT64(file.id), "}", "{", "messages.dataset_substream", "{", "$ne",
+                  BCON_INT64(file.dataset_substream), "}", "}", "]");
     auto update = BCON_NEW ("$setOnInsert", "{",
                             "size", BCON_INT64(dataset_size),
                             "timestamp", BCON_INT64((int64_t) NanosecsEpochFromTimePoint(file.timestamp)),
@@ -334,7 +335,10 @@ Error MongoDBClient::GetById(const std::string &collection, uint64_t id, Message
     return nullptr;
 }
 
-Error MongoDBClient::GetDataSetById(const std::string &collection, uint64_t id_in_set, uint64_t id, MessageMeta* file) const {
+Error MongoDBClient::GetDataSetById(const std::string &collection,
+                                    uint64_t id_in_set,
+                                    uint64_t id,
+                                    MessageMeta* file) const {
     std::string record_str;
     auto err = GetRecordFromDb(collection, id, GetRecordMode::kById, &record_str);
     if (err) {
@@ -358,7 +362,7 @@ Error MongoDBClient::GetDataSetById(const std::string &collection, uint64_t id_i
 }
 
 Error UpdateStreamInfoFromEarliestRecord(const std::string &earliest_record_str,
-                               StreamInfo* info) {
+                                         StreamInfo* info) {
     std::chrono::system_clock::time_point timestamp_created;
     auto parser = JsonStringParser(earliest_record_str);
     auto ok = TimeFromJson(parser, "timestamp", &timestamp_created);
@@ -371,7 +375,7 @@ Error UpdateStreamInfoFromEarliestRecord(const std::string &earliest_record_str,
 }
 
 Error UpdateFinishedStreamInfo(const std::string &metadata,
-                                     StreamInfo* info) {
+                               StreamInfo* info) {
     info->finished = true;
     auto parser = JsonStringParser(metadata);
     std::string next_stream;
@@ -380,7 +384,7 @@ Error UpdateFinishedStreamInfo(const std::string &metadata,
         return DBErrorTemplates::kJsonParseError.Generate(
             "UpdateFinishedStreamInfo: cannot parse finished strean meta response: " + metadata);
     }
-    if (next_stream!=kNoNextStreamKeyword) {
+    if (next_stream != kNoNextStreamKeyword) {
         info->next_stream = next_stream;
     }
     return nullptr;
@@ -406,18 +410,17 @@ Error UpdateStreamInfoFromLastRecord(const std::string &last_record_str,
     return nullptr;
 }
 
-
 Error StreamInfoFromDbResponse(const std::string &last_record_str,
                                const std::string &earliest_record_str,
                                StreamInfo* info) {
     std::chrono::system_clock::time_point timestamp_created;
 
-    auto err = UpdateStreamInfoFromLastRecord(last_record_str,info);
+    auto err = UpdateStreamInfoFromLastRecord(last_record_str, info);
     if (err) {
         return err;
     }
 
-    return UpdateStreamInfoFromEarliestRecord(earliest_record_str,info);
+    return UpdateStreamInfoFromEarliestRecord(earliest_record_str, info);
 
 }
 
@@ -425,9 +428,6 @@ Error MongoDBClient::GetStreamInfo(const std::string &collection, StreamInfo* in
     std::string last_record_str, earliest_record_str;
     auto err = GetRecordFromDb(collection, 0, GetRecordMode::kLast, &last_record_str);
     if (err) {
-        if (err == DBErrorTemplates::kNoRecord) {
-            return nullptr;
-        }
         return err;
     }
     err = GetRecordFromDb(collection, 0, GetRecordMode::kEarliest, &earliest_record_str);
@@ -438,12 +438,12 @@ Error MongoDBClient::GetStreamInfo(const std::string &collection, StreamInfo* in
     return StreamInfoFromDbResponse(last_record_str, earliest_record_str, info);
 }
 
-bool MongoCollectionIsDataStream(const std::string &stream_name)  {
+bool MongoCollectionIsDataStream(const std::string &stream_name) {
     std::string prefix = std::string(kDBDataCollectionNamePrefix) + "_";
     return stream_name.rfind(prefix, 0) == 0;
 }
 
-Error MongoDBClient::UpdateCurrentLastStreamInfo(const std::string& collection_name, StreamInfo* info) const {
+Error MongoDBClient::UpdateCurrentLastStreamInfo(const std::string &collection_name, StreamInfo* info) const {
     StreamInfo next_info;
     auto err = GetStreamInfo(collection_name, &next_info);
     std::string prefix = std::string(kDBDataCollectionNamePrefix) + "_";
@@ -457,7 +457,6 @@ Error MongoDBClient::UpdateCurrentLastStreamInfo(const std::string& collection_n
     return nullptr;
 }
 
-
 Error MongoDBClient::UpdateLastStreamInfo(const char* str, StreamInfo* info) const {
     std::string collection_name{str};
     if (MongoCollectionIsDataStream(collection_name)) {
@@ -503,4 +502,74 @@ Error MongoDBClient::GetLastStream(StreamInfo* info) const {
     return err;
 }
 
+
+Error MongoDBClient::DeleteCollections(const std::string &prefix) const {
+    mongoc_database_t* database;
+    char** strv;
+    bson_error_t error;
+    std::string querystr = "^" + prefix;
+    bson_t* query = BCON_NEW ("name", BCON_REGEX(querystr.c_str(), "i"));
+    bson_t* opts = BCON_NEW ("nameOnly", BCON_BOOL(true),"filter",BCON_DOCUMENT(query));
+    database = mongoc_client_get_database(client_, database_name_.c_str());
+    Error err;
+    if ((strv = mongoc_database_get_collection_names_with_opts(
+        database, opts, &error))) {
+        for (auto i = 0; strv[i]; i++) {
+            DeleteCollection(strv[i]);
+        }
+        bson_strfreev(strv);
+    } else {
+        err = DBErrorTemplates::kDBError.Generate(error.message);
+    }
+
+    bson_destroy(opts);
+    bson_destroy(query);
+    mongoc_database_destroy(database);
+    return nullptr;
+}
+
+Error MongoDBClient::DeleteCollection(const std::string &name) const {
+    bson_error_t error;
+    auto collection = mongoc_client_get_collection(client_, database_name_.c_str(), name.c_str());
+    mongoc_collection_set_write_concern(collection, write_concern_);
+    auto r = mongoc_collection_drop_with_opts(collection, NULL /* opts */, &error);
+    mongoc_collection_destroy(collection);
+    if (!r) {
+        if (error.code == 26) {
+            return DBErrorTemplates::kNoRecord.Generate("collection "+name+" not found in "+database_name_);
+        } else {
+            return DBErrorTemplates::kDBError.Generate(std::string(error.message)+": "+std::to_string(error.code));
+        }
+    }
+    return nullptr;
+}
+
+Error MongoDBClient::DeleteDocumentsInCollection(const std::string &collection_name,const std::string &querystr) const {
+    auto collection = mongoc_client_get_collection(client_, database_name_.c_str(), collection_name.c_str());
+    mongoc_collection_set_write_concern(collection, write_concern_);
+    bson_error_t error;
+    auto query = BCON_NEW ("_id", BCON_REGEX(querystr.c_str(), "i"));
+    if (!mongoc_collection_delete_many(collection, query, NULL,NULL, &error)) {
+        return DBErrorTemplates::kDBError.Generate(error.message);
+    }
+    mongoc_collection_destroy(collection);
+    bson_destroy(query);
+    return nullptr;
+}
+
+Error MongoDBClient::DeleteStream(const std::string &stream) const {
+    std::string data_col = std::string(kDBDataCollectionNamePrefix) + "_" + stream;
+    std::string inprocess_col = "inprocess_" + stream;
+    std::string acks_col = "acks_" + stream;
+    current_collection_name_ = "";
+    auto err = DeleteCollection(data_col);
+    if (err == nullptr) {
+        DeleteCollections(inprocess_col);
+        DeleteCollections(acks_col);
+        std::string querystr = ".*_" + stream+"$";
+        DeleteDocumentsInCollection("current_location",querystr);
+    }
+    return err;
+}
+
 }
diff --git a/common/cpp/src/database/mongodb_client.h b/common/cpp/src/database/mongodb_client.h
index 858bb6d073a329c93e5c159dd945a67d6a0aa3d9..226c134b4d0e17d3ddab76b9fe4b30c31734d7db 100644
--- a/common/cpp/src/database/mongodb_client.h
+++ b/common/cpp/src/database/mongodb_client.h
@@ -51,6 +51,7 @@ class MongoDBClient final : public Database {
     Error GetDataSetById(const std::string& collection, uint64_t id_in_set, uint64_t id, MessageMeta* file) const override;
     Error GetStreamInfo(const std::string& collection, StreamInfo* info) const override;
     Error GetLastStream(StreamInfo* info) const override;
+    Error DeleteStream(const std::string &stream) const override;
     ~MongoDBClient() override;
   private:
     mongoc_client_t* client_{nullptr};
@@ -71,7 +72,9 @@ class MongoDBClient final : public Database {
     Error GetRecordFromDb(const std::string& collection, uint64_t id, GetRecordMode mode, std::string* res) const;
     Error UpdateLastStreamInfo(const char *str, StreamInfo* info) const;
     Error UpdateCurrentLastStreamInfo(const std::string& collection_name, StreamInfo* info) const;
-
+    Error DeleteCollection(const std::string& name) const;
+    Error DeleteCollections(const std::string &prefix) const;
+    Error DeleteDocumentsInCollection(const std::string &collection_name,const std::string &querystr) const;
 };
 
 }
diff --git a/common/cpp/src/request/request_pool.cpp b/common/cpp/src/request/request_pool.cpp
index dc2573b67c47bac923fe8ff8843eeed50a38fe44..32d0cd99943b67e3cdea3e9b5bbefa60b0e3e981 100644
--- a/common/cpp/src/request/request_pool.cpp
+++ b/common/cpp/src/request/request_pool.cpp
@@ -107,7 +107,9 @@ void RequestPool::ProcessRequest(const std::unique_ptr<RequestHandler> &request_
                                  ThreadInformation* thread_info) {
     auto request = GetRequestFromQueue();
     if (request->TimedOut()) {
-        request_handler->ProcessRequestTimeout(request.get());
+        thread_info->lock.unlock();
+        request_handler->ProcessRequestTimeoutUnlocked(request.get());
+        thread_info->lock.lock();
         return;
     }
     request_handler->PrepareProcessingRequestLocked();
diff --git a/common/cpp/unittests/data_structs/test_data_structs.cpp b/common/cpp/unittests/data_structs/test_data_structs.cpp
index e6507bd21cf36d5c8cde8258a7070519ab3de026..0e91e22167dd9e0d825869c73117d4b3cbe7f5b9 100644
--- a/common/cpp/unittests/data_structs/test_data_structs.cpp
+++ b/common/cpp/unittests/data_structs/test_data_structs.cpp
@@ -9,6 +9,7 @@ using asapo::MessageMeta;
 using asapo::StreamInfo;
 using asapo::SourceType;
 using asapo::SourceCredentials;
+using asapo::DeleteStreamOptions;
 
 using ::testing::AtLeast;
 using ::testing::Eq;
@@ -254,5 +255,53 @@ TEST(MessageMetaTests, ISODateFromNanosecsEpoch) {
     }
 }
 
+TEST(DeletaStreamOpt, ConvertToJson) {
+    auto opts = DeleteStreamOptions{};
+
+    std::string expected_json = "{\"ErrorOnNotExist\":true,\"DeleteMeta\":true}";
+    auto json = opts.Json();
+
+    ASSERT_THAT(json,Eq(expected_json));
+}
+
+TEST(DeletaStreamOpt, ConvertToJson2) {
+    auto opts = DeleteStreamOptions{};
+    opts.delete_meta = false;
+    opts.error_on_not_exist = false;
+
+    std::string expected_json = "{\"ErrorOnNotExist\":false,\"DeleteMeta\":false}";
+    auto json = opts.Json();
+
+    ASSERT_THAT(json,Eq(expected_json));
+}
+
+TEST(DeletaStreamOpt, EncodeDecode) {
+    auto opts = DeleteStreamOptions{};
+    ASSERT_THAT(opts.Encode(),Eq(3));
+    opts.delete_meta = false;
+    ASSERT_THAT(opts.Encode(),Eq(2));
+    opts.error_on_not_exist = false;
+    ASSERT_THAT(opts.Encode(),Eq(0));
+    opts.delete_meta = true;
+    ASSERT_THAT(opts.Encode(),Eq(1));
+
+    opts.Decode(0);
+    ASSERT_THAT(opts.error_on_not_exist,Eq(false));
+    ASSERT_THAT(opts.delete_meta,Eq(false));
+
+    opts.Decode(1);
+    ASSERT_THAT(opts.error_on_not_exist,Eq(false));
+    ASSERT_THAT(opts.delete_meta,Eq(true));
+
+    opts.Decode(2);
+    ASSERT_THAT(opts.error_on_not_exist,Eq(true));
+    ASSERT_THAT(opts.delete_meta,Eq(false));
+
+    opts.Decode(3);
+    ASSERT_THAT(opts.error_on_not_exist,Eq(true));
+    ASSERT_THAT(opts.delete_meta,Eq(true));
+
+
+}
 
 }
diff --git a/common/cpp/unittests/request/mocking.h b/common/cpp/unittests/request/mocking.h
index d3687f9469d568a1c831e1335d619b1a3dbbe935..2b157a1b33210855affbd42ac65bd42c72b6b74b 100644
--- a/common/cpp/unittests/request/mocking.h
+++ b/common/cpp/unittests/request/mocking.h
@@ -16,7 +16,7 @@ class MockRequestHandler : public RequestHandler {
     MOCK_METHOD0(ReadyProcessRequest, bool());
     MOCK_METHOD1(TearDownProcessingRequestLocked, void(bool processing_succeeded));
     MOCK_METHOD2(ProcessRequestUnlocked_t, bool (const GenericRequest* request, bool* retry));
-    MOCK_METHOD1(ProcessRequestTimeout, void(GenericRequest* request));
+    MOCK_METHOD1(ProcessRequestTimeoutUnlocked, void(GenericRequest* request));
     uint64_t retry_counter = 0;
     bool ProcessRequestUnlocked(GenericRequest* request, bool* retry)  override {
         retry_counter = request->GetRetryCounter();
diff --git a/common/cpp/unittests/request/test_request_pool.cpp b/common/cpp/unittests/request/test_request_pool.cpp
index 44bfbb8c410578c7ae4be49d03ad8238ad6d15b1..be2a938e227051524dbc13ad07dc7681de77db29 100644
--- a/common/cpp/unittests/request/test_request_pool.cpp
+++ b/common/cpp/unittests/request/test_request_pool.cpp
@@ -109,7 +109,7 @@ TEST_F(RequestPoolTests, TimeOut) {
     EXPECT_CALL(*mock_request_handler, ReadyProcessRequest()).Times(1).WillRepeatedly(Return(true));
     EXPECT_CALL(*mock_request_handler, PrepareProcessingRequestLocked()).Times(0);
     EXPECT_CALL(*mock_request_handler, ProcessRequestUnlocked_t(_, _)).Times(0);
-    EXPECT_CALL(*mock_request_handler, ProcessRequestTimeout(_)).Times(1);
+    EXPECT_CALL(*mock_request_handler, ProcessRequestTimeoutUnlocked(_)).Times(1);
 
     auto err = pool.AddRequest(std::move(request));
     std::this_thread::sleep_for(std::chrono::milliseconds(10));
diff --git a/common/go/src/asapo_common/utils/version.go b/common/go/src/asapo_common/utils/version.go
index 5e8e03a9dff6ade41f5df8bf421d03bc2ff91987..3b4c6b2601e5f992df55d2731b51a27d640a4656 100644
--- a/common/go/src/asapo_common/utils/version.go
+++ b/common/go/src/asapo_common/utils/version.go
@@ -9,38 +9,55 @@ import (
 )
 
 
-func VersionToNumber(ver string) int {
+type VersionNum struct{
+	Major int
+	Minor int
+	Id int
+}
+
+func ParseVersion(ver string) (result VersionNum,err error ) {
 	ver = strings.TrimPrefix(ver,"v")
-	floatNum, err := strconv.ParseFloat(ver, 64)
+	vers := strings.Split(ver,".")
+	if len(vers)!=2 {
+		err = errors.New("cannot parse version")
+		return
+	}
+	maj, err := strconv.Atoi(vers[0])
 	if err!=nil {
-		return 0
+		err = errors.New("cannot parse version")
+		return
 	}
-	return int(floatNum*1000)
+	min, err := strconv.Atoi(vers[1])
+	if err!=nil {
+		err = errors.New("cannot parse version")
+		return
+	}
+	result.Major = maj
+	result.Minor = min
+	result.Id = maj*1000+min
+	return
 }
 
 
-func ExtractVersion(r *http.Request) (int, error) {
+func ExtractVersion(r *http.Request) (VersionNum,error ) {
 	vars := mux.Vars(r)
 	ver_str, ok := vars["apiver"]
 	if !ok {
-		return 0, errors.New("cannot extract version")
-	}
-	ver := VersionToNumber(ver_str)
-	if ver == 0 {
-		return 0, errors.New("cannot extract version")
+		return VersionNum{},errors.New("cannot extract version")
 	}
-	return ver, nil
+	return ParseVersion(ver_str)
 }
 
-func PrecheckApiVersion(w http.ResponseWriter, r *http.Request, currentVersion string) (apiVer int, ok bool) {
-	apiVer, err := ExtractVersion(r)
+func PrecheckApiVersion(w http.ResponseWriter, r *http.Request, currentVersion string) (VersionNum, bool) {
+	ver, err := ExtractVersion(r)
 	if err != nil {
 		WriteServerError(w, err, http.StatusBadRequest)
-		return 0, false
+		return VersionNum{}, false
 	}
-	if apiVer > VersionToNumber(currentVersion) {
+	curVer,_ := ParseVersion(currentVersion)
+	if ver.Id > curVer.Id {
 		WriteServerError(w, errors.New("version not supported"), http.StatusUnsupportedMediaType)
-		return 0, false
+		return VersionNum{}, false
 	}
-	return apiVer, true
+	return ver, true
 }
diff --git a/consumer/api/cpp/include/asapo/consumer/consumer.h b/consumer/api/cpp/include/asapo/consumer/consumer.h
index dba769567f4fec4dbd39de384f50f79205a024bc..ce97157ab09c6e7f12581e81aa8086da51c9c7af 100644
--- a/consumer/api/cpp/include/asapo/consumer/consumer.h
+++ b/consumer/api/cpp/include/asapo/consumer/consumer.h
@@ -90,6 +90,15 @@ class Consumer {
   //! Get list of streams with filter, set from to "" to get all streams
     virtual StreamInfos GetStreamList(std::string from,  StreamFilter filter, Error* err) = 0;
 
+  //! Delete stream
+  /*!
+    \param stream - stream to delete
+    \param options - delete stream options
+    \return Error - will be nullptr on success
+  */
+    virtual Error DeleteStream(std::string stream, DeleteStreamOptions options) = 0;
+
+
     //! Get current number of messages in stream
     /*!
       \param stream - stream to use
diff --git a/consumer/api/cpp/src/consumer_impl.cpp b/consumer/api/cpp/src/consumer_impl.cpp
index 32ecf6a3671b00e337f0bc78c0887712714c4404..b6eb00b86c9f880014ac1e5cd1a7d70f13593829 100644
--- a/consumer/api/cpp/src/consumer_impl.cpp
+++ b/consumer/api/cpp/src/consumer_impl.cpp
@@ -80,6 +80,7 @@ Error ConsumerErrorFromNoDataResponse(const std::string &response) {
 Error ConsumerErrorFromHttpCode(const RequestOutput* response, const HttpCode &code) {
     switch (code) {
         case HttpCode::OK:return nullptr;
+        case HttpCode::NoContent:return nullptr;
         case HttpCode::PartialContent:return ConsumerErrorFromPartialDataResponse(response->to_string());
         case HttpCode::BadRequest:return ConsumerErrorTemplates::kWrongInput.Generate(response->to_string());
         case HttpCode::Unauthorized:return ConsumerErrorTemplates::kWrongInput.Generate(response->to_string());
@@ -92,19 +93,29 @@ Error ConsumerErrorFromHttpCode(const RequestOutput* response, const HttpCode &c
 }
 Error ConsumerErrorFromServerError(const Error &server_err) {
     if (server_err == HttpErrorTemplates::kTransferError) {
-        return ConsumerErrorTemplates::kInterruptedTransaction.Generate(
-            "error processing request: " + server_err->Explain());
+        return ConsumerErrorTemplates::kInterruptedTransaction.Generate(server_err->Explain());
     } else {
-        return ConsumerErrorTemplates::kUnavailableService.Generate(
-            "error processing request: " + server_err->Explain());
+        return ConsumerErrorTemplates::kUnavailableService.Generate(server_err->Explain());
     }
 }
 
-Error ProcessRequestResponce(const Error &server_err, const RequestOutput* response, const HttpCode &code) {
+Error ProcessRequestResponce(const RequestInfo &request,
+                             const Error &server_err,
+                             const RequestOutput* response,
+                             const HttpCode &code) {
+    Error err;
     if (server_err != nullptr) {
-        return ConsumerErrorFromServerError(server_err);
+        err =  ConsumerErrorFromServerError(server_err);
+    } else {
+        err =  ConsumerErrorFromHttpCode(response, code);
     }
-    return ConsumerErrorFromHttpCode(response, code);
+
+    if (err!=nullptr) {
+        std::string prefix = "Error processing request" + request.api;
+        err->Prepend(prefix);
+    }
+    return err;
+
 }
 
 ConsumerImpl::ConsumerImpl(std::string server_uri,
@@ -118,7 +129,7 @@ ConsumerImpl::ConsumerImpl(std::string server_uri,
     // net_client__ will be lazy initialized
 
     if (source_credentials_.data_source.empty()) {
-        source_credentials_.data_source = SourceCredentials::kDefaultStream;
+        source_credentials_.data_source = SourceCredentials::kDefaultDataSource;
     }
 
 }
@@ -178,7 +189,7 @@ Error ConsumerImpl::ProcessRequest(RequestOutput* response, const RequestInfo &r
     if (err && service_uri) {
         service_uri->clear();
     }
-    return ProcessRequestResponce(err, response, code);
+    return ProcessRequestResponce(request, err, response, code);
 }
 
 RequestInfo ConsumerImpl::GetDiscoveryRequest(const std::string &service_name) const {
@@ -938,7 +949,7 @@ Error ConsumerImpl::GetServerVersionInfo(std::string* server_info, bool* support
     if (err) {
         return err;
     }
-    return ExtractVersionFromResponse(output.string_output,"consumer",server_info,supported);
+    return ExtractVersionFromResponse(output.string_output, "consumer", server_info, supported);
 }
 
 Error ConsumerImpl::GetVersionInfo(std::string* client_info, std::string* server_info, bool* supported) {
@@ -951,10 +962,28 @@ Error ConsumerImpl::GetVersionInfo(std::string* client_info, std::string* server
     }
 
     if (server_info != nullptr || supported != nullptr) {
-        return GetServerVersionInfo(server_info,supported);
+        return GetServerVersionInfo(server_info, supported);
     }
 
     return nullptr;
 }
 
+RequestInfo ConsumerImpl::GetDeleteStreamRequest(std::string stream, DeleteStreamOptions options) const {
+    RequestInfo ri;
+    ri.api = "/" + kConsumerProtocol.GetBrokerVersion() + "/beamtime/" + source_credentials_.beamtime_id + "/"
+        + source_credentials_.data_source +
+        +"/" + std::move(stream) +
+        "/delete";
+    ri.post = true;
+    ri.body = options.Json();
+    return ri;
+}
+
+Error ConsumerImpl::DeleteStream(std::string stream, DeleteStreamOptions options) {
+    auto ri = GetDeleteStreamRequest(std::move(stream), options);
+    Error err;
+    BrokerRequestWithTimeout(ri, &err);
+    return err;
+}
+
 }
\ No newline at end of file
diff --git a/consumer/api/cpp/src/consumer_impl.h b/consumer/api/cpp/src/consumer_impl.h
index ef0ee3ac8679b32d415dee65442565e28da66558..7f7411c99e8984ea1b1c58ae7b9d747e79f3177d 100644
--- a/consumer/api/cpp/src/consumer_impl.h
+++ b/consumer/api/cpp/src/consumer_impl.h
@@ -46,7 +46,7 @@ struct RequestOutput {
     }
 };
 
-Error ProcessRequestResponce(const Error& server_err, const RequestOutput* response, const HttpCode& code);
+Error ProcessRequestResponce(const RequestInfo& request, const Error& server_err, const RequestOutput* response, const HttpCode& code);
 Error ConsumerErrorFromNoDataResponse(const std::string& response);
 Error ConsumerErrorFromPartialDataResponse(const std::string& response);
 DataSet DecodeDatasetFromResponse(std::string response, Error* err);
@@ -85,7 +85,7 @@ class ConsumerImpl final : public asapo::Consumer {
     Error GetById(uint64_t id, MessageMeta* info, MessageData* data, std::string stream) override;
 
     Error GetVersionInfo(std::string* client_info,std::string* server_info, bool* supported) override;
-
+    Error DeleteStream(std::string stream, DeleteStreamOptions options) override;
     void SetTimeout(uint64_t timeout_ms) override;
     void ForceNoRdma() override;
 
@@ -172,7 +172,7 @@ class ConsumerImpl final : public asapo::Consumer {
   uint64_t ParseGetCurrentCountResponce(Error* err, const std::string &responce) const;
   RequestInfo GetDiscoveryRequest(const std::string &service_name) const;
   RequestInfo GetVersionRequest() const;
-
+  RequestInfo GetDeleteStreamRequest(std::string stream, DeleteStreamOptions options) const;
 };
 
 }
diff --git a/consumer/api/cpp/unittests/test_consumer_impl.cpp b/consumer/api/cpp/unittests/test_consumer_impl.cpp
index be641e76cd2c11787ba1f763aa4a75592300e762..94cbd109aa2b77dfff47b32c521b5beefca39f26 100644
--- a/consumer/api/cpp/unittests/test_consumer_impl.cpp
+++ b/consumer/api/cpp/unittests/test_consumer_impl.cpp
@@ -71,7 +71,7 @@ class ConsumerImplTests : public Test {
   MessageMeta info;
   std::string expected_server_uri = "test:8400";
   std::string expected_broker_uri = "asapo-broker:5005";
-  std::string expected_consumer_protocol = "v0.1";
+  std::string expected_consumer_protocol = "v0.2";
   std::string expected_broker_api = expected_broker_uri + "/" + expected_consumer_protocol;
   std::string expected_fts_uri = "asapo-file-transfer:5008";
   std::string expected_token = "token";
@@ -143,7 +143,7 @@ class ConsumerImplTests : public Test {
   }
   void MockGetServiceUri(std::string service, std::string result) {
       EXPECT_CALL(mock_http_client, Get_t(HasSubstr(expected_server_uri + "/asapo-discovery/v0.1/" + service+"?token="
-          + expected_token+"&protocol=v0.1"), _,
+          + expected_token+"&protocol="+expected_consumer_protocol), _,
                                           _)).WillOnce(DoAll(
           SetArgPointee<1>(HttpCode::OK),
           SetArgPointee<2>(nullptr),
@@ -1058,6 +1058,25 @@ TEST_F(ConsumerImplTests, GetDatasetByIdUsesCorrectUri) {
     consumer->GetDatasetById(expected_dataset_id, 0, expected_stream, &err);
 }
 
+TEST_F(ConsumerImplTests, DeleteStreamUsesCorrectUri) {
+    MockGetBrokerUri();
+    std::string expected_delete_stream_query_string = "{\"ErrorOnNotExist\":true,\"DeleteMeta\":true}";
+    EXPECT_CALL(mock_http_client, Post_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/"+expected_stream+"/delete"
+                                             + "?token=" + expected_token, _,
+                                         expected_delete_stream_query_string, _, _)).WillOnce(DoAll(
+        SetArgPointee<3>(HttpCode::OK),
+        SetArgPointee<4>(nullptr),
+        Return("")
+    ));
+
+    asapo::DeleteStreamOptions opt;
+    opt.delete_meta = true;
+    opt.error_on_not_exist = true;
+    auto err = consumer->DeleteStream(expected_stream,opt);
+    ASSERT_THAT(err, Eq(nullptr));
+
+}
+
 TEST_F(ConsumerImplTests, GetStreamListUsesCorrectUri) {
     MockGetBrokerUri();
     std::string return_streams =
@@ -1382,7 +1401,7 @@ TEST_F(ConsumerImplTests, GetVersionInfoWithServer) {
 
     std::string result = R"({"softwareVersion":"20.03.1, build 7a9294ad","clientSupported":"no", "clientProtocol":{"versionInfo":"v0.2"}})";
 
-    EXPECT_CALL(mock_http_client, Get_t(HasSubstr(expected_server_uri + "/asapo-discovery/v0.1/version?token=token&client=consumer&protocol=v0.1"), _,_)).WillOnce(DoAll(
+    EXPECT_CALL(mock_http_client, Get_t(HasSubstr(expected_server_uri + "/asapo-discovery/v0.1/version?token=token&client=consumer&protocol="+expected_consumer_protocol), _,_)).WillOnce(DoAll(
         SetArgPointee<1>(HttpCode::OK),
         SetArgPointee<2>(nullptr),
         Return(result)));
diff --git a/consumer/api/python/asapo_consumer.pxd b/consumer/api/python/asapo_consumer.pxd
index fa755a375a83d05f4469cb4fad4030ea0130caf2..e2c7846b82f3dbee3e570a7a04a0667016b1c411 100644
--- a/consumer/api/python/asapo_consumer.pxd
+++ b/consumer/api/python/asapo_consumer.pxd
@@ -59,6 +59,9 @@ cdef extern from "asapo/asapo_consumer.h" namespace "asapo":
   StreamFilter StreamFilter_kAllStreams "asapo::StreamFilter::kAllStreams"
   StreamFilter StreamFilter_kFinishedStreams "asapo::StreamFilter::kFinishedStreams"
   StreamFilter StreamFilter_kUnfinishedStreams "asapo::StreamFilter::kUnfinishedStreams"
+  struct DeleteStreamOptions:
+    bool delete_meta
+    bool error_on_not_exist
 
 cdef extern from "asapo/asapo_consumer.h" namespace "asapo" nogil:
     cdef cppclass Consumer:
@@ -88,6 +91,7 @@ cdef extern from "asapo/asapo_consumer.h" namespace "asapo" nogil:
         void SetResendNacs(bool resend, uint64_t delay_ms, uint64_t resend_attempts)
         void InterruptCurrentOperation()
         Error GetVersionInfo(string* client_info,string* server_info, bool* supported)
+        Error DeleteStream(string stream, DeleteStreamOptions options)
 
 cdef extern from "asapo/asapo_consumer.h" namespace "asapo" nogil:
     cdef cppclass ConsumerFactory:
diff --git a/consumer/api/python/asapo_consumer.pyx.in b/consumer/api/python/asapo_consumer.pyx.in
index 1db15ed4a72531c1bb9403c632a4ea9231f4da58..6825c2e3ec5d029cba9f505d20ff3080a45ae3cb 100644
--- a/consumer/api/python/asapo_consumer.pyx.in
+++ b/consumer/api/python/asapo_consumer.pyx.in
@@ -292,6 +292,26 @@ cdef class PyConsumer:
             throw_exception(err)
         return id
 
+    def delete_stream(self, stream = 'default', bool error_on_not_exist = True):
+        """
+         :param stream: stream name
+         :type stream: string
+         :param error_on_not_exist: emit AsapoWrongInputError if set to True and the stream does not exist
+         :type error_on_not_exist: bool
+         :raises:
+            AsapoWrongInputError: wrong input (authorization, ...)
+            AsapoTimeoutError: request not finished for a given timeout
+            AsapoConsumerError: other errors
+        """
+        cdef Error err
+        cdef DeleteStreamOptions opts
+        cdef string b_stream = _bytes(stream)
+        opts.error_on_not_exist = error_on_not_exist
+        with nogil:
+            err = self.c_consumer.get().DeleteStream(b_stream,opts)
+        if err:
+            throw_exception(err)
+
     def get_unacknowledged_messages(self, group_id, uint64_t from_id = 0, uint64_t to_id = 0, stream = "default"):
         cdef Error err
         cdef string b_group_id = _bytes(group_id)
diff --git a/deploy/asapo_services/scripts/discovery.json.tpl b/deploy/asapo_services/scripts/discovery.json.tpl
index 98bfa9a4e41da9c19fcebb3c24503d75946f6b9e..0dc045609c0a072f86bc5af6140ce32c516aa117 100644
--- a/deploy/asapo_services/scripts/discovery.json.tpl
+++ b/deploy/asapo_services/scripts/discovery.json.tpl
@@ -2,7 +2,7 @@
   "Mode": "consul",
   "Receiver": {
     "MaxConnections": 32,
-    "UseIBAddress": {{ keyOrDefault "use_ib_for_receiver" "true" }}
+    "UseIBAddress": {{ keyOrDefault "use_ib_for_receiver" "false" }}
   },
   "Port": {{ env "NOMAD_PORT_discovery" }},
   "LogLevel": "{{ keyOrDefault "log_level" "info" }}"
diff --git a/deploy/asapo_services/scripts/receiver.json.tpl b/deploy/asapo_services/scripts/receiver.json.tpl
index e5214deed638118fac712fe6dced5bf56ebaf790..2a69b27e93c39ed122e3175d9a82beae8ffe582c 100644
--- a/deploy/asapo_services/scripts/receiver.json.tpl
+++ b/deploy/asapo_services/scripts/receiver.json.tpl
@@ -8,7 +8,7 @@
   "AuthorizationInterval": 10000,
   "ListenPort": {{ env "NOMAD_PORT_recv" }},
   "DataServer": {
-    "AdvertiseURI": "{{ if or (env "meta.ib_address") "none" | regexMatch "none" }}{{ env "NOMAD_IP_recv" }}{{ else }}{{ env "meta.ib_address" }}{{ end }}:{{ env "NOMAD_PORT_recv_ds" }}",
+    "AdvertiseURI": "{{ if env "NOMAD_META_receiver_network_modes" | regexMatch "tcp" }}{{ env "NOMAD_IP_recv" }}{{ else if or (env "meta.ib_address") "none" | regexMatch "none" }}{{ env "NOMAD_IP_recv" }}{{ else }}{{ env "meta.ib_address" }}{{ end }}:{{ env "NOMAD_PORT_recv_ds" }}",
     "NThreads": {{ env "NOMAD_META_receiver_dataserver_nthreads" }},
     "ListenPort": {{ env "NOMAD_PORT_recv_ds" }},
     "NetworkMode": ["{{ if or (env "meta.ib_address") "none" | regexMatch "none" }}{{ printf "%s" "tcp" }}{{ else }}{{ env "NOMAD_META_receiver_network_modes" |  split "," | join "\",\"" }}{{ end }}"]
diff --git a/discovery/src/asapo_discovery/protocols/hard_coded_consumer.go b/discovery/src/asapo_discovery/protocols/hard_coded_consumer.go
index 15a8f7b5c89c2f31b4c029279254523aa2d0c3db..cb76a8fddd5f2250ca2c3168392c953cd22af02a 100644
--- a/discovery/src/asapo_discovery/protocols/hard_coded_consumer.go
+++ b/discovery/src/asapo_discovery/protocols/hard_coded_consumer.go
@@ -1,7 +1,25 @@
 package protocols
 
+import "time"
+
+func getTimefromDate(date string) time.Time{
+	res,err := time.Parse("2006-01-02", date)
+	if err!=nil {
+		panic(err)
+	}
+	return res
+}
+
 func GetSupportedConsumerProtocols() []Protocol {
 	return []Protocol{
+		Protocol{"v0.2",
+			map[string]string{
+				"Discovery": "v0.1",
+				"Authorizer": "v0.1",
+				"Broker": "v0.2",
+				"File Transfer": "v0.1",
+				"Data cache service": "v0.1",
+			}, &protocolValidatorCurrent{}},
 		Protocol{"v0.1",
 			map[string]string{
 				"Discovery": "v0.1",
@@ -9,6 +27,6 @@ func GetSupportedConsumerProtocols() []Protocol {
 				"Broker": "v0.1",
 				"File Transfer": "v0.1",
 				"Data cache service": "v0.1",
-			}, &protocolValidatorCurrent{}},
+			}, &protocolValidatorDeprecated{getTimefromDate("2022-06-01")}},
 	}
 }
diff --git a/discovery/src/asapo_discovery/protocols/hard_coded_producer.go b/discovery/src/asapo_discovery/protocols/hard_coded_producer.go
index 515e242930e9c55231cb35dbc830cf25eb2180ec..a361478ccf442da1badfae0bc56c87b61164fc0f 100644
--- a/discovery/src/asapo_discovery/protocols/hard_coded_producer.go
+++ b/discovery/src/asapo_discovery/protocols/hard_coded_producer.go
@@ -2,11 +2,16 @@ package protocols
 
 func GetSupportedProducerProtocols() []Protocol {
 	return []Protocol{
+		Protocol{"v0.2",
+			map[string]string{
+				"Discovery": "v0.1",
+				"Receiver": "v0.2",
+			}, &protocolValidatorCurrent{}},
 		Protocol{"v0.1",
 			map[string]string{
 				"Discovery": "v0.1",
 				"Receiver": "v0.1",
-			}, &protocolValidatorCurrent{}},
+			}, &protocolValidatorDeprecated{getTimefromDate("2022-06-01")}},
 	}
 }
 
diff --git a/discovery/src/asapo_discovery/protocols/protocol_test.go b/discovery/src/asapo_discovery/protocols/protocol_test.go
index 458ff5589920fdbbe4111689fe7fd29cec50a5fa..52c60833a795285fe62f4c09afb7681e03db4e1a 100644
--- a/discovery/src/asapo_discovery/protocols/protocol_test.go
+++ b/discovery/src/asapo_discovery/protocols/protocol_test.go
@@ -15,13 +15,15 @@ type protocolTest struct {
 
 var protocolTests = []protocolTest{
 // consumer
-	{"consumer", "v0.1", true, "", "current protocol"},
-	{"consumer", "v0.2", false, "unknown", "unknown protocol"},
+	{"consumer", "v0.2", true, "current", "v0.2"},
+	{"consumer", "v0.1", true, "deprecates", "v0.1"},
+	{"consumer", "v1000.2", false, "unknown", "unknown protocol"},
 
 
 // producer
-	{"producer", "v0.1", true, "", "current protocol"},
-	{"producer", "v0.2", false, "unknown", "unknown protocol"},
+	{"producer", "v0.2", true, "current", "v0.2"},
+	{"producer", "v0.1", true, "deprecates", "v0.1"},
+	{"producer", "v1000.2", false, "unknown", "unknown protocol"},
 }
 
 func TestProtocolTests(t *testing.T) {
diff --git a/discovery/src/asapo_discovery/protocols/protocols.go b/discovery/src/asapo_discovery/protocols/protocols.go
index ada29e7f0e67348e233e8eb21f40903c0c12080a..89561b378cf0fecdd9f11f1043505b61ca8ca18c 100644
--- a/discovery/src/asapo_discovery/protocols/protocols.go
+++ b/discovery/src/asapo_discovery/protocols/protocols.go
@@ -1,6 +1,9 @@
 package protocols
 
-import "errors"
+import (
+	"errors"
+	"time"
+)
 
 type protocolValidator interface {
 	IsValid() (hint string, ok bool)
@@ -13,6 +16,17 @@ func (p *protocolValidatorCurrent) IsValid() (hint string, ok bool) {
 	return "current", true
 }
 
+type protocolValidatorDeprecated struct {
+	deprecates time.Time
+}
+
+func (p *protocolValidatorDeprecated) IsValid() (hint string, ok bool) {
+	if time.Now().After(p.deprecates) {
+		return "deprecated at "+p.deprecates.String(), false
+	}
+	return "deprecates at "+p.deprecates.String(), true
+}
+
 type Protocol struct {
 	Version   string
 	MicroserviceAPis map[string]string
diff --git a/discovery/src/asapo_discovery/server/get_version_test.go b/discovery/src/asapo_discovery/server/get_version_test.go
index 5a1e99c8a159a36b9c884b51b83a8d97d11c851e..74c1516234b8ba5bbf94256118a174901174c8ee 100644
--- a/discovery/src/asapo_discovery/server/get_version_test.go
+++ b/discovery/src/asapo_discovery/server/get_version_test.go
@@ -31,18 +31,18 @@ var versionTests = []struct {
 
 	{"?client=consumer&protocol=v0.1", versionInfo{
 		SoftwareVersion: coreVer,
-		ClientProtocol:     protocols.ProtocolInfo{"v0.1 (current)",
+		ClientProtocol:     protocols.ProtocolInfo{"v0.1 (deprecates at 2022-06-01 00:00:00 +0000 UTC)",
 			map[string]string{"Authorizer":"v0.1", "Broker":"v0.1", "Data cache service":"v0.1", "Discovery":"v0.1", "File Transfer":"v0.1"}},
 		ClientSupported:            "yes",
 	}, http.StatusOK, "consumer client"},
 	{"?client=producer&protocol=v0.1", versionInfo{
 		SoftwareVersion:        coreVer,
-		ClientProtocol: protocols.ProtocolInfo{"v0.1 (current)",map[string]string{"Discovery":"v0.1", "Receiver":"v0.1"}},
+		ClientProtocol: protocols.ProtocolInfo{"v0.1 (deprecates at 2022-06-01 00:00:00 +0000 UTC)",map[string]string{"Discovery":"v0.1", "Receiver":"v0.1"}},
 		ClientSupported:        "yes",
 	}, http.StatusOK, "producer client"},
-	{"?client=producer&protocol=v0.2", versionInfo{
+	{"?client=producer&protocol=v1000.2", versionInfo{
 		SoftwareVersion:        coreVer,
-		ClientProtocol: protocols.ProtocolInfo{"v0.2 (unknown protocol)",nil},
+		ClientProtocol: protocols.ProtocolInfo{"v1000.2 (unknown protocol)",nil},
 		ClientSupported:        "no",
 	}, http.StatusOK, "producer client unknown"},
 }
diff --git a/discovery/src/asapo_discovery/server/routes_test.go b/discovery/src/asapo_discovery/server/routes_test.go
index e72d0c55abfba927659cf6e56f651046b504e2a0..394a2625047932bd968d8f4b5020c0249369a305 100644
--- a/discovery/src/asapo_discovery/server/routes_test.go
+++ b/discovery/src/asapo_discovery/server/routes_test.go
@@ -67,8 +67,8 @@ message string
 
 var receiverTests = []requestTest {
 	{"/" + version.GetDiscoveryApiVersion()+"/asapo-receiver",http.StatusBadRequest,"protocol missing"},
-	{"/" + version.GetDiscoveryApiVersion()+"/asapo-receiver?protocol=v0.2",http.StatusUnsupportedMediaType,"wrong protocol"},
-	{"/" + version.GetDiscoveryApiVersion()+"/asapo-receiver?protocol=v0.1",http.StatusOK,"ok"},
+	{"/" + version.GetDiscoveryApiVersion()+"/asapo-receiver?protocol=v1000.2",http.StatusUnsupportedMediaType,"wrong protocol"},
+	{"/" + version.GetDiscoveryApiVersion()+"/asapo-receiver?protocol=v0.2",http.StatusOK,"ok"},
 }
 
 func (suite *GetServicesTestSuite) TestGetReceivers() {
@@ -93,8 +93,8 @@ func (suite *GetServicesTestSuite) TestGetReceivers() {
 
 var brokerTests = []requestTest {
 	{"/" + version.GetDiscoveryApiVersion()+"/asapo-broker",http.StatusBadRequest,"protocol missing"},
-	{"/" + version.GetDiscoveryApiVersion()+"/asapo-broker?protocol=v0.2",http.StatusUnsupportedMediaType,"wrong protocol"},
-	{"/" + version.GetDiscoveryApiVersion()+"/asapo-broker?protocol=v0.1",http.StatusOK,"ok"},
+	{"/" + version.GetDiscoveryApiVersion()+"/asapo-broker?protocol=v1000.2",http.StatusUnsupportedMediaType,"wrong protocol"},
+	{"/" + version.GetDiscoveryApiVersion()+"/asapo-broker?protocol=v0.2",http.StatusOK,"ok"},
 }
 func (suite *GetServicesTestSuite) TestGetBroker() {
 	for _,test:= range brokerTests {
diff --git a/file_transfer/src/asapo_file_transfer/server/transfer.go b/file_transfer/src/asapo_file_transfer/server/transfer.go
index e6528ff34e280ccd9510d1e910f3038bb3ccb2b9..cb50067dca71a6fcb13bb08ee4b0a9c0c03e22d2 100644
--- a/file_transfer/src/asapo_file_transfer/server/transfer.go
+++ b/file_transfer/src/asapo_file_transfer/server/transfer.go
@@ -10,6 +10,7 @@ import (
 	"net/http"
 	"os"
 	"path"
+	"strconv"
 )
 
 
@@ -18,13 +19,12 @@ type fileTransferRequest struct {
 	FileName string
 }
 
-
 func Exists(name string) bool {
-	fi, err := os.Stat(name)
-	return err==nil && !fi.IsDir()
+	f, err := os.Open(name)
+	defer f.Close()
+	return err==nil
 }
 
-
 func checkClaim(r *http.Request,request* fileTransferRequest) (int,error) {
 	var extraClaim structs.FolderTokenTokenExtraClaim
 	if err := utils.JobClaimFromContext(r, nil, &extraClaim); err != nil {
@@ -40,7 +40,7 @@ func checkClaim(r *http.Request,request* fileTransferRequest) (int,error) {
 
 func checkFileExists(r *http.Request,name string) (int,error) {
 	if !Exists(name) {
-		err_txt := "file "+name+" does not exist"
+		err_txt := "file "+name+" does not exist or cannot be read"
 		log.Error("cannot transfer file: "+err_txt)
 		return http.StatusNotFound,errors.New(err_txt)
 	}
@@ -82,7 +82,7 @@ func serveFileSize(w http.ResponseWriter, r *http.Request, fullName string) {
 		utils.WriteServerError(w,err,http.StatusBadRequest)
 		log.Error("Error getting file size for " + fullName+": "+err.Error())
 	}
-	log.Debug("Sending file size for " + fullName)
+	log.Debug("Sending file size "+strconv.FormatInt(fi.Size(),10)+" for " + fullName)
 
 	fsize.FileSize = fi.Size()
 	b,_ := json.Marshal(&fsize)
diff --git a/producer/api/cpp/include/asapo/producer/producer.h b/producer/api/cpp/include/asapo/producer/producer.h
index a2fde18ebeeb2f3ae0a02784bf6f5ccf39382bc3..9091f7df846b4b996b7c7d01b1c2298dc105a8db 100644
--- a/producer/api/cpp/include/asapo/producer/producer.h
+++ b/producer/api/cpp/include/asapo/producer/producer.h
@@ -40,6 +40,15 @@ class Producer {
     */
     virtual StreamInfo GetStreamInfo(std::string stream, uint64_t timeout_ms, Error* err) const = 0;
 
+  //! Delete stream
+  /*!
+    \param stream - stream to delete
+    \param timeout_ms - operation timeout in milliseconds
+    \param options - delete stream options
+    \return Error - will be nullptr on success
+  */
+  virtual Error DeleteStream(std::string stream, uint64_t timeout_ms, DeleteStreamOptions options) const = 0;
+
   //! Get stream that has the newest ingested data
   /*!
     \param timeout_ms - operation timeout in milliseconds
diff --git a/producer/api/cpp/src/producer_impl.cpp b/producer/api/cpp/src/producer_impl.cpp
index 2a4d38a5b11699300300d77e93db4992ae84ce23..09ed2e2f72a9f259308e1898f86360c14ae890d1 100644
--- a/producer/api/cpp/src/producer_impl.cpp
+++ b/producer/api/cpp/src/producer_impl.cpp
@@ -218,7 +218,7 @@ Error ProducerImpl::SetCredentials(SourceCredentials source_cred) {
     }
 
     if (source_cred.data_source.empty()) {
-        source_cred.data_source = SourceCredentials::kDefaultStream;
+        source_cred.data_source = SourceCredentials::kDefaultDataSource;
     }
 
     if (source_cred.beamline.empty()) {
@@ -300,20 +300,23 @@ Error ProducerImpl::SendFile(const MessageHeader &message_header,
 
 }
 
-using RequestCallbackWithPromise = void (*)(std::shared_ptr<std::promise<StreamInfoResult>>,
+template<class T >
+using RequestCallbackWithPromise = void (*)(std::shared_ptr<std::promise<T>>,
                                             RequestCallbackPayload header, Error err);
 
-RequestCallback unwrap_callback(RequestCallbackWithPromise callback,
-                                std::unique_ptr<std::promise<StreamInfoResult>> promise) {
-    auto shared_promise = std::shared_ptr<std::promise<StreamInfoResult>>(std::move(promise));
+
+template<class T>
+RequestCallback unwrap_callback(RequestCallbackWithPromise<T> callback,
+                                std::unique_ptr<std::promise<T>> promise) {
+    auto shared_promise = std::shared_ptr<std::promise<T>>(std::move(promise));
     RequestCallback wrapper = [ = ](RequestCallbackPayload payload, Error err) -> void {
         callback(shared_promise, std::move(payload), std::move(err));
     };
     return wrapper;
 }
 
-void ActivatePromise(std::shared_ptr<std::promise<StreamInfoResult>> promise, RequestCallbackPayload payload,
-                     Error err) {
+void ActivatePromiseForStreamInfo(std::shared_ptr<std::promise<StreamInfoResult>> promise, RequestCallbackPayload payload,
+                                  Error err) {
     StreamInfoResult res;
     if (err == nullptr) {
         auto ok = res.sinfo.SetFromJson(payload.response);
@@ -327,22 +330,31 @@ void ActivatePromise(std::shared_ptr<std::promise<StreamInfoResult>> promise, Re
     } catch(...) {}
 }
 
-StreamInfo GetInfoFromCallback(std::future<StreamInfoResult>* promiseResult, uint64_t timeout_ms, Error* err) {
+void ActivatePromiseForErrorInterface(std::shared_ptr<std::promise<ErrorInterface*>> promise, RequestCallbackPayload payload,
+                                  Error err) {
+    ErrorInterface* res;
+    if (err == nullptr) {
+        res = nullptr;
+    } else {
+        res = err.release();
+    }
+    try {
+        promise->set_value(res);
+    } catch(...) {}
+}
+
+
+template<class T>
+T GetResultFromCallback(std::future<T>* promiseResult, uint64_t timeout_ms, Error* err) {
     try {
         auto status = promiseResult->wait_for(std::chrono::milliseconds(timeout_ms));
         if (status == std::future_status::ready) {
-            auto res = promiseResult->get();
-            if (res.err == nullptr) {
-                return res.sinfo;
-            } else {
-                (*err).reset(res.err);
-                return StreamInfo{};
-            }
+            return promiseResult->get();
         }
     } catch(...) {}
 
     *err = ProducerErrorTemplates::kTimeout.Generate();
-    return StreamInfo{};
+    return T{};
 }
 
 
@@ -362,14 +374,22 @@ StreamInfo ProducerImpl::StreamRequest(StreamRequestOp op,std::string stream, ui
 
     *err = request_pool__->AddRequest(std::unique_ptr<ProducerRequest> {new ProducerRequest{source_cred_string_, std::move(header),
                                                                                             nullptr, "", "",
-                                                                                            unwrap_callback(ActivatePromise, std::move(promise)), true,
+                                                                                            unwrap_callback(
+                                                                                                ActivatePromiseForStreamInfo,
+                                                                                                std::move(promise)), true,
                                                                                             timeout_ms}
     }, true);
     if (*err) {
         return StreamInfo{};
     }
-    return GetInfoFromCallback(&promiseResult, timeout_ms + 2000,
-                               err); // we give two more sec for request to exit by timeout
+    auto res = GetResultFromCallback<StreamInfoResult>(&promiseResult, timeout_ms + 2000,
+                                                       err); // we give two more sec for request to exit by timeout
+    if (res.err == nullptr) {
+        return res.sinfo;
+    } else {
+        (*err).reset(res.err);
+        return StreamInfo{};
+    }
 }
 
 StreamInfo ProducerImpl::GetStreamInfo(std::string stream, uint64_t timeout_ms, Error* err) const {
@@ -420,4 +440,29 @@ Error ProducerImpl::GetServerVersionInfo(std::string* server_info,
     return ExtractVersionFromResponse(response,"producer",server_info,supported);
 }
 
+Error ProducerImpl::DeleteStream(std::string stream, uint64_t timeout_ms, DeleteStreamOptions options) const {
+    auto header = GenericRequestHeader{kOpcodeDeleteStream, 0, 0, 0, "", stream};
+    header.custom_data[0] = options.Encode();
+
+    std::unique_ptr<std::promise<ErrorInterface*>> promise {new std::promise<ErrorInterface*>};
+    std::future<ErrorInterface*> promiseResult = promise->get_future();
+
+    auto err = request_pool__->AddRequest(std::unique_ptr<ProducerRequest> {new ProducerRequest{source_cred_string_, std::move(header),
+                                                                                                nullptr, "", "",
+                                                                                                unwrap_callback<ErrorInterface*>(
+                                                                                                    ActivatePromiseForErrorInterface,
+                                                                                                    std::move(promise)), true,
+                                                                                                timeout_ms}
+    }, true);
+    if (err) {
+        return err;
+    }
+
+    auto res = GetResultFromCallback<ErrorInterface*>(&promiseResult, timeout_ms + 2000, &err); // we give two more sec for request to exit by timeout
+    if (err) {
+        return err;
+    }
+    return Error{res};
+}
+
 }
\ No newline at end of file
diff --git a/producer/api/cpp/src/producer_impl.h b/producer/api/cpp/src/producer_impl.h
index 53fda3df0a07508efce6dc7f7ef891af2601bf4a..3bdab64d2ad37c1225f86af634f83c32023b1a50 100644
--- a/producer/api/cpp/src/producer_impl.h
+++ b/producer/api/cpp/src/producer_impl.h
@@ -58,6 +58,8 @@ class ProducerImpl : public Producer {
   Error SendStreamFinishedFlag(std::string stream, uint64_t last_id, std::string next_stream,
                                   RequestCallback callback) override;
 
+  Error DeleteStream(std::string stream, uint64_t timeout_ms, DeleteStreamOptions options) const override;
+
   AbstractLogger* log__;
   std::unique_ptr<HttpClient> httpclient__;
   std::unique_ptr<RequestPool> request_pool__;
diff --git a/producer/api/cpp/src/request_handler_filesystem.cpp b/producer/api/cpp/src/request_handler_filesystem.cpp
index 39a8c3d6935471e656d8a65ca8fecbb233c457e0..968c68b347f341af8ac71048e4092e54895be5ec 100644
--- a/producer/api/cpp/src/request_handler_filesystem.cpp
+++ b/producer/api/cpp/src/request_handler_filesystem.cpp
@@ -38,7 +38,7 @@ bool RequestHandlerFilesystem::ProcessRequestUnlocked(GenericRequest* request, b
     return true;
 }
 
-void RequestHandlerFilesystem::ProcessRequestTimeout(GenericRequest* request) {
+void RequestHandlerFilesystem::ProcessRequestTimeoutUnlocked(GenericRequest* request) {
     log__->Error("request timeout, id:" + std::to_string(request->header.data_id) + " to " + request->header.stream +
                  " stream");
 }
diff --git a/producer/api/cpp/src/request_handler_filesystem.h b/producer/api/cpp/src/request_handler_filesystem.h
index bbb5250c7fb95f41fbefdd3ec12d1e7c27564914..15dc03e563bb6ca0cc0bb6374bbf5ad0205e9d64 100644
--- a/producer/api/cpp/src/request_handler_filesystem.h
+++ b/producer/api/cpp/src/request_handler_filesystem.h
@@ -23,7 +23,7 @@ class RequestHandlerFilesystem: public RequestHandler {
     };
     void PrepareProcessingRequestLocked()  override {};
     void TearDownProcessingRequestLocked(bool request_processed_successfully)  override {};
-    void ProcessRequestTimeout(GenericRequest* request)  override;
+    void ProcessRequestTimeoutUnlocked(GenericRequest* request)  override;
 
     virtual ~RequestHandlerFilesystem() = default;
     std::unique_ptr<IO> io__;
diff --git a/producer/api/cpp/src/request_handler_tcp.cpp b/producer/api/cpp/src/request_handler_tcp.cpp
index 85895cfd06c37dcd4257ad88c2c6bbf1d5be75e4..9be95ddd219f83c025a6f36caac4119c65efd22e 100644
--- a/producer/api/cpp/src/request_handler_tcp.cpp
+++ b/producer/api/cpp/src/request_handler_tcp.cpp
@@ -307,7 +307,7 @@ void RequestHandlerTcp::TearDownProcessingRequestLocked(bool request_processed_s
     }
 }
 
-void RequestHandlerTcp::ProcessRequestTimeout(GenericRequest* request) {
+void RequestHandlerTcp::ProcessRequestTimeoutUnlocked(GenericRequest* request) {
     auto producer_request = static_cast<ProducerRequest*>(request);
     auto err_string = "request id:" + std::to_string(request->header.data_id) + ", opcode: " + std::to_string(
         request->header.op_code) + " for " + request->header.stream +
diff --git a/producer/api/cpp/src/request_handler_tcp.h b/producer/api/cpp/src/request_handler_tcp.h
index a891a81d5e139da8c60f00d98997cb8c70e2ea1e..7b8e6187480bf0e0452a1affbc719fb9839db2ea 100644
--- a/producer/api/cpp/src/request_handler_tcp.h
+++ b/producer/api/cpp/src/request_handler_tcp.h
@@ -23,7 +23,7 @@ class RequestHandlerTcp: public RequestHandler {
     bool ReadyProcessRequest() override;
     void PrepareProcessingRequestLocked()  override;
     void TearDownProcessingRequestLocked(bool request_processed_successfully)  override;
-    void ProcessRequestTimeout(GenericRequest* request)  override;
+    void ProcessRequestTimeoutUnlocked(GenericRequest* request)  override;
 
     virtual ~RequestHandlerTcp() = default;
     std::unique_ptr<IO> io__;
diff --git a/producer/api/cpp/unittests/test_producer_impl.cpp b/producer/api/cpp/unittests/test_producer_impl.cpp
index 5219352f164def64bf0b0d1c819af0feba464c3d..17d038597992c201a9cba0e9b9281b747bfca28b 100644
--- a/producer/api/cpp/unittests/test_producer_impl.cpp
+++ b/producer/api/cpp/unittests/test_producer_impl.cpp
@@ -518,14 +518,13 @@ TEST_F(ProducerImplTests, ReturnDataIfCanotAddToQueue) {
     ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kRequestPoolIsFull));
     ASSERT_THAT(original_data_in_err, Ne(nullptr));
     ASSERT_THAT(original_data_in_err[40], Eq(10));
-
 }
 
 TEST_F(ProducerImplTests, GetVersionInfoWithServer) {
 
     std::string result = R"({"softwareVersion":"20.03.1, build 7a9294ad","clientSupported":"no", "clientProtocol":{"versionInfo":"v0.2"}})";
 
-    EXPECT_CALL(*mock_http_client, Get_t(HasSubstr(expected_server_uri + "/asapo-discovery/v0.1/version?client=producer&protocol=v0.1"), _,_)).WillOnce(DoAll(
+    EXPECT_CALL(*mock_http_client, Get_t(HasSubstr(expected_server_uri + "/asapo-discovery/v0.1/version?client=producer&protocol=v0.2"), _,_)).WillOnce(DoAll(
         SetArgPointee<1>(asapo::HttpCode::OK),
         SetArgPointee<2>(nullptr),
         Return(result)));
@@ -537,4 +536,30 @@ TEST_F(ProducerImplTests, GetVersionInfoWithServer) {
     ASSERT_THAT(server_info, HasSubstr("v0.2"));
 }
 
+MATCHER_P4(M_CheckDeleteStreamRequest, op_code, source_credentials, stream,flag,
+           "Checks if a valid GenericRequestHeader was sent") {
+    auto request = static_cast<ProducerRequest*>(arg);
+    return ((asapo::GenericRequestHeader) (arg->header)).op_code == op_code
+        && request->source_credentials == source_credentials
+        && ((asapo::GenericRequestHeader) (arg->header)).custom_data[0] == flag
+        && strcmp(((asapo::GenericRequestHeader) (arg->header)).stream, stream) == 0;
+}
+
+TEST_F(ProducerImplTests, DeleteStreamMakesCorrectRequest) {
+    producer.SetCredentials(expected_credentials);
+    asapo::DeleteStreamOptions expected_options{};
+    expected_options.delete_meta = true;
+    expected_options.error_on_not_exist = true;
+    auto flag =3;
+
+    EXPECT_CALL(mock_pull, AddRequest_t(M_CheckDeleteStreamRequest(asapo::kOpcodeDeleteStream,
+                                                                    expected_credentials_str,
+                                                                    expected_stream, flag), true)).WillOnce(
+        Return(nullptr));
+
+    asapo::DeleteStreamOptions options{};
+    auto err = producer.DeleteStream(expected_stream, 1000,options);
+    ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kTimeout));
+}
+
 }
diff --git a/producer/api/cpp/unittests/test_producer_request.cpp b/producer/api/cpp/unittests/test_producer_request.cpp
index b879fb32833c7dd8bdb911adddd7db9f007a9fc5..2e4509db30c2723ba5a933a1698d0edd53a64269 100644
--- a/producer/api/cpp/unittests/test_producer_request.cpp
+++ b/producer/api/cpp/unittests/test_producer_request.cpp
@@ -40,7 +40,7 @@ TEST(ProducerRequest, Constructor) {
     uint64_t expected_file_size = 1337;
     uint64_t expected_meta_size = 137;
     std::string expected_meta = "meta";
-    std::string expected_api_version = "v0.1";
+    std::string expected_api_version = "v0.2";
     asapo::Opcode expected_op_code = asapo::kOpcodeTransferData;
 
     asapo::GenericRequestHeader header{expected_op_code, expected_file_id, expected_file_size,
diff --git a/producer/api/cpp/unittests/test_receiver_discovery_service.cpp b/producer/api/cpp/unittests/test_receiver_discovery_service.cpp
index 014be20c89dd3d247a2e3d7134c9c43fb5131792..956dc9df45e38a23bc0b89b8ca07dfd000eacbc5 100644
--- a/producer/api/cpp/unittests/test_receiver_discovery_service.cpp
+++ b/producer/api/cpp/unittests/test_receiver_discovery_service.cpp
@@ -48,7 +48,7 @@ class ReceiversStatusTests : public Test {
     NiceMock<asapo::MockLogger> mock_logger;
     NiceMock<MockHttpClient>* mock_http_client;
 
-    std::string expected_endpoint{"endpoint/asapo-discovery/v0.1/asapo-receiver?protocol=v0.1"};
+    std::string expected_endpoint{"endpoint/asapo-discovery/v0.1/asapo-receiver?protocol=v0.2"};
     ReceiverDiscoveryService status{"endpoint", 20};
 
     void SetUp() override {
diff --git a/producer/api/cpp/unittests/test_request_handler_tcp.cpp b/producer/api/cpp/unittests/test_request_handler_tcp.cpp
index 1d7b812cd1e88df4915b65a83639774ef8ececb8..9066b904493af1df5a095a28abdcf1cdc0f55981 100644
--- a/producer/api/cpp/unittests/test_request_handler_tcp.cpp
+++ b/producer/api/cpp/unittests/test_request_handler_tcp.cpp
@@ -974,7 +974,7 @@ TEST_F(RequestHandlerTcpTests, TimeoutCallsCallback) {
         HasSubstr("stream"))
     ));
 
-    request_handler.ProcessRequestTimeout(&request);
+    request_handler.ProcessRequestTimeoutUnlocked(&request);
 
     ASSERT_THAT(callback_err, Eq(asapo::ProducerErrorTemplates::kTimeout));
     ASSERT_THAT(callback_called, Eq(true));
diff --git a/producer/api/python/asapo_producer.pxd b/producer/api/python/asapo_producer.pxd
index cd627b6203aa0e9322625f893d7203dfd392c6a1..f78491a2fe48610944078ed7dc8d0996da170b97 100644
--- a/producer/api/python/asapo_producer.pxd
+++ b/producer/api/python/asapo_producer.pxd
@@ -74,6 +74,9 @@ cdef extern from "asapo/asapo_producer.h" namespace "asapo":
     GenericRequestHeader original_header
     MessageData data
     string response
+  struct DeleteStreamOptions:
+    bool delete_meta
+    bool error_on_not_exist
 
 cdef extern from "asapo/asapo_producer.h" namespace "asapo":
   cppclass RequestCallback:
@@ -106,6 +109,7 @@ cdef extern from "asapo/asapo_producer.h" namespace "asapo" nogil:
         StreamInfo GetStreamInfo(string stream, uint64_t timeout_ms, Error* err)
         StreamInfo GetLastStream(uint64_t timeout_ms, Error* err)
         Error GetVersionInfo(string* client_info,string* server_info, bool* supported)
+        Error DeleteStream(string stream, uint64_t timeout_ms, DeleteStreamOptions options)
 
 
 cdef extern from "asapo/asapo_producer.h" namespace "asapo":
diff --git a/producer/api/python/asapo_producer.pyx.in b/producer/api/python/asapo_producer.pyx.in
index 3b3609cc4a533390ee56d4ff31eecef79f6a5090..24d97cb5c3b5a51ed5b3ce05b2e529beaa9f6de4 100644
--- a/producer/api/python/asapo_producer.pyx.in
+++ b/producer/api/python/asapo_producer.pyx.in
@@ -122,7 +122,7 @@ cdef class PyProducer:
         cdef bool* p_supported =  &supported if from_server else <bool*>NULL
         cdef Error err
         with nogil:
-                err =  self.c_producer.get().GetVersionInfo(&client_info,p_server_info,p_supported)
+            err =  self.c_producer.get().GetVersionInfo(&client_info,p_server_info,p_supported)
         if err:
             throw_exception(err)
         version = {}
@@ -223,6 +223,27 @@ cdef class PyProducer:
         if callback != None:
             Py_XINCREF(<PyObject*>callback)
 
+    def delete_stream(self, stream = 'default', uint64_t timeout_ms = 1000,bool error_on_not_exist = True):
+        """
+         :param stream: stream name
+         :type stream: string
+         :param timeout_ms: timeout in milliseconds
+         :type timeout_ms: int
+         :param error_on_not_exist: if set to true, AsapoWrongInputError is raised when trying to delete a non-existing stream
+         :type error_on_not_exist: bool
+         :raises:
+            AsapoWrongInputError: wrong input (authorization, ...)
+            AsapoTimeoutError: request not finished for a given timeout
+            AsapoProducerError: other errors
+        """
+        cdef Error err
+        cdef DeleteStreamOptions opts
+        cdef string b_stream = _bytes(stream)
+        opts.error_on_not_exist = error_on_not_exist
+        with nogil:
+            err = self.c_producer.get().DeleteStream(b_stream,timeout_ms,opts)
+        if err:
+            throw_exception(err)
     def stream_info(self, stream = 'default', uint64_t timeout_ms = 1000):
         """
          :param stream: stream name
diff --git a/receiver/CMakeLists.txt b/receiver/CMakeLists.txt
index 6cde4bbaaf5c8307f43086785ff665e552ebc261..0bc661d5f7eea49a3926a5bc71641d0adb24a4b0 100644
--- a/receiver/CMakeLists.txt
+++ b/receiver/CMakeLists.txt
@@ -21,6 +21,7 @@ set(RECEIVER_CORE_FILES
         src/request_handler/request_handler_db_last_stream.cpp
         src/request_handler/request_handler_receive_metadata.cpp
         src/request_handler/request_handler_db_check_request.cpp
+        src/request_handler/request_handler_delete_stream.cpp
         src/request_handler/request_factory.cpp
         src/request_handler/request_handler_db.cpp
         src/file_processors/write_file_processor.cpp
@@ -97,6 +98,7 @@ set(TEST_SOURCE_FILES
         unittests/request_handler/test_request_handler_authorizer.cpp
         unittests/request_handler/test_request_handler_receive_data.cpp
         unittests/request_handler/test_request_handler_receive_metadata.cpp
+        unittests/request_handler/test_request_handler_delete_stream.cpp
         unittests/statistics/test_statistics_sender_influx_db.cpp
         unittests/statistics/test_statistics_sender_fluentd.cpp
         unittests/mock_receiver_config.cpp
diff --git a/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.cpp b/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.cpp
index 0f8f387faf1d2bd20490a4e6b409612b1c81e81d..1529dc7ff4dfa201e0ee0c224fcb2ea3f0dde6e4 100644
--- a/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.cpp
+++ b/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.cpp
@@ -86,7 +86,7 @@ void ReceiverDataServerRequestHandler::TearDownProcessingRequestLocked(bool /*pr
 // do nothing
 }
 
-void ReceiverDataServerRequestHandler::ProcessRequestTimeout(GenericRequest* /*request*/) {
+void ReceiverDataServerRequestHandler::ProcessRequestTimeoutUnlocked(GenericRequest* /*request*/) {
 // do nothing
 }
 
diff --git a/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.h b/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.h
index 18fc5937a793c9358376d24ec0480496ce93e88a..952cb1cb78e83b6c92ecd142e7f6761a6b086a44 100644
--- a/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.h
+++ b/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.h
@@ -17,7 +17,7 @@ class ReceiverDataServerRequestHandler: public RequestHandler {
     bool ReadyProcessRequest() override;
     void PrepareProcessingRequestLocked()  override;
     void TearDownProcessingRequestLocked(bool processing_succeeded)  override;
-    void ProcessRequestTimeout(GenericRequest* request)  override;
+    void ProcessRequestTimeoutUnlocked(GenericRequest* request)  override;
 
     const AbstractLogger* log__;
     Statistics* statistics__;
diff --git a/receiver/src/request_handler/request_factory.cpp b/receiver/src/request_handler/request_factory.cpp
index de3f74cec40946b86836c391a28e85001bef8320..847411057b8af8024381d82aa014eead0f046158 100644
--- a/receiver/src/request_handler/request_factory.cpp
+++ b/receiver/src/request_handler/request_factory.cpp
@@ -83,6 +83,10 @@ Error RequestFactory::AddHandlersToRequest(std::unique_ptr<Request> &request,
             request->AddHandler(&request_handler_db_stream_info_);
             break;
         }
+        case Opcode::kOpcodeDeleteStream: {
+            request->AddHandler(&request_handler_delete_stream_);
+            break;
+        }
         case Opcode::kOpcodeLastStream: {
             request->AddHandler(&request_handler_db_last_stream_);
             break;
diff --git a/receiver/src/request_handler/request_factory.h b/receiver/src/request_handler/request_factory.h
index fa6d1a53aa3b67576c1e254dd62d34634b923e76..374c586ab0e7609effa52cfe12017b471561734a 100644
--- a/receiver/src/request_handler/request_factory.h
+++ b/receiver/src/request_handler/request_factory.h
@@ -6,6 +6,7 @@
 #include "../file_processors/receive_file_processor.h"
 #include "request_handler_db_stream_info.h"
 #include "request_handler_db_last_stream.h"
+#include "request_handler_delete_stream.h"
 
 namespace asapo {
 
@@ -25,6 +26,7 @@ class RequestFactory {
     RequestHandlerReceiveMetaData request_handler_receive_metadata_;
     RequestHandlerDbWrite request_handler_dbwrite_{kDBDataCollectionNamePrefix};
     RequestHandlerDbStreamInfo request_handler_db_stream_info_{kDBDataCollectionNamePrefix};
+    RequestHandlerDeleteStream request_handler_delete_stream_{kDBDataCollectionNamePrefix};
     RequestHandlerDbLastStream request_handler_db_last_stream_{kDBDataCollectionNamePrefix};
     RequestHandlerDbMetaWrite request_handler_db_meta_write_{kDBMetaCollectionName};
     RequestHandlerAuthorize request_handler_authorize_;
diff --git a/receiver/src/request_handler/request_handler_delete_stream.cpp b/receiver/src/request_handler/request_handler_delete_stream.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..42719ff49899275d3bd364e322d13e8f0172c482
--- /dev/null
+++ b/receiver/src/request_handler/request_handler_delete_stream.cpp
@@ -0,0 +1,43 @@
+#include "request_handler_delete_stream.h"
+#include "../receiver_config.h"
+#include <asapo/database/db_error.h>
+
+namespace asapo {
+
+RequestHandlerDeleteStream::RequestHandlerDeleteStream(std::string collection_name_prefix) : RequestHandlerDb(
+    std::move(collection_name_prefix)) {
+}
+
+Error RequestHandlerDeleteStream::ProcessRequest(Request* request) const {
+    if (auto err = RequestHandlerDb::ProcessRequest(request) ) {
+        return err;
+    }
+
+    DeleteStreamOptions options{};
+    uint64_t flag = request->GetCustomData()[0];
+    options.Decode(flag);
+    auto stream_name = request->GetStream();
+
+    if (!options.delete_meta) {
+        log__->Debug(std::string{"skipped deleting stream meta in "} + stream_name + " in " +
+            db_name_ + " at " + GetReceiverConfig()->database_uri);
+        return nullptr;
+    }
+    auto err =  db_client__->DeleteStream(stream_name);
+
+    bool no_error = err == nullptr;
+    if (err == DBErrorTemplates::kNoRecord && !options.error_on_not_exist) {
+        no_error = true;
+    }
+
+    if (no_error) {
+        log__->Debug(std::string{"deleted stream meta in "} + stream_name + " in " +
+            db_name_ + " at " + GetReceiverConfig()->database_uri);
+        return nullptr;
+    }
+
+    return err;
+}
+
+
+}
\ No newline at end of file
diff --git a/receiver/src/request_handler/request_handler_delete_stream.h b/receiver/src/request_handler/request_handler_delete_stream.h
new file mode 100644
index 0000000000000000000000000000000000000000..3cf4e0fb0a46dd399c0e4dc4f23e2087c5f96790
--- /dev/null
+++ b/receiver/src/request_handler/request_handler_delete_stream.h
@@ -0,0 +1,18 @@
+#ifndef ASAPO_REQUEST_HANDLER_DELETE_STREAM_H
+#define ASAPO_REQUEST_HANDLER_DELETE_STREAM_H
+
+#include "request_handler_db.h"
+#include "../request.h"
+
+namespace asapo {
+
+class RequestHandlerDeleteStream final: public RequestHandlerDb {
+ public:
+  RequestHandlerDeleteStream(std::string collection_name_prefix);
+  Error ProcessRequest(Request* request) const override;
+};
+
+}
+
+
+#endif //ASAPO_REQUEST_HANDLER_DELETE_STREAM_H
diff --git a/receiver/src/request_handler/requests_dispatcher.cpp b/receiver/src/request_handler/requests_dispatcher.cpp
index 2487f2d70ce0d86c4a325e7a7ae0236277a14152..79d414af2535316d025c13d9c4cdca127ddb05b9 100644
--- a/receiver/src/request_handler/requests_dispatcher.cpp
+++ b/receiver/src/request_handler/requests_dispatcher.cpp
@@ -22,7 +22,7 @@ NetworkErrorCode GetNetworkCodeFromError(const Error& err) {
             return NetworkErrorCode::kNetErrorNotSupported;
         } else if (err == ReceiverErrorTemplates::kReAuthorizationFailure) {
             return NetworkErrorCode::kNetErrorReauthorize;
-        } else if (err == DBErrorTemplates::kJsonParseError || err == ReceiverErrorTemplates::kBadRequest) {
+        } else if (err == DBErrorTemplates::kJsonParseError || err == ReceiverErrorTemplates::kBadRequest || err == DBErrorTemplates::kNoRecord) {
             return NetworkErrorCode::kNetErrorWrongRequest;
         } else {
             return NetworkErrorCode::kNetErrorInternalServerError;
diff --git a/receiver/unittests/request_handler/test_request_factory.cpp b/receiver/unittests/request_handler/test_request_factory.cpp
index a6c1d02b6c6479928f8b625d96fdcd1ba73a331b..b9b8418e0d3591acf378b2f2704f54678789a1b3 100644
--- a/receiver/unittests/request_handler/test_request_factory.cpp
+++ b/receiver/unittests/request_handler/test_request_factory.cpp
@@ -15,6 +15,7 @@
 #include "../../src/request_handler/request_handler_authorize.h"
 #include "../../src/request_handler/request_handler_db_stream_info.h"
 #include "../../src/request_handler/request_handler_db_last_stream.h"
+#include "../../src/request_handler/request_handler_delete_stream.h"
 
 #include "../../src/request_handler/request_handler_receive_data.h"
 #include "../../src/request_handler/request_handler_receive_metadata.h"
@@ -213,4 +214,14 @@ TEST_F(FactoryTests, LastStreamRequest) {
     ASSERT_THAT(dynamic_cast<const asapo::RequestHandlerDbLastStream*>(request->GetListHandlers()[1]), Ne(nullptr));
 }
 
+TEST_F(FactoryTests, DeleteStreamRequest) {
+    generic_request_header.op_code = asapo::Opcode::kOpcodeDeleteStream;
+    auto request = factory.GenerateRequest(generic_request_header, 1, origin_uri, &err);
+    ASSERT_THAT(err, Eq(nullptr));
+    ASSERT_THAT(request->GetListHandlers().size(), Eq(2));
+    ASSERT_THAT(dynamic_cast<const asapo::RequestHandlerAuthorize*>(request->GetListHandlers()[0]), Ne(nullptr));
+    ASSERT_THAT(dynamic_cast<const asapo::RequestHandlerDeleteStream*>(request->GetListHandlers()[1]), Ne(nullptr));
+}
+
+
 }
diff --git a/receiver/unittests/request_handler/test_request_handler_authorizer.cpp b/receiver/unittests/request_handler/test_request_handler_authorizer.cpp
index d6309ad3b39d324c2261dd92b8cb1a4f6c98b37c..dc15c7d3231d03b66f204817047c7ed4fd900245 100644
--- a/receiver/unittests/request_handler/test_request_handler_authorizer.cpp
+++ b/receiver/unittests/request_handler/test_request_handler_authorizer.cpp
@@ -277,7 +277,7 @@ TEST_F(AuthorizerHandlerTests, RequestFromUnsupportedClient) {
         .WillOnce(Return(asapo::kOpcodeAuthorize))
         ;
     EXPECT_CALL(*mock_request, GetApiVersion())
-        .WillOnce(Return("v0.2"))
+        .WillOnce(Return("v1000.2"))
         ;
 
     auto err = handler.ProcessRequest(mock_request.get());
diff --git a/receiver/unittests/request_handler/test_request_handler_delete_stream.cpp b/receiver/unittests/request_handler/test_request_handler_delete_stream.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..277ccdea4003e9dfafa819fc57886c423a772654
--- /dev/null
+++ b/receiver/unittests/request_handler/test_request_handler_delete_stream.cpp
@@ -0,0 +1,152 @@
+#include <gtest/gtest.h>
+#include <gmock/gmock.h>
+#include <asapo/database/db_error.h>
+
+#include "asapo/unittests/MockIO.h"
+#include "asapo/unittests/MockDatabase.h"
+#include "asapo/unittests/MockLogger.h"
+
+#include "../../src/receiver_error.h"
+#include "../../src/request.h"
+#include "../../src/request_handler/request_factory.h"
+#include "../../src/request_handler/request_handler.h"
+#include "../../src/request_handler/request_handler_delete_stream.h"
+#include "../../../common/cpp/src/database/mongodb_client.h"
+
+#include "../mock_receiver_config.h"
+#include "asapo/common/data_structs.h"
+#include "asapo/common/networking.h"
+#include "../receiver_mocking.h"
+
+using asapo::MockRequest;
+using asapo::MessageMeta;
+using ::testing::Test;
+using ::testing::Return;
+using ::testing::ReturnRef;
+using ::testing::_;
+using ::testing::DoAll;
+using ::testing::SetArgReferee;
+using ::testing::Gt;
+using ::testing::Eq;
+using ::testing::Ne;
+using ::testing::Mock;
+using ::testing::NiceMock;
+using ::testing::InSequence;
+using ::testing::SetArgPointee;
+using ::testing::AllOf;
+using ::testing::HasSubstr;
+
+
+using ::asapo::Error;
+using ::asapo::ErrorInterface;
+using ::asapo::FileDescriptor;
+using ::asapo::SocketDescriptor;
+using ::asapo::MockIO;
+using asapo::Request;
+using asapo::RequestHandlerDeleteStream;
+using ::asapo::GenericRequestHeader;
+
+using asapo::MockDatabase;
+using asapo::RequestFactory;
+using asapo::SetReceiverConfig;
+using asapo::ReceiverConfig;
+
+
+namespace {
+
+class DbMetaDeleteStreamTests : public Test {
+  public:
+    RequestHandlerDeleteStream handler{asapo::kDBDataCollectionNamePrefix};
+    std::unique_ptr<NiceMock<MockRequest>> mock_request;
+    NiceMock<MockDatabase> mock_db;
+    NiceMock<asapo::MockLogger> mock_logger;
+    ReceiverConfig config;
+    std::string expected_beamtime_id = "beamtime_id";
+    std::string expected_data_source = "source";
+    std::string expected_stream = "stream";
+    uint64_t expected_custom_data[asapo::kNCustomParams] {0, 0, 0};
+
+  void SetUp() override {
+        GenericRequestHeader request_header;
+        handler.db_client__ = std::unique_ptr<asapo::Database> {&mock_db};
+        handler.log__ = &mock_logger;
+        mock_request.reset(new NiceMock<MockRequest> {request_header, 1, "", nullptr});
+        ON_CALL(*mock_request, GetBeamtimeId()).WillByDefault(ReturnRef(expected_beamtime_id));
+    }
+    void TearDown() override {
+        handler.db_client__.release();
+    }
+    void ExpectDelete(uint64_t flag, const asapo::DBErrorTemplate* errorTemplate) {
+        expected_custom_data[0] = flag;
+        SetReceiverConfig(config, "none");
+        EXPECT_CALL(*mock_request, GetCustomData_t()).WillOnce(Return(expected_custom_data));
+        EXPECT_CALL(*mock_request, GetDataSource()).WillOnce(ReturnRef(expected_data_source));
+        EXPECT_CALL(*mock_request, GetStream()).WillOnce(Return(expected_stream));
+
+        asapo::DeleteStreamOptions opt;
+        opt.Decode(flag);
+        if (!opt.delete_meta) {
+            EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("skipped deleting stream meta"),
+                                                 HasSubstr(config.database_uri),
+                                                 HasSubstr(expected_data_source),
+                                                 HasSubstr(expected_stream),
+                                                 HasSubstr(expected_beamtime_id)
+                                           )
+            )
+            );            return;
+        }
+
+        EXPECT_CALL(mock_db, Connect_t(config.database_uri, expected_beamtime_id + "_" + expected_data_source)).
+            WillOnce(testing::Return(nullptr));
+        EXPECT_CALL(mock_db, DeleteStream_t(expected_stream)).
+            WillOnce(testing::Return(errorTemplate==nullptr?nullptr:errorTemplate->Generate().release()));
+        if (errorTemplate == nullptr) {
+            EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("deleted stream meta"),
+                                                 HasSubstr(config.database_uri),
+                                                 HasSubstr(expected_data_source),
+                                                 HasSubstr(expected_stream),
+                                                 HasSubstr(expected_beamtime_id)
+                                           )
+            )
+            );
+        }
+
+    }
+};
+
+
+
+TEST_F(DbMetaDeleteStreamTests, CallsDeleteOk) {
+
+    ExpectDelete(3,nullptr);
+
+    auto err = handler.ProcessRequest(mock_request.get());
+    ASSERT_THAT(err, Eq(nullptr));
+
+}
+
+TEST_F(DbMetaDeleteStreamTests, CallsDeleteErrorAlreadyExist) {
+
+    ExpectDelete(3,&asapo::DBErrorTemplates::kNoRecord);
+    auto err = handler.ProcessRequest(mock_request.get());
+
+    ASSERT_THAT(err, Eq(asapo::DBErrorTemplates::kNoRecord));
+}
+
+TEST_F(DbMetaDeleteStreamTests, CallsDeleteNoErrorAlreadyExist) {
+
+    ExpectDelete(1,&asapo::DBErrorTemplates::kNoRecord);
+    auto err = handler.ProcessRequest(mock_request.get());
+
+    ASSERT_THAT(err, Eq(nullptr));
+}
+
+TEST_F(DbMetaDeleteStreamTests, CallsDeleteNoOp) {
+
+    ExpectDelete(0,&asapo::DBErrorTemplates::kNoRecord);
+    auto err = handler.ProcessRequest(mock_request.get());
+
+    ASSERT_THAT(err, Eq(nullptr));
+}
+
+}
diff --git a/receiver/unittests/request_handler/test_requests_dispatcher.cpp b/receiver/unittests/request_handler/test_requests_dispatcher.cpp
index b03fc381186651da8185d8722c8f77c233547b08..73ade897611b663cd98209d65c51898764d296f6 100644
--- a/receiver/unittests/request_handler/test_requests_dispatcher.cpp
+++ b/receiver/unittests/request_handler/test_requests_dispatcher.cpp
@@ -338,5 +338,14 @@ TEST_F(RequestsDispatcherTests, ProcessRequestReturnsBadRequest) {
 }
 
 
+TEST_F(RequestsDispatcherTests, ProcessRequestReturnsNoRecord) {
+    MockHandleRequest(1, asapo::DBErrorTemplates::kNoRecord.Generate());
+    MockSendResponse(&response, false);
+
+    auto err = dispatcher->ProcessRequest(request);
+
+    ASSERT_THAT(response.error_code, Eq(asapo::kNetErrorWrongRequest));
+}
+
 
 }
diff --git a/receiver/unittests/test_request.cpp b/receiver/unittests/test_request.cpp
index 00d724c03d6b1522b7fecb9133f19a363f95c565..202884985b3a75b619460e737e6e6942eb47518d 100644
--- a/receiver/unittests/test_request.cpp
+++ b/receiver/unittests/test_request.cpp
@@ -83,7 +83,7 @@ class RequestTests : public Test {
     uint64_t expected_metadata_size = expected_metadata.size();
     asapo::Opcode expected_op_code = asapo::kOpcodeTransferData;
     char expected_request_message[asapo::kMaxMessageSize] = "test_message";
-    std::string expected_api_version = "v0.1";
+    std::string expected_api_version = "v0.2";
     std::unique_ptr<Request> request;
     NiceMock<MockIO> mock_io;
     NiceMock<MockStatistics> mock_statistics;
diff --git a/tests/automatic/broker/check_monitoring/check_linux.sh b/tests/automatic/broker/check_monitoring/check_linux.sh
index 35ab4eb7cfe57f6c7243b3fb2a9a99aee36e3191..291a0641dd36041cee3de396f08dabc89dc0fc50 100644
--- a/tests/automatic/broker/check_monitoring/check_linux.sh
+++ b/tests/automatic/broker/check_monitoring/check_linux.sh
@@ -33,12 +33,12 @@ sleep 0.3
 
 brokerid=`echo $!`
 
-groupid=`curl -d '' --silent 127.0.0.1:5005/v0.1/creategroup`
+groupid=`curl -d '' --silent 127.0.0.1:5005/v0.2/creategroup`
 
 
 for i in `seq 1 50`;
 do
-    curl --silent 127.0.0.1:5005/v0.1/beamtime/data/source/stream/${groupid}/next?token=$token >/dev/null 2>&1 &
+    curl --silent 127.0.0.1:5005/v0.2/beamtime/data/source/stream/${groupid}/next?token=$token >/dev/null 2>&1 &
 done
 
 
diff --git a/tests/automatic/broker/get_last/check_linux.sh b/tests/automatic/broker/get_last/check_linux.sh
index 842fc91a96ccef7259992f2a555800da5742e8cb..c89bc54c841d1b6c32ffecb357ddb16b9e309bda 100644
--- a/tests/automatic/broker/get_last/check_linux.sh
+++ b/tests/automatic/broker/get_last/check_linux.sh
@@ -31,23 +31,23 @@ sleep 0.3
 brokerid=`echo $!`
 
 
-groupid=`curl -d '' --silent 127.0.0.1:5005/v0.1/creategroup`
+groupid=`curl -d '' --silent 127.0.0.1:5005/v0.2/creategroup`
 
-curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/${stream}/0/last?token=$token --stderr -
+curl -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/detector/${stream}/0/last?token=$token --stderr -
 
-curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":2'
-curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":2'
+curl -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":2'
+curl -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":2'
 
 echo "db.data_${stream}.insert({"_id":3})" | mongo ${database_name}
 
-curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":3'
+curl -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":3'
 
 echo "db.data_${stream}.insert({"_id":4})" | mongo ${database_name}
 
-curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/${stream}/${groupid}/next?token=$token --stderr - | grep '"_id":1'
-curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":4'
+curl -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/detector/${stream}/${groupid}/next?token=$token --stderr - | grep '"_id":1'
+curl -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":4'
 
 #with a new group
-groupid=`curl -d '' --silent 127.0.0.1:5005/v0.1/creategroup`
-curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/${stream}/${groupid}/next?token=$token --stderr - | grep '"_id":1'
-curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":4'
\ No newline at end of file
+groupid=`curl -d '' --silent 127.0.0.1:5005/v0.2/creategroup`
+curl -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/detector/${stream}/${groupid}/next?token=$token --stderr - | grep '"_id":1'
+curl -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":4'
\ No newline at end of file
diff --git a/tests/automatic/broker/get_last/check_windows.bat b/tests/automatic/broker/get_last/check_windows.bat
index 497384df3d5af7eb9fac5d7419042187ec850066..6ffe451b5789577ee2de913cf9ca6fbb56d28d0a 100644
--- a/tests/automatic/broker/get_last/check_windows.bat
+++ b/tests/automatic/broker/get_last/check_windows.bat
@@ -9,6 +9,10 @@ set short_name="%~nx1"
 
 c:\opt\consul\nomad run authorizer.nmd
 c:\opt\consul\nomad run nginx.nmd
+
+ping 192.0.2.1 -n 1 -w 2000 > nul
+
+
 start /B "" "%full_name%" -config settings.json
 
 ping 192.0.2.1 -n 1 -w 5000 > nul
@@ -17,26 +21,26 @@ ping 192.0.2.1 -n 1 -w 5000 > nul
 set token=%BT_DATA_TOKEN%
 
 
-C:\Curl\curl.exe -d '' --silent 127.0.0.1:5005/v0.1/creategroup > groupid
+C:\Curl\curl.exe -d '' --silent 127.0.0.1:5005/v0.2/creategroup > groupid
 set /P groupid=< groupid
 
 
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":2  || goto :error
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":2  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":2  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":2  || goto :error
 
 echo db.data_default.insert({"_id":3}) | %mongo_exe% %database_name%  || goto :error
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":3  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":3  || goto :error
 
 echo db.data_default.insert({"_id":4}) | %mongo_exe% %database_name%  || goto :error
 
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":1  || goto :error
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":4  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/detector/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":1  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":4  || goto :error
 
 
-C:\Curl\curl.exe -d '' --silent 127.0.0.1:5005/v0.1/creategroup > groupid
+C:\Curl\curl.exe -d '' --silent 127.0.0.1:5005/v0.2/creategroup > groupid
 set /P groupid=< groupid
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":1  || goto :error
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":4  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/detector/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":1  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":4  || goto :error
 
 
 goto :clean
diff --git a/tests/automatic/broker/get_meta/check_linux.sh b/tests/automatic/broker/get_meta/check_linux.sh
index ba084cc02db68612ef667a54c4859060c512f17e..14b23b4d6368cb4cf2d6f86cf8b01160c96051fe 100644
--- a/tests/automatic/broker/get_meta/check_linux.sh
+++ b/tests/automatic/broker/get_meta/check_linux.sh
@@ -29,6 +29,6 @@ $1 -config settings.json &
 sleep 0.3
 brokerid=`echo $!`
 
-curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/test/detector/default/0/meta/0?token=$token --stderr - | tee /dev/stderr | grep '"data":"test"'
-curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/test/detector/default/0/meta/1?token=$token --stderr - | tee /dev/stderr | grep 'no documents'
+curl -v  --silent 127.0.0.1:5005/v0.2/beamtime/test/detector/default/0/meta/0?token=$token --stderr - | tee /dev/stderr | grep '"data":"test"'
+curl -v  --silent 127.0.0.1:5005/v0.2/beamtime/test/detector/default/0/meta/1?token=$token --stderr - | tee /dev/stderr | grep 'no documents'
 
diff --git a/tests/automatic/broker/get_meta/check_windows.bat b/tests/automatic/broker/get_meta/check_windows.bat
index ee02ec0a23cff2fd68a12b8e45fb171693c460a6..c5dab94bb7b03fb541d4e14e44d360a8171f3327 100644
--- a/tests/automatic/broker/get_meta/check_windows.bat
+++ b/tests/automatic/broker/get_meta/check_windows.bat
@@ -8,6 +8,9 @@ set short_name="%~nx1"
 
 c:\opt\consul\nomad run authorizer.nmd
 c:\opt\consul\nomad run nginx.nmd
+
+ping 192.0.2.1 -n 1 -w 2000 > nul
+
 start /B "" "%full_name%" -config settings.json
 
 ping 192.0.2.1 -n 1 -w 5000 > nul
@@ -15,8 +18,8 @@ ping 192.0.2.1 -n 1 -w 5000 > nul
 set token=%BT_DATA_TOKEN%
 
 
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/default/0/meta/0?token=%token% --stderr - | findstr /c:\"_id\":0  || goto :error
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/default/0/meta/1?token=%token% --stderr - | findstr /c:"no documents"  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/detector/default/0/meta/0?token=%token% --stderr - | findstr /c:\"_id\":0  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/detector/default/0/meta/1?token=%token% --stderr - | findstr /c:"no documents"  || goto :error
 
 
 goto :clean
diff --git a/tests/automatic/broker/get_next/check_linux.sh b/tests/automatic/broker/get_next/check_linux.sh
index 80bb2312f4fcd83c0bb4d0b4c582709785ec2ef5..797269d0c374002b2486264494c84f61d92b6e15 100644
--- a/tests/automatic/broker/get_next/check_linux.sh
+++ b/tests/automatic/broker/get_next/check_linux.sh
@@ -31,11 +31,11 @@ $1 -config settings.json &
 sleep 0.3
 brokerid=`echo $!`
 
-groupid=`curl -d '' --silent 127.0.0.1:5005/v0.1/creategroup`
-curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/source/${stream}/${groupid}/next?token=$token --stderr - | tee /dev/stderr  | grep '"_id":1'
-curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/source/${stream}/${groupid}/next?token=$token --stderr - | tee /dev/stderr  | grep '"_id":2'
-curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/source/${stream}/${groupid}/next?token=$token --stderr - | tee /dev/stderr  | grep '"id_max":2'
+groupid=`curl -d '' --silent 127.0.0.1:5005/v0.2/creategroup`
+curl -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/source/${stream}/${groupid}/next?token=$token --stderr - | tee /dev/stderr  | grep '"_id":1'
+curl -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/source/${stream}/${groupid}/next?token=$token --stderr - | tee /dev/stderr  | grep '"_id":2'
+curl -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/source/${stream}/${groupid}/next?token=$token --stderr - | tee /dev/stderr  | grep '"id_max":2'
 
 # with a new group
-groupid=`curl -d '' --silent 127.0.0.1:5005/v0.1/creategroup`
-curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/source/${stream}/${groupid}/next?token=$token --stderr - | tee /dev/stderr | grep '"_id":1'
\ No newline at end of file
+groupid=`curl -d '' --silent 127.0.0.1:5005/v0.2/creategroup`
+curl -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/source/${stream}/${groupid}/next?token=$token --stderr - | tee /dev/stderr | grep '"_id":1'
\ No newline at end of file
diff --git a/tests/automatic/broker/get_next/check_windows.bat b/tests/automatic/broker/get_next/check_windows.bat
index 89ba33faa721e4c8267d6ae026b5142f1691d140..5c1bb8ba8b402c979640f3177449a05f5bcb0d73 100644
--- a/tests/automatic/broker/get_next/check_windows.bat
+++ b/tests/automatic/broker/get_next/check_windows.bat
@@ -11,19 +11,22 @@ set token=%BT_DATA_TOKEN%
 
 c:\opt\consul\nomad run authorizer.nmd
 c:\opt\consul\nomad run nginx.nmd
+
+ping 192.0.2.1 -n 1 -w 2000 > nul
+
 start /B "" "%full_name%" -config settings.json
 
 ping 192.0.2.1 -n 1 -w 5000 > nul
 
-C:\Curl\curl.exe -d '' --silent 127.0.0.1:5005/v0.1/creategroup > groupid
+C:\Curl\curl.exe -d '' --silent 127.0.0.1:5005/v0.2/creategroup > groupid
 set /P groupid=< groupid
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/source/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":1  || goto :error
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/source/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":2  || goto :error
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/source/default/%groupid%/next?token=%token% --stderr - | findstr  /c:\"id_max\":2  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/source/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":1  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/source/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":2  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/source/default/%groupid%/next?token=%token% --stderr - | findstr  /c:\"id_max\":2  || goto :error
 
-C:\Curl\curl.exe -d '' --silent 127.0.0.1:5005/v0.1/creategroup > groupid
+C:\Curl\curl.exe -d '' --silent 127.0.0.1:5005/v0.2/creategroup > groupid
 set /P groupid=< groupid
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/source/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":1  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.2/beamtime/data/source/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":1  || goto :error
 
 goto :clean
 
diff --git a/tests/automatic/bug_fixes/CMakeLists.txt b/tests/automatic/bug_fixes/CMakeLists.txt
index 6cc3dbaa7a47a0f8687396b687db47de0129ec79..2dba7e68ea370aba06b84e0f85451c68f9432af0 100644
--- a/tests/automatic/bug_fixes/CMakeLists.txt
+++ b/tests/automatic/bug_fixes/CMakeLists.txt
@@ -2,6 +2,7 @@ if (UNIX)
     add_subdirectory(receiver_cpu_usage)
     if (BUILD_PYTHON)
         add_subdirectory(consumer_python_memleak)
+        add_subdirectory(python_deadlock)
         add_subdirectory(streamlist_python_multithread)
         add_subdirectory(error-sending-data-using-callback-method)
     endif()
diff --git a/tests/automatic/bug_fixes/python_deadlock/CMakeLists.txt b/tests/automatic/bug_fixes/python_deadlock/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..b4007ef6387cb0aac545de107c108ebf329e5531
--- /dev/null
+++ b/tests/automatic/bug_fixes/python_deadlock/CMakeLists.txt
@@ -0,0 +1,19 @@
+set(TARGET_NAME python_deadlock_producer)
+
+
+prepare_asapo()
+
+find_package(Python3 REQUIRED)
+
+if (UNIX)
+    get_target_property(PYTHON_LIBS_PRODUCER python-lib-producer BINARY_DIR)
+else()
+    get_target_property(PYTHON_LIBS_PRODUCER asapo_producer BINARY_DIR)
+endif()
+
+file(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/producer_deadlock.py TEST_SCRIPT)
+
+
+add_script_test("${TARGET_NAME}" "${TEST_SCRIPT} ${PYTHON_LIBS_PRODUCER} ${Python3_EXECUTABLE}" nomem)
+
+
diff --git a/tests/automatic/bug_fixes/python_deadlock/check_linux.sh b/tests/automatic/bug_fixes/python_deadlock/check_linux.sh
new file mode 100644
index 0000000000000000000000000000000000000000..414b73f8e86ef8f9712c147e5f8fc6a5461cac35
--- /dev/null
+++ b/tests/automatic/bug_fixes/python_deadlock/check_linux.sh
@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+set -e
+
+export PYTHONPATH=$2:${PYTHONPATH}
+export Python3_EXECUTABLE=$3
+
+$Python3_EXECUTABLE $1 $endpoint $beamtime_id $token
+
+
diff --git a/tests/automatic/bug_fixes/python_deadlock/producer_deadlock.py b/tests/automatic/bug_fixes/python_deadlock/producer_deadlock.py
new file mode 100644
index 0000000000000000000000000000000000000000..54f34221f0b9b0afd1202c7ab50d24f1238dd8b9
--- /dev/null
+++ b/tests/automatic/bug_fixes/python_deadlock/producer_deadlock.py
@@ -0,0 +1,27 @@
+from __future__ import print_function
+
+import asapo_producer
+import time
+import threading
+
+lock = threading.Lock()
+
+def callback(payload, err):
+    lock.acquire()  # to print
+    if isinstance(err, asapo_producer.AsapoServerWarning):
+        print("successfuly sent, but with warning from server: ", payload, err)
+    elif err is not None:
+        print("could not sent: ", payload, err)
+    else:
+        print("successfuly sent: ", payload)
+    lock.release()
+
+producer = asapo_producer.create_producer("google.com:8700",'processed', "beamtime", 'auto', "data_source", "token", 4, 5000)
+
+
+for i in range(1, 20):
+    print ("sending ",i)
+    producer.send_file(i, local_path="./not_exist", exposed_path="./whatever",
+                       ingest_mode=asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback=callback)
+    time.sleep(1)
+
diff --git a/tests/automatic/consumer/consumer_api/check_linux.sh b/tests/automatic/consumer/consumer_api/check_linux.sh
index 7e0f342a598a82b8de05a33e95ad617a981cbce8..ceb0979c90aecfd9942195d7c02906c3b60be526 100644
--- a/tests/automatic/consumer/consumer_api/check_linux.sh
+++ b/tests/automatic/consumer/consumer_api/check_linux.sh
@@ -3,7 +3,7 @@
 beamtime_id=test_run
 data_source=detector
 database_name=${beamtime_id}_${data_source}
-token_test_run=$BT_TEST_RUN_TOKEN
+token_test_run=$BT_TEST_RUN_RW_TOKEN
 
 
 
@@ -19,7 +19,7 @@ Cleanup() {
     nomad stop authorizer
     nomad stop broker
     echo "db.dropDatabase()" | mongo ${database_name}
-	rm -f 1_1 1
+	  rm -f 1_1 1
 }
 
 
@@ -53,6 +53,7 @@ echo hello1 > 1
 $@ 127.0.0.1:8400 $beamtime_id $token_test_run single
 
 
+
 #check datasets
 echo "db.dropDatabase()" | mongo ${database_name}
 
diff --git a/tests/automatic/consumer/consumer_api/check_windows.bat b/tests/automatic/consumer/consumer_api/check_windows.bat
index 9e3be222df17dbb674d4c7312abc02fe46a584cc..57eac5617585d641650da85d44c1de3bb6c46a00 100644
--- a/tests/automatic/consumer/consumer_api/check_windows.bat
+++ b/tests/automatic/consumer/consumer_api/check_windows.bat
@@ -4,7 +4,7 @@ SET data_source=detector
 
 SET database_name=%beamtime_id%_%data_source%
 SET mongo_exe="c:\Program Files\MongoDB\Server\4.2\bin\mongo.exe"
-set token_test_run=%BT_TEST_RUN_TOKEN%
+set token_test_run=%BT_TEST_RUN_RW_TOKEN%
 
 call start_services.bat
 
diff --git a/tests/automatic/consumer/consumer_api/consumer_api.cpp b/tests/automatic/consumer/consumer_api/consumer_api.cpp
index a6f889c2abb8d5943c6c68bb8153a7db9e8e16ea..c1b0269ad30e0ced18a2eacb22a004a49af345c2 100644
--- a/tests/automatic/consumer/consumer_api/consumer_api.cpp
+++ b/tests/automatic/consumer/consumer_api/consumer_api.cpp
@@ -217,6 +217,15 @@ void TestSingle(const std::unique_ptr<asapo::Consumer>& consumer, const std::str
     M_AssertTrue(err == nullptr, "GetNextAfterResend no error");
     M_AssertTrue(fi.name == "2", "GetNextAfterResend filename");
 
+// delete stream
+
+    err = consumer->DeleteStream("default",asapo::DeleteStreamOptions{true,true});
+    M_AssertTrue(err == nullptr, "delete default stream ok");
+    err = consumer->DeleteStream("default",asapo::DeleteStreamOptions{true,true});
+    M_AssertTrue(err == asapo::ConsumerErrorTemplates::kWrongInput, "delete non existing stream error");
+    err = consumer->DeleteStream("default",asapo::DeleteStreamOptions{true,false});
+    M_AssertTrue(err == nullptr, "delete non existing stream ok");
+
 }
 
 
diff --git a/tests/automatic/consumer/consumer_api_python/check_linux.sh b/tests/automatic/consumer/consumer_api_python/check_linux.sh
index fabcbdc2e737426d4d2b1843c73685498dc84660..c927068adc53270c02648f545f5a7337ca4897da 100644
--- a/tests/automatic/consumer/consumer_api_python/check_linux.sh
+++ b/tests/automatic/consumer/consumer_api_python/check_linux.sh
@@ -4,7 +4,7 @@ beamtime_id=test_run
 source_path=`pwd`/asap3/petra3/gpfs/p01/2019/data/$beamtime_id
 data_source=detector
 database_name=${beamtime_id}_${data_source}
-token_test_run=$BT_TEST_RUN_TOKEN
+token_test_run=$BT_TEST_RUN_RW_TOKEN
 set -e
 
 trap Cleanup EXIT
@@ -17,8 +17,8 @@ Cleanup() {
     nomad stop broker >/dev/null
     nomad stop file_transfer >/dev/null
     nomad stop authorizer >/dev/null
-	echo "db.dropDatabase()" | mongo ${database_name} >/dev/null
-	rm $source_path/1 $source_path/1_1
+	  echo "db.dropDatabase()" | mongo ${database_name} >/dev/null
+	  rm $source_path/1 $source_path/1_1
 }
 
 nomad run nginx.nmd
@@ -30,6 +30,7 @@ nomad run authorizer.nmd
 
 mkdir -p $source_path
 echo -n hello1 > $source_path/1
+echo -n hello1 > $source_path/11
 echo -n hello1 > $source_path/1_1
 
 for i in `seq 1 5`;
diff --git a/tests/automatic/consumer/consumer_api_python/check_windows.bat b/tests/automatic/consumer/consumer_api_python/check_windows.bat
index 6f56883eb624186cf375822f27c913f558e7817e..e584db3026a8ce80bb65a0dc1d876b4426044f2c 100644
--- a/tests/automatic/consumer/consumer_api_python/check_windows.bat
+++ b/tests/automatic/consumer/consumer_api_python/check_windows.bat
@@ -8,7 +8,7 @@ SET data_source=detector
 SET database_name=%beamtime_id%_%data_source%
 
 SET mongo_exe="c:\Program Files\MongoDB\Server\4.2\bin\mongo.exe"
-set token_test_run=%BT_TEST_RUN_TOKEN%
+set token_test_run=%BT_TEST_RUN_RW_TOKEN%
 call start_services.bat
 
 for /l %%x in (1, 1, 5) do echo db.data_default.insert({"_id":%%x,"size":6,"name":"%%x","timestamp":0,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"test":10}}) | %mongo_exe% %database_name%  || goto :error
@@ -28,6 +28,7 @@ mkdir %source_path%
 set PYTHONPATH=%1
 
 echo | set /p dummyName="hello1" > %source_path%\1
+echo | set /p dummyName="hello1" > %source_path%\11
 echo | set /p dummyName="hello1" > %source_path%\1_1
 
 python %3/consumer_api.py 127.0.0.1:8400 %source_path% %beamtime_id%  %token_test_run%  single || goto :error
diff --git a/tests/automatic/consumer/consumer_api_python/consumer_api.py b/tests/automatic/consumer/consumer_api_python/consumer_api.py
index 1b5bf5a153d00bfbd4336529a2ee9a9b9823799f..bb4bd0436898721d3655aa9c5dd42f1cea3bf21d 100644
--- a/tests/automatic/consumer/consumer_api_python/consumer_api.py
+++ b/tests/automatic/consumer/consumer_api_python/consumer_api.py
@@ -47,7 +47,7 @@ def assert_eq(val, expected, name):
 
 def check_file_transfer_service(consumer, group_id):
     consumer.set_timeout(1000)
-    data, meta = consumer.get_by_id(1, meta_only=False)
+    data, meta = consumer.get_by_id(1,stream = "stream1", meta_only=False)
     assert_eq(data.tostring().decode("utf-8"), "hello1", "check_file_transfer_service ok")
     data, meta = consumer.get_by_id(1, meta_only=False, stream = "streamfts")
     assert_eq(data.tostring().decode("utf-8"), "hello1", "check_file_transfer_service with auto size ok")
@@ -232,6 +232,20 @@ def check_single(consumer, group_id):
     else:
         exit_on_noerr("wrong query")
 
+    # delete stream
+
+    consumer.delete_stream(stream='default')
+    try:
+        consumer.delete_stream()
+    except asapo_consumer.AsapoWrongInputError as err:
+        print(err)
+        pass
+    else:
+        exit_on_noerr("should be AsapoWrongInputError on delete stream second time ")
+    consumer.delete_stream(error_on_not_exist = False)
+
+    # constructors
+
     consumer = asapo_consumer.create_consumer("bla", path, True, beamtime, "", token, 1000)
     try:
         consumer.get_last(meta_only=True)
@@ -249,6 +263,7 @@ def check_single(consumer, group_id):
     else:
         exit_on_noerr("should be AsapoWrongInputError")
 
+
 # interrupt
     thread_res = 0
     def long_call(consumer):
diff --git a/tests/automatic/file_transfer_service/rest_api/check_linux.sh b/tests/automatic/file_transfer_service/rest_api/check_linux.sh
index e7350241666f62bd20a802c6ea921af2f5e3f725..a1d034a96fa53935198eba0321a139045d414321 100644
--- a/tests/automatic/file_transfer_service/rest_api/check_linux.sh
+++ b/tests/automatic/file_transfer_service/rest_api/check_linux.sh
@@ -33,9 +33,12 @@ curl -o aaa --silent -H "Authorization: Bearer ${folder_token}" --data "{\"Folde
 
 curl -H "Authorization: Bearer ${folder_token}" --data "{\"Folder\":\"$file_transfer_folder\",\"FileName\":\"aaa\",\"Token\":\"$folder_token\"}" 127.0.0.1:5008/v0.1/transfer?sizeonly=true --stderr - | tee /dev/stderr | grep 100000
 
-
 diff -q aaa $file_transfer_folder/aaa
 
+chmod -r $file_transfer_folder/aaa
+curl --silent -H "Authorization: Bearer ${folder_token}" --data "{\"Folder\":\"$file_transfer_folder\",\"FileName\":\"aaa\",\"Token\":\"$folder_token\"}" 127.0.0.1:5008/v0.1/transfer?sizeonly=true --stderr - | tee /dev/stderr | grep "does not exist"
+
+
 dd if=/dev/zero of=$file_transfer_folder/big_file bs=1 count=0 seek=5368709120
 
 curl -vvv -o big_file -H "Authorization: Bearer ${folder_token}" --data "{\"Folder\":\"$file_transfer_folder\",\"FileName\":\"big_file\",\"Token\":\"$folder_token\"}" 127.0.0.1:5008/v0.1/transfer --stderr -  | tee /dev/stderr
diff --git a/tests/automatic/file_transfer_service/rest_api/check_windows.bat b/tests/automatic/file_transfer_service/rest_api/check_windows.bat
index 9f869e0d9973a630e45ea532b1fbe35a10179325..f34c5ac4ad95bb05b5d802d95b331ae5985db9c9 100644
--- a/tests/automatic/file_transfer_service/rest_api/check_windows.bat
+++ b/tests/automatic/file_transfer_service/rest_api/check_windows.bat
@@ -17,7 +17,9 @@ C:\Curl\curl.exe --silent --data "{\"Folder\":\"%file_transfer_folder%\",\"Beamt
 set /P folder_token=< token
 
 echo hello > %file_transfer_folder%\aaa
+ping 192.0.2.1 -n 1 -w 1000 > nul
 
+C:\Curl\curl.exe -v --silent -H "Authorization: Bearer %folder_token%" --data "{\"Folder\":\"%file_transfer_folder%\",\"FileName\":\"aaa\",\"Token\":\"%folder_token%\"}" 127.0.0.1:5008/v0.1/transfer
 C:\Curl\curl.exe --silent -H "Authorization: Bearer %folder_token%" --data "{\"Folder\":\"%file_transfer_folder%\",\"FileName\":\"aaa\",\"Token\":\"%folder_token%\"}" 127.0.0.1:5008/v0.1/transfer --stderr - | findstr hello  || goto :error
 
 goto :clean
diff --git a/tests/automatic/mongo_db/insert_retrieve/insert_retrieve_mongodb.cpp b/tests/automatic/mongo_db/insert_retrieve/insert_retrieve_mongodb.cpp
index 559e54d00c5a5e8d739580d82fdb2310e2884238..e7724e3c871f8a9cc2672c35b57d1aab9da134da 100644
--- a/tests/automatic/mongo_db/insert_retrieve/insert_retrieve_mongodb.cpp
+++ b/tests/automatic/mongo_db/insert_retrieve/insert_retrieve_mongodb.cpp
@@ -28,6 +28,7 @@ Args GetArgs(int argc, char* argv[]) {
         std::cout << "Wrong number of arguments" << std::endl;
         exit(EXIT_FAILURE);
     }
+    printf("%s %s",argv[1],argv[2]) ;
     return Args{argv[1], atoi(argv[2])};
 }
 
@@ -69,7 +70,7 @@ int main(int argc, char* argv[]) {
 
     Assert(err, args.keyword);
 
-    if (args.keyword == "OK") { // check retrieve
+    if (args.keyword == "OK") { // check retrieve and stream delete
         asapo::MessageMeta fi_db;
         asapo::MongoDBClient db_new;
         db_new.Connect("127.0.0.1", "data");
@@ -79,7 +80,9 @@ int main(int argc, char* argv[]) {
         err = db_new.GetById("data_test", 0, &fi_db);
         Assert(err, "No record");
 
+
         asapo::StreamInfo info;
+
         err = db.GetStreamInfo("data_test", &info);
         M_AssertEq(nullptr, err);
         M_AssertEq(fi.id, info.last_id);
@@ -90,6 +93,25 @@ int main(int argc, char* argv[]) {
         M_AssertEq("test1", info.name);
         M_AssertEq(true, info.finished);
         M_AssertEq("ns",info.next_stream);
+
+// delete stream
+        db.Insert("inprocess_test_blabla", fi, false);
+        db.Insert("inprocess_test_blabla1", fi, false);
+        db.Insert("acks_test_blabla", fi, false);
+        db.Insert("acks_test_blabla1", fi, false);
+        db.DeleteStream("test");
+        err = db.GetStreamInfo("data_test", &info);
+        M_AssertTrue(err!=nullptr);
+        err = db.GetStreamInfo("inprocess_test_blabla", &info);
+        M_AssertTrue(err!=nullptr);
+        err = db.GetStreamInfo("inprocess_test_blabla1", &info);
+        M_AssertTrue(err!=nullptr);
+        err = db.GetStreamInfo("acks_test_blabla", &info);
+        M_AssertTrue(err!=nullptr);
+        err = db.GetStreamInfo("acks_test_blabla1", &info);
+        M_AssertTrue(err!=nullptr);
+        err = db.DeleteStream("test1");
+        M_AssertTrue(err==nullptr);
     }
 
     return 0;
diff --git a/tests/automatic/producer/python_api/producer_api.py b/tests/automatic/producer/python_api/producer_api.py
index 879053cd9c71ab79e3fa5c8eeeebbce32df673a0..10c981da4702ac11d2f9a154d3346482f816d753 100644
--- a/tests/automatic/producer/python_api/producer_api.py
+++ b/tests/automatic/producer/python_api/producer_api.py
@@ -191,6 +191,25 @@ info_last = producer.last_stream()
 assert_eq(info_last['name'], "stream", "last stream")
 assert_eq(info_last['timestampCreated'] <= info_last['timestampLast'], True, "last is later than first")
 
+# delete streams
+producer.delete_stream('stream')
+try:
+    producer.stream_info('stream')
+except asapo_producer.AsapoWrongInputError as e:
+    print(e)
+else:
+    print("should be error on stream info after stream was deleted")
+    sys.exit(1)
+producer.delete_stream('unknown_stream',error_on_not_exist = False)
+try:
+    producer.delete_stream('unknown_stream',error_on_not_exist = True)
+except asapo_producer.AsapoWrongInputError as e:
+    print(e)
+else:
+    print("should be error on delete unknown stream with flag")
+    sys.exit(1)
+
+
 # create with error
 try:
     producer = asapo_producer.create_producer(endpoint,'processed', beamtime, 'auto', data_source, token, 0, 0)
diff --git a/tests/manual/python_tests/consumer/consumer_api_test.py b/tests/manual/python_tests/consumer/consumer_api_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..a8f270ad7ce638145eb7cfe04acd8416252daf57
--- /dev/null
+++ b/tests/manual/python_tests/consumer/consumer_api_test.py
@@ -0,0 +1,27 @@
+import asapo_consumer
+import time
+
+endpoint = "asap3-utl01.desy.de:8400"
+beamtime = "11012171"
+token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE2MzU3NTMxMDksImp0aSI6ImMyOTR0NWFodHY1am9vZHVoaGNnIiwic3ViIjoiYnRfMTEwMTIxNzEiLCJFeHRyYUNsYWltcyI6eyJBY2Nlc3NUeXBlcyI6WyJyZWFkIiwid3JpdGUiXX19.kITePbv_dXY2ACxpAQ-PeQJPQtnR02bMoFrXq0Pbcm0"
+datasource = 'VmFyZXgx'
+
+consumer = asapo_consumer.create_consumer(endpoint,"/asap3/petra3/gpfs/p21.2/2021/data/11012171",False,
+                                          beamtime,datasource,token,20000)
+laststream=consumer.get_stream_list()[-1]["name"]
+print("laststream = " + laststream)
+
+group_id = consumer.generate_group_id()
+t1=time.time()
+
+data, meta = consumer.get_next(group_id, meta_only = False, stream=laststream)
+#meta['buf_id'] = 0
+#data = consumer.retrieve_data(meta)
+
+
+print ("total time: %f" % (time.time()-t1))
+
+print ('id:',meta['_id'])
+print ('file name:',meta['name'])
+print ('file content:',repr(data.tobytes()[:1000]))
+
diff --git a/tests/manual/python_tests/consumer/test_asap3.sh b/tests/manual/python_tests/consumer/test_asap3.sh
index 4326a17455a058fda23d31f0cf78115fa60e4ad1..1bea28cdbe132fd62f58cc3d0cbd95a3f658812d 100755
--- a/tests/manual/python_tests/consumer/test_asap3.sh
+++ b/tests/manual/python_tests/consumer/test_asap3.sh
@@ -1,4 +1,4 @@
-#export PYTHONPATH=/Users/yakubov/projects/asapo/cmake-build-debug/consumer/api/python
+export PYTHONPATH=/Users/yakubov/projects/asapo/cmake-build-debug/consumer/api/python
 export token=KmUDdacgBzaOD3NIJvN1NmKGqWKtx0DK-NyPjdpeWkc=
 export token=_bVfWrAh4S36-xadWkE71i_oOsPPgwFjzIh6UBFCxxU=
 python3 consumer_api.py asap3-utl.desy.de:8400 /gpfs/asapo/shared/test_dir/test_facility/gpfs/test/2019/data/asapo_test 11010325 $token