diff --git a/CMakeLists.txt b/CMakeLists.txt
index 8fba9c3a691810e151bfe10dbe06b20d5abab0ea..2b2936592048706383169fc48d63e8e011e05574 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -33,7 +33,7 @@ option(BUILD_DOCS "Uses doxygen to build the documentaion" OFF)
 option(BUILD_BROKER "Build broker" OFF)
 option(BUILD_PYTHON_DOCS "Uses sphinx to build the Python documentaion" OFF)
 
-option(BUILD_WORKER_TOOLS "Build worker tools" OFF)
+option(BUILD_CONSUMER_TOOLS "Build consumer tools" OFF)
 option(BUILD_EXAMPLES "Build examples" OFF)
 
 set(CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/CMakeModules/)
@@ -57,19 +57,19 @@ include(testing_cpp)
 
 include(prepare_asapo)
 
-if(BUILD_WORKER_TOOLS)
+if(BUILD_CONSUMER_TOOLS)
     set (BUILD_MONGODB_CLIENTLIB ON)
 endif()
 
 add_subdirectory(common/cpp)
 
-if (BUILD_BROKER)#TODO: Somehow make it clear that this is needed by examples/worker/getnext_broker
+if (BUILD_BROKER)#TODO: Somehow make it clear that this is needed by examples/consumer/getnext_broker
     add_subdirectory(broker)
 endif()
 
 add_subdirectory(producer)
 
-add_subdirectory(worker)
+add_subdirectory(consumer)
 
 add_subdirectory(receiver)
 
diff --git a/asapo_tools/src/asapo_tools/cli/token.go b/asapo_tools/src/asapo_tools/cli/token.go
index 7fdb749d21ebd6c1fae66a1ad629b29b5c00a0a5..9282a48e756263c3c6d987e18891d89979f29230 100644
--- a/asapo_tools/src/asapo_tools/cli/token.go
+++ b/asapo_tools/src/asapo_tools/cli/token.go
@@ -23,7 +23,7 @@ func generateToken(id string,secret string) string {
 }
 
 
-// GenerateToken generates token for workers
+// GenerateToken generates token for consumers
 func (cmd *command) CommandToken() error {
 
 	message_string := "Generate token"
diff --git a/broker/src/asapo_broker/database/mongodb.go b/broker/src/asapo_broker/database/mongodb.go
index 64dc01e2e9827cf9a15dd46d3887d4897e357530..2ccfd229f1da583bd8a325e007422fe3da1d1dcd 100644
--- a/broker/src/asapo_broker/database/mongodb.go
+++ b/broker/src/asapo_broker/database/mongodb.go
@@ -133,7 +133,7 @@ func (db *Mongodb) InsertMeta(dbname string, s interface{}) error {
 	return c.Insert(s)
 }
 
-func (db *Mongodb) getMaxIndex(dbname string, dataset bool) (max_id int, err error) {
+func (db *Mongodb) getMaxIndex(dbname string, dataset bool) (max_id int) {
 	c := db.session.DB(dbname).C(data_collection_name)
 	var id Pointer
 	var q bson.M
@@ -142,11 +142,11 @@ func (db *Mongodb) getMaxIndex(dbname string, dataset bool) (max_id int, err err
 	} else {
 		q = nil
 	}
-	err = c.Find(q).Sort("-_id").Select(bson.M{"_id": 1}).One(&id)
+	err := c.Find(q).Sort("-_id").Select(bson.M{"_id": 1}).One(&id)
 	if err != nil {
-		return 0, nil
+		return 0
 	}
-	return id.ID, nil
+	return id.ID
 }
 
 func (db *Mongodb) createLocationPointers(dbname string, group_id string) (err error) {
@@ -178,12 +178,22 @@ func (db *Mongodb) incrementField(dbname string, group_id string, max_ind int, r
 	c := db.session.DB(dbname).C(pointer_collection_name)
 	_, err = c.Find(q).Apply(change, res)
 	if err == mgo.ErrNotFound {
-		return &DBError{utils.StatusNoData, err.Error()}
+		return &DBError{utils.StatusNoData, encodeAnswer(max_ind, max_ind)}
 	}
 	return err
 }
 
-func (db *Mongodb) GetRecordByIDRow(dbname string, id int, returnID bool, dataset bool) ([]byte, error) {
+func encodeAnswer(id, id_max int) string {
+	var r = struct {
+		Op     string `json:"op"`
+		Id     int    `json:"id"`
+		Id_max int    `json:"id_max"`
+	}{"get_record_by_id", id, id_max}
+	answer, _ := json.Marshal(&r)
+	return string(answer)
+}
+
+func (db *Mongodb) GetRecordByIDRow(dbname string, id, id_max int, dataset bool) ([]byte, error) {
 	var res map[string]interface{}
 	var q bson.M
 	if dataset {
@@ -195,25 +205,17 @@ func (db *Mongodb) GetRecordByIDRow(dbname string, id int, returnID bool, datase
 	c := db.session.DB(dbname).C(data_collection_name)
 	err := c.Find(q).One(&res)
 	if err != nil {
-		var r = struct {
-			Id int `json:"id""`
-		}{id}
-		answer, _ := json.Marshal(&r)
+		answer := encodeAnswer(id, id_max)
 		log_str := "error getting record id " + strconv.Itoa(id) + " for " + dbname + " : " + err.Error()
 		logger.Debug(log_str)
-		if returnID {
-			return nil, &DBError{utils.StatusNoData, string(answer)}
-		} else {
-			return nil, &DBError{utils.StatusNoData, err.Error()}
-		}
-
+		return nil, &DBError{utils.StatusNoData, answer}
 	}
 	log_str := "got record id " + strconv.Itoa(id) + " for " + dbname
 	logger.Debug(log_str)
 	return utils.MapToJson(&res)
 }
 
-func (db *Mongodb) GetRecordByID(dbname string, group_id string, id_str string, returnID bool, reset bool, dataset bool) ([]byte, error) {
+func (db *Mongodb) GetRecordByID(dbname string, group_id string, id_str string, dataset bool) ([]byte, error) {
 	id, err := strconv.Atoi(id_str)
 	if err != nil {
 		return nil, err
@@ -222,11 +224,9 @@ func (db *Mongodb) GetRecordByID(dbname string, group_id string, id_str string,
 	if err := db.checkDatabaseOperationPrerequisites(dbname, group_id); err != nil {
 		return nil, err
 	}
-	res, err := db.GetRecordByIDRow(dbname, id, returnID, dataset)
 
-	if reset {
-		db.setCounter(dbname, group_id, id)
-	}
+	max_ind := db.getMaxIndex(dbname, dataset)
+	res, err := db.GetRecordByIDRow(dbname, id, max_ind, dataset)
 
 	return res, err
 }
@@ -277,27 +277,24 @@ func (db *Mongodb) checkDatabaseOperationPrerequisites(db_name string, group_id
 	return nil
 }
 
-func (db *Mongodb) getCurrentPointer(db_name string, group_id string) (Pointer, error) {
-	max_ind, err := db.getMaxIndex(db_name, false)
-	if err != nil {
-		return Pointer{}, err
-	}
+func (db *Mongodb) getCurrentPointer(db_name string, group_id string, dataset bool) (Pointer, int, error) {
+	max_ind := db.getMaxIndex(db_name, dataset)
+
 	var curPointer Pointer
-	err = db.incrementField(db_name, group_id, max_ind, &curPointer)
+	err := db.incrementField(db_name, group_id, max_ind, &curPointer)
 	if err != nil {
-		return Pointer{}, err
+		return Pointer{}, 0, err
 	}
 
-	return curPointer, nil
+	return curPointer, max_ind, nil
 }
 
 func (db *Mongodb) GetNextRecord(db_name string, group_id string, dataset bool) ([]byte, error) {
-
 	if err := db.checkDatabaseOperationPrerequisites(db_name, group_id); err != nil {
 		return nil, err
 	}
 
-	curPointer, err := db.getCurrentPointer(db_name, group_id)
+	curPointer, max_ind, err := db.getCurrentPointer(db_name, group_id, dataset)
 	if err != nil {
 		log_str := "error getting next pointer for " + db_name + ", groupid: " + group_id + ":" + err.Error()
 		logger.Debug(log_str)
@@ -305,7 +302,7 @@ func (db *Mongodb) GetNextRecord(db_name string, group_id string, dataset bool)
 	}
 	log_str := "got next pointer " + strconv.Itoa(curPointer.Value) + " for " + db_name + ", groupid: " + group_id
 	logger.Debug(log_str)
-	return db.GetRecordByIDRow(db_name, curPointer.Value, true, dataset)
+	return db.GetRecordByIDRow(db_name, curPointer.Value, max_ind, dataset)
 
 }
 
@@ -315,13 +312,8 @@ func (db *Mongodb) GetLastRecord(db_name string, group_id string, dataset bool)
 		return nil, err
 	}
 
-	max_ind, err := db.getMaxIndex(db_name, dataset)
-	if err != nil {
-		log_str := "error getting last pointer for " + db_name + ", groupid: " + group_id + ":" + err.Error()
-		logger.Debug(log_str)
-		return nil, err
-	}
-	res, err := db.GetRecordByIDRow(db_name, max_ind, false, dataset)
+	max_ind := db.getMaxIndex(db_name, dataset)
+	res, err := db.GetRecordByIDRow(db_name, max_ind, max_ind, dataset)
 
 	db.setCounter(db_name, group_id, max_ind)
 
@@ -344,13 +336,17 @@ func (db *Mongodb) GetSize(db_name string) ([]byte, error) {
 	return json.Marshal(&rec)
 }
 
-func (db *Mongodb) ResetCounter(db_name string, group_id string) ([]byte, error) {
+func (db *Mongodb) ResetCounter(db_name string, group_id string, id_str string) ([]byte, error) {
+	id, err := strconv.Atoi(id_str)
+	if err != nil {
+		return nil, err
+	}
 
 	if err := db.checkDatabaseOperationPrerequisites(db_name, group_id); err != nil {
 		return nil, err
 	}
 
-	err := db.setCounter(db_name, group_id, 0)
+	err = db.setCounter(db_name, group_id, id)
 
 	return []byte(""), err
 }
@@ -416,13 +412,11 @@ func (db *Mongodb) ProcessRequest(db_name string, group_id string, op string, ex
 	case "next":
 		return db.GetNextRecord(db_name, group_id, dataset)
 	case "id":
-		return db.GetRecordByID(db_name, group_id, extra_param, true, false, dataset)
-	case "idreset":
-		return db.GetRecordByID(db_name, group_id, extra_param, true, true, dataset)
+		return db.GetRecordByID(db_name, group_id, extra_param, dataset)
 	case "last":
 		return db.GetLastRecord(db_name, group_id, dataset)
 	case "resetcounter":
-		return db.ResetCounter(db_name, group_id)
+		return db.ResetCounter(db_name, group_id, extra_param)
 	case "size":
 		return db.GetSize(db_name)
 	case "meta":
diff --git a/broker/src/asapo_broker/database/mongodb_test.go b/broker/src/asapo_broker/database/mongodb_test.go
index 9038f6e28b05b1c242287b70c6e04532bc46b97b..12a4ba197e58b3479604c39b09881db5b6dd689c 100644
--- a/broker/src/asapo_broker/database/mongodb_test.go
+++ b/broker/src/asapo_broker/database/mongodb_test.go
@@ -91,7 +91,7 @@ func TestMongoDBGetNextErrorWhenRecordNotThereYet(t *testing.T) {
 	db.InsertRecord(dbname, &rec2)
 	_, err := db.GetNextRecord(dbname, groupId, false)
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
-	assert.Equal(t, "{\"id\":1}", err.Error())
+	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":2}", err.Error())
 }
 
 func TestMongoDBGetNextOK(t *testing.T) {
@@ -110,8 +110,17 @@ func TestMongoDBGetNextErrorOnNoMoreData(t *testing.T) {
 	db.GetNextRecord(dbname, groupId, false)
 	_, err := db.GetNextRecord(dbname, groupId, false)
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
+	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1}", err.(*DBError).Message)
 }
 
+//func TestMongoDBGetNextErrorOnDataAtAll(t *testing.T) {
+//	db.Connect(dbaddress)
+//	defer cleanup()
+//	_, err := db.GetNextRecord(dbname, groupId, false)
+//	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
+//	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":0,\"id_max\":0}", err.(*DBError).Message)
+//}
+
 func TestMongoDBGetNextCorrectOrder(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
@@ -176,7 +185,7 @@ func TestMongoDBGetRecordByID(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	db.InsertRecord(dbname, &rec1)
-	res, err := db.GetRecordByID(dbname, "", "1", true, false, false)
+	res, err := db.GetRecordByID(dbname, "", "1", false)
 	assert.Nil(t, err)
 	assert.Equal(t, string(rec1_expect), string(res))
 }
@@ -185,9 +194,9 @@ func TestMongoDBGetRecordByIDFails(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 	db.InsertRecord(dbname, &rec1)
-	_, err := db.GetRecordByID(dbname, "", "2", true, false, false)
+	_, err := db.GetRecordByID(dbname, "", "2", false)
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
-	assert.Equal(t, "{\"id\":2}", err.Error())
+	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":2,\"id_max\":1}", err.Error())
 }
 
 func TestMongoDBGetRecordNext(t *testing.T) {
@@ -276,24 +285,8 @@ func TestMongoDBGetSizeNoDatabase(t *testing.T) {
 	assert.NotNil(t, err)
 }
 
-func TestMongoDBGetRecordIDWithReset(t *testing.T) {
-	db.Connect(dbaddress)
-	defer cleanup()
-	db.InsertRecord(dbname, &rec1)
-	db.InsertRecord(dbname, &rec2)
-
-	res1, err1 := db.ProcessRequest(dbname, groupId, "idreset", "1")
-	res2, err2 := db.ProcessRequest(dbname, groupId, "next", "0")
-
-	assert.Nil(t, err1)
-	assert.Equal(t, string(rec1_expect), string(res1))
-	assert.Nil(t, err2)
-	assert.Equal(t, string(rec2_expect), string(res2))
-
-}
-
 func TestMongoDBGetRecordByIDNotConnected(t *testing.T) {
-	_, err := db.GetRecordByID(dbname, "", "2", true, false, false)
+	_, err := db.GetRecordByID(dbname, "", "2", false)
 	assert.Equal(t, utils.StatusError, err.(*DBError).Code)
 }
 
@@ -308,14 +301,13 @@ func TestMongoDBResetCounter(t *testing.T) {
 	assert.Nil(t, err1)
 	assert.Equal(t, string(rec1_expect), string(res1))
 
-	_, err_reset := db.ProcessRequest(dbname, groupId, "resetcounter", "0")
+	_, err_reset := db.ProcessRequest(dbname, groupId, "resetcounter", "1")
 	assert.Nil(t, err_reset)
 
 	res2, err2 := db.ProcessRequest(dbname, groupId, "next", "0")
 
 	assert.Nil(t, err2)
-	assert.Equal(t, string(rec1_expect), string(res2))
-
+	assert.Equal(t, string(rec2_expect), string(res2))
 }
 
 func TestMongoDBGetMetaOK(t *testing.T) {
@@ -424,7 +416,8 @@ func TestMongoDBQueryImagesOK(t *testing.T) {
 }
 
 var rec_dataset1 = TestDataset{1, 3, []TestRecord{rec1, rec2, rec3}}
-var rec_dataset2 = TestDataset{2, 2, []TestRecord{rec1, rec2, rec3}}
+var rec_dataset1_incomplete = TestDataset{1, 4, []TestRecord{rec1, rec2, rec3}}
+var rec_dataset2 = TestDataset{2, 4, []TestRecord{rec1, rec2, rec3}}
 var rec_dataset3 = TestDataset{3, 3, []TestRecord{rec3, rec2, rec2}}
 
 func TestMongoDBGetDataset(t *testing.T) {
@@ -443,15 +436,17 @@ func TestMongoDBGetDataset(t *testing.T) {
 	assert.Equal(t, rec_dataset1, res)
 }
 
-func TestMongoDBNoDataOnNotCompletedDataset(t *testing.T) {
+func TestMongoDBNoDataOnNotCompletedFirstDataset(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 
-	db.InsertRecord(dbname, &rec_dataset2)
+	db.InsertRecord(dbname, &rec_dataset1_incomplete)
 
 	res_string, err := db.ProcessRequest(dbname, groupId, "next_dataset", "0")
 
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
+	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":0,\"id_max\":0}", err.(*DBError).Message)
+
 	assert.Equal(t, "", string(res_string))
 }
 
@@ -489,25 +484,6 @@ func TestMongoDBGetRecordLastDataSetOK(t *testing.T) {
 	assert.Equal(t, rec_dataset3, res)
 }
 
-func TestMongoDBGetDatasetIDWithReset(t *testing.T) {
-	db.Connect(dbaddress)
-	defer cleanup()
-	db.InsertRecord(dbname, &rec_dataset1)
-	db.InsertRecord(dbname, &rec_dataset3)
-
-	_, err1 := db.ProcessRequest(dbname, groupId, "idreset_dataset", "2")  //error while record is not complete, but reset counter to 2
-	res2s, err2 := db.ProcessRequest(dbname, groupId, "next_dataset", "0") // so getnext would get record number 3
-
-	assert.NotNil(t, err1)
-	assert.Nil(t, err2)
-
-	var res2 TestDataset
-	json.Unmarshal(res2s, &res2)
-
-	assert.Equal(t, rec_dataset3, res2)
-
-}
-
 func TestMongoDBGetDatasetID(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
diff --git a/broker/src/asapo_broker/server/get_id_test.go b/broker/src/asapo_broker/server/get_id_test.go
index 907035f9d77c141da11840212f14dd3538ae5179..2991d2e803563f8a2985b21fb1af98a8ed99f3be 100644
--- a/broker/src/asapo_broker/server/get_id_test.go
+++ b/broker/src/asapo_broker/server/get_id_test.go
@@ -53,13 +53,3 @@ func (suite *GetIDTestSuite) TestGetIdCallsCorrectRoutine() {
 	suite.Equal(http.StatusOK, w.Code, "GetImage OK")
 	suite.Equal("Hello", string(w.Body.Bytes()), "GetID sends data")
 }
-
-func (suite *GetIDTestSuite) TestGetIdWithResetCallsCorrectRoutine() {
-	suite.mock_db.On("ProcessRequest", expectedDBName, expectedGroupID, "idreset", "1").Return([]byte("Hello"), nil)
-	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request")))
-	ExpectCopyClose(suite.mock_db)
-
-	w := doRequest("/database/" + expectedBeamtimeId + "/" + expectedStream + "/" + expectedGroupID + "/1" + correctTokenSuffix + "&reset=true")
-	suite.Equal(http.StatusOK, w.Code, "GetImage OK")
-	suite.Equal("Hello", string(w.Body.Bytes()), "GetID sends data")
-}
diff --git a/broker/src/asapo_broker/server/listroutes.go b/broker/src/asapo_broker/server/listroutes.go
index 6925d89e6500fd7e4fdaf22820b91547eae1f043..76595e4073f186237e83d33ead6f45f50d10dc7e 100644
--- a/broker/src/asapo_broker/server/listroutes.go
+++ b/broker/src/asapo_broker/server/listroutes.go
@@ -48,7 +48,7 @@ var listRoutes = utils.Routes{
 		routeQueryImages,
 	},
 	utils.Route{
-		"ResetCounter",
+		"ResetCounter",
 		"Post",
 		"/database/{dbname}/{stream}/{groupid}/resetcounter",
 		routeResetCounter,
diff --git a/broker/src/asapo_broker/server/post_reset_counter.go b/broker/src/asapo_broker/server/post_reset_counter.go
index fd881f72f1fb81596077ae08f80f69888c9ad9d3..b67934d4e7dde2eea0ca198f8f3368f4eea29179 100644
--- a/broker/src/asapo_broker/server/post_reset_counter.go
+++ b/broker/src/asapo_broker/server/post_reset_counter.go
@@ -4,6 +4,15 @@ import (
 	"net/http"
 )
 
+func extractRequestParametersValue(r *http.Request) string {
+	val := r.URL.Query().Get("value")
+	if len(val) == 0 {
+		return "0"
+	}
+	return val
+}
+
 func routeResetCounter(w http.ResponseWriter, r *http.Request) {
-	processRequest(w, r, "resetcounter", "0", true)
+	val := extractRequestParametersValue(r)
+	processRequest(w, r, "resetcounter", val, true)
 }
diff --git a/broker/src/asapo_broker/server/post_reset_counter_test.go b/broker/src/asapo_broker/server/post_reset_counter_test.go
index 4cff3551b0f540cc73b54e3866f4c52a00e0fb6b..e0b67f29a1092ce9da74a28c5686581688fc0430 100644
--- a/broker/src/asapo_broker/server/post_reset_counter_test.go
+++ b/broker/src/asapo_broker/server/post_reset_counter_test.go
@@ -33,10 +33,10 @@ func TestResetCounterTestSuite(t *testing.T) {
 }
 
 func (suite *ResetCounterTestSuite) TestResetCounterOK() {
-	suite.mock_db.On("ProcessRequest", expectedDBName, expectedGroupID, "resetcounter", "0").Return([]byte(""), nil)
+	suite.mock_db.On("ProcessRequest", expectedDBName, expectedGroupID, "resetcounter", "10").Return([]byte(""), nil)
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request resetcounter")))
 	ExpectCopyClose(suite.mock_db)
 
-	w := doRequest("/database/"+expectedBeamtimeId+"/"+expectedStream+"/"+expectedGroupID+"/resetcounter"+correctTokenSuffix, "POST")
+	w := doRequest("/database/"+expectedBeamtimeId+"/"+expectedStream+"/"+expectedGroupID+"/resetcounter"+correctTokenSuffix+"&value=10", "POST")
 	suite.Equal(http.StatusOK, w.Code, "ResetCounter OK")
 }
diff --git a/broker/src/asapo_broker/server/process_request.go b/broker/src/asapo_broker/server/process_request.go
index 27c23cec4bdd2d7c62153ed18e742af7392033a9..a912998bc3d4f697338209ab58352ce6addb53d0 100644
--- a/broker/src/asapo_broker/server/process_request.go
+++ b/broker/src/asapo_broker/server/process_request.go
@@ -58,10 +58,6 @@ func processRequest(w http.ResponseWriter, r *http.Request, op string, extra_par
 		return
 	}
 
-	if op == "id" && resetRequested(r) {
-		op = "idreset"
-	}
-
 	if datasetRequested(r) {
 		op = op + "_dataset"
 	}
diff --git a/broker/src/asapo_broker/server/request_common.go b/broker/src/asapo_broker/server/request_common.go
index d53a67d592cfe4b7788302af4cbfc5d805249e46..7225499dc623385eb7f37273d4658c53b0de5233 100644
--- a/broker/src/asapo_broker/server/request_common.go
+++ b/broker/src/asapo_broker/server/request_common.go
@@ -15,7 +15,6 @@ func writeAuthAnswer(w http.ResponseWriter, requestName string, db_name string,
 
 func ValueTrue(r *http.Request, key string) bool {
 	val := r.URL.Query().Get(key)
-
 	if len(val) == 0 {
 		return false
 	}
@@ -23,13 +22,7 @@ func ValueTrue(r *http.Request, key string) bool {
 	if val == "true" {
 		return true
 	}
-
 	return false
-
-}
-
-func resetRequested(r *http.Request) bool {
-	return ValueTrue(r, "reset")
 }
 
 func datasetRequested(r *http.Request) bool {
diff --git a/common/cpp/include/common/error.h b/common/cpp/include/common/error.h
index 397b568589b27bdcc342eea716bbcd733bc14628..dd0d58d816a7b8ebb4a0ab18cc62f8edaf39757c 100644
--- a/common/cpp/include/common/error.h
+++ b/common/cpp/include/common/error.h
@@ -15,13 +15,14 @@ enum class ErrorType {
     kDBError,
     kReceiverError,
     kProducerError,
-    kWorkerError,
+    kConsumerError,
     kMemoryAllocationError,
     kEndOfFile,
 };
 
 class ErrorInterface;
 class ErrorTemplateInterface;
+class CustomErrorData;
 
 // nullptr == noError
 // Example check:
@@ -37,6 +38,8 @@ class ErrorInterface {
     virtual std::string Explain() const noexcept = 0;
     virtual void Append(const std::string& value) noexcept = 0;
     virtual ErrorType GetErrorType() const noexcept = 0;
+    virtual const CustomErrorData* GetCustomData() = 0;
+    virtual void SetCustomData(std::unique_ptr<CustomErrorData> data) = 0;
     virtual ~ErrorInterface() = default; // needed for unique_ptr to delete itself
 };
 
@@ -75,9 +78,15 @@ static inline std::ostream& operator<<(std::ostream& os, const Error& err) {
     return os;
 }
 
+class CustomErrorData {
+  public:
+    virtual ~CustomErrorData() = default;
+};
+
 class SimpleError: public ErrorInterface {
   private:
     std::string error_;
+    std::unique_ptr<CustomErrorData> custom_data_;
     ErrorType error_type_ = ErrorType::kAsapoError;
   public:
     explicit SimpleError(std::string error): error_{std::move(error)} {
@@ -86,6 +95,18 @@ class SimpleError: public ErrorInterface {
     SimpleError(std::string error, ErrorType error_type ): error_{std::move(error)}, error_type_{error_type} {
     }
 
+    const CustomErrorData* GetCustomData() override {
+        if (custom_data_) {
+            return custom_data_.get();
+        } else {
+            return nullptr;
+        }
+    }
+
+    void SetCustomData(std::unique_ptr<CustomErrorData> data) override {
+        custom_data_ = std::move(data);
+    }
+
     void Append(const std::string& value) noexcept override {
         error_ += ": " + value;
     }
diff --git a/config/bamboo/bamboo.java b/config/bamboo/bamboo.java
index 15b2429c6d6eff80c1b087108019bb97c7a42212..9ab570278b6c442dad7677871821705dd6f7b310 100644
--- a/config/bamboo/bamboo.java
+++ b/config/bamboo/bamboo.java
@@ -79,9 +79,9 @@ public class PlanSpec {
                                     .copyPattern("**/*")
                                     .location("build/coverage-asapo-producer"),
                                 new Artifact()
-                                    .name("Coverage-Worker")
+                                    .name("Coverage-Consumer")
                                     .copyPattern("**/*")
-                                    .location("build/coverage-hidra2-worker"),
+                                    .location("build/coverage-hidra2-consumer"),
                                 new Artifact()
                                     .name("Coverage-Broker")
                                     .copyPattern("coverage.html")
@@ -113,7 +113,7 @@ public class PlanSpec {
                                 new CommandTask()
                                     .description("build")
                                     .executable("bash")
-                                    .argument("-c \"/opt/asapo/cmake-3.7.2/bin/cmake -DLIBCURL_DIR=/opt/asapo/libcurl -DCMAKE_BUILD_TYPE=Debug -DBUILD_TESTS=ON -DBUILD_DOCS=ON -DBUILD_INTEGRATION_TESTS=ON -DBUILD_EXAMPLES=ON -DBUILD_WORKER_TOOLS=ON -DBUILD_BROKER=ON .. && make\"")
+                                    .argument("-c \"/opt/asapo/cmake-3.7.2/bin/cmake -DLIBCURL_DIR=/opt/asapo/libcurl -DCMAKE_BUILD_TYPE=Debug -DBUILD_TESTS=ON -DBUILD_DOCS=ON -DBUILD_INTEGRATION_TESTS=ON -DBUILD_EXAMPLES=ON -DBUILD_CONSUMER_TOOLS=ON -DBUILD_BROKER=ON .. && make\"")
                                     .environmentVariables("PATH=$PATH:/usr/local/go/bin GOPATH=/opt/asapo/go GOROOT=/usr/local/go")
                                     .workingSubdirectory("build"),
                                 new CommandTask()
@@ -172,7 +172,7 @@ public class PlanSpec {
                                 new ScriptTask()
                                     .description("build with CMake")
                                     .interpreter(ScriptTaskProperties.Interpreter.BINSH_OR_CMDEXE)
-                                    .inlineBody("SET GOPATH=\"c:\\GoPath\"\n\"c:\\Program Files\\CMake\\bin\\cmake\" -DLIBCURL_DIR=c:/Curl -Dgtest_SOURCE_DIR=c:/googletest -DCMAKE_BUILD_TYPE=Debug -DBUILD_TESTS=ON -DBUILD_DOCS=ON -DBUILD_INTEGRATION_TESTS=ON -DBUILD_EXAMPLES=ON -DBUILD_WORKER_TOOLS=ON -DBUILD_BROKER=ON -Dlibmongoc-static-1.0_DIR=\"c:\\mongo-c-driver\\lib\\cmake\\libmongoc-static-1.0\" -Dlibbson-static-1.0_DIR=\"c:\\mongo-c-driver\\lib\\cmake\\libbson-static-1.0\" ..\n\"c:\\Program Files\\CMake\\bin\\cmake\" --build .")
+                                    .inlineBody("SET GOPATH=\"c:\\GoPath\"\n\"c:\\Program Files\\CMake\\bin\\cmake\" -DLIBCURL_DIR=c:/Curl -Dgtest_SOURCE_DIR=c:/googletest -DCMAKE_BUILD_TYPE=Debug -DBUILD_TESTS=ON -DBUILD_DOCS=ON -DBUILD_INTEGRATION_TESTS=ON -DBUILD_EXAMPLES=ON -DBUILD_CONSUMER_TOOLS=ON -DBUILD_BROKER=ON -Dlibmongoc-static-1.0_DIR=\"c:\\mongo-c-driver\\lib\\cmake\\libmongoc-static-1.0\" -Dlibbson-static-1.0_DIR=\"c:\\mongo-c-driver\\lib\\cmake\\libbson-static-1.0\" ..\n\"c:\\Program Files\\CMake\\bin\\cmake\" --build .")
                                     .workingSubdirectory("build"),
                                 new ScriptTask()
                                     .description("Run tests")
@@ -209,7 +209,7 @@ public class PlanSpec {
                                 new ScriptTask()
                                     .description("build with CMake")
                                     .interpreter(ScriptTaskProperties.Interpreter.BINSH_OR_CMDEXE)
-                                    .inlineBody("SET GOPATH=\"c:\\GoPath\"\n\"c:\\Program Files\\CMake\\bin\\cmake\" -DLIBCURL_DIR=c:/Curl -Dgtest_SOURCE_DIR=c:/googletest -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTS=OFF -DBUILD_DOCS=OFF -DBUILD_INTEGRATION_TESTS=OFF -DBUILD_EXAMPLES=ON -DBUILD_WORKER_TOOLS=ON -DBUILD_BROKER=ON -Dlibmongoc-static-1.0_DIR=\"c:\\mongo-c-driver\\lib\\cmake\\libmongoc-static-1.0\" -Dlibbson-static-1.0_DIR=\"c:\\mongo-c-driver\\lib\\cmake\\libbson-static-1.0\" ..\n\"c:\\Program Files\\CMake\\bin\\cmake\" --build .")
+                                    .inlineBody("SET GOPATH=\"c:\\GoPath\"\n\"c:\\Program Files\\CMake\\bin\\cmake\" -DLIBCURL_DIR=c:/Curl -Dgtest_SOURCE_DIR=c:/googletest -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTS=OFF -DBUILD_DOCS=OFF -DBUILD_INTEGRATION_TESTS=OFF -DBUILD_EXAMPLES=ON -DBUILD_CONSUMER_TOOLS=ON -DBUILD_BROKER=ON -Dlibmongoc-static-1.0_DIR=\"c:\\mongo-c-driver\\lib\\cmake\\libmongoc-static-1.0\" -Dlibbson-static-1.0_DIR=\"c:\\mongo-c-driver\\lib\\cmake\\libbson-static-1.0\" ..\n\"c:\\Program Files\\CMake\\bin\\cmake\" --build .")
                                     .workingSubdirectory("build"),
                                 new ScriptTask()
                                     .description("Run tests")
@@ -261,9 +261,9 @@ public class PlanSpec {
                                     .location("build_release/producer/event_monitor_producer")
                                     .shared(true),
                                 new Artifact()
-                                    .name("Worker Linux")
+                                    .name("Consumer Linux")
                                     .copyPattern("getnext_broker")
-                                    .location("build_release/examples/worker/getnext_broker")
+                                    .location("build_release/examples/consumer/getnext_broker")
                                     .shared(true))
                             .tasks(new VcsCheckoutTask()
                                     .checkoutItems(new CheckoutItem().defaultRepository())
@@ -275,7 +275,7 @@ public class PlanSpec {
                                 new CommandTask()
                                     .description("build")
                                     .executable("bash")
-                                    .argument("-c \"/opt/asapo/cmake-3.7.2/bin/cmake -DLIBCURL_DIR=/opt/asapo/libcurl -DCMAKE_BUILD_TYPE=Release  -DBUILD_EXAMPLES=ON -DBUILD_WORKER_TOOLS=ON -DBUILD_BROKER=ON .. && make\"")
+                                    .argument("-c \"/opt/asapo/cmake-3.7.2/bin/cmake -DLIBCURL_DIR=/opt/asapo/libcurl -DCMAKE_BUILD_TYPE=Release  -DBUILD_EXAMPLES=ON -DBUILD_CONSUMER_TOOLS=ON -DBUILD_BROKER=ON .. && make\"")
                                     .environmentVariables("PATH=$PATH:/usr/local/go/bin GOPATH=/opt/asapo/go GOROOT=/usr/local/go")
                                     .workingSubdirectory("build_release"),
                                 new CommandTask()
@@ -340,7 +340,7 @@ public class PlanSpec {
                                     .artifacts(new DownloadItem()
                                             .artifact("Dummy Producer Linux"),
                                         new DownloadItem()
-                                            .artifact("Worker Linux"),
+                                            .artifact("Consumer Linux"),
                                         new DownloadItem()
                                             .artifact("File Monitor Producer Windows"),
                                         new DownloadItem()
diff --git a/config/grafana/ASAP__O.json b/config/grafana/ASAP__O.json
index 8311cfdbc7471b7a48e8d075aecfa59b98486279..775b63e8e300e777d4b42848eb7abc501ed2d5e2 100644
--- a/config/grafana/ASAP__O.json
+++ b/config/grafana/ASAP__O.json
@@ -634,7 +634,7 @@
           "tags": []
         },
         {
-          "alias": "Worker",
+          "alias": "Consumer",
           "groupBy": [
             {
               "params": [
@@ -668,7 +668,7 @@
       "thresholds": [],
       "timeFrom": null,
       "timeShift": null,
-      "title": "Receiver/Worker Rates",
+      "title": "Receiver/Consumer Rates",
       "tooltip": {
         "shared": true,
         "sort": 0,
diff --git a/worker/CMakeLists.txt b/consumer/CMakeLists.txt
similarity index 82%
rename from worker/CMakeLists.txt
rename to consumer/CMakeLists.txt
index 93fb3845bddf214cc5872dde5f22167d763a0e80..9f330fbbd8aa45b36f2ddcdc2fd83a6eafbdb13b 100644
--- a/worker/CMakeLists.txt
+++ b/consumer/CMakeLists.txt
@@ -2,7 +2,7 @@ add_subdirectory(api/cpp)
 add_subdirectory(api/python)
 
 
-if(BUILD_WORKER_TOOLS)
+if(BUILD_CONSUMER_TOOLS)
     set (BUILD_MONGODB ON)
     add_subdirectory(tools)
 endif()
diff --git a/worker/api/cpp/CMakeLists.txt b/consumer/api/cpp/CMakeLists.txt
similarity index 87%
rename from worker/api/cpp/CMakeLists.txt
rename to consumer/api/cpp/CMakeLists.txt
index 36157a48e6f5b73853b14a309c776a172fa1c990..0a0564fbc76958091e6e5a4cf484b8d6a0a7ef32 100644
--- a/worker/api/cpp/CMakeLists.txt
+++ b/consumer/api/cpp/CMakeLists.txt
@@ -1,8 +1,7 @@
-set(TARGET_NAME asapo-worker)
+set(TARGET_NAME asapo-consumer)
 
 set(SOURCE_FILES
         src/data_broker.cpp
-        src/folder_data_broker.cpp
         src/server_data_broker.cpp
         src/tcp_client.cpp
         src/tcp_connection_pool.cpp)
@@ -27,8 +26,7 @@ target_link_libraries(${TARGET_NAME} ${CURL_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT}
 ################################
 # Testing
 ################################
-set(TEST_SOURCE_FILES unittests/test_worker_api.cpp
-                      unittests/test_folder_broker.cpp
+set(TEST_SOURCE_FILES unittests/test_consumer_api.cpp
                       unittests/test_server_broker.cpp
                       unittests/test_tcp_client.cpp
             unittests/test_tcp_connection_pool.cpp
diff --git a/consumer/api/cpp/include/asapo_consumer.h b/consumer/api/cpp/include/asapo_consumer.h
new file mode 100644
index 0000000000000000000000000000000000000000..10b8616d409a6d871d6b71e852c827ffab20930a
--- /dev/null
+++ b/consumer/api/cpp/include/asapo_consumer.h
@@ -0,0 +1,8 @@
+#ifndef ASAPO_ASAPO_CONSUMER_H
+#define ASAPO_ASAPO_CONSUMER_H
+
+#include "consumer/data_broker.h"
+#include "consumer/consumer_error.h"
+#include "common/version.h"
+
+#endif //ASAPO_ASAPO_CONSUMER_H
diff --git a/consumer/api/cpp/include/consumer/consumer_error.h b/consumer/api/cpp/include/consumer/consumer_error.h
new file mode 100644
index 0000000000000000000000000000000000000000..f751b0622d535496d227a48b0ef07f70f93e8db9
--- /dev/null
+++ b/consumer/api/cpp/include/consumer/consumer_error.h
@@ -0,0 +1,60 @@
+#ifndef ASAPO_CONSUMER_ERROR_H
+#define ASAPO_CONSUMER_ERROR_H
+
+#include "common/error.h"
+#include "common/io_error.h"
+
+namespace asapo {
+
+enum class ConsumerErrorType {
+    kNoData,
+    kEndOfStream,
+    kBrokerServersNotFound,
+    kBrokerServerError,
+    kIOError,
+    kWrongInput
+};
+
+using ConsumerErrorTemplate = ServiceErrorTemplate<ConsumerErrorType, ErrorType::kConsumerError>;
+
+
+class ConsumerErrorData : public CustomErrorData {
+  public:
+    uint64_t id;
+    uint64_t id_max;
+};
+
+
+namespace ConsumerErrorTemplates {
+
+auto const kIOError = ConsumerErrorTemplate{
+    "i/o error", ConsumerErrorType::kIOError
+};
+
+auto const kEndOfStream = ConsumerErrorTemplate{
+    "no data - end of stream", ConsumerErrorType::kEndOfStream
+};
+
+auto const kNoData = ConsumerErrorTemplate{
+    "no data", ConsumerErrorType::kNoData
+};
+
+auto const kWrongInput = ConsumerErrorTemplate{
+    "wrong input", ConsumerErrorType::kWrongInput
+};
+
+auto const kBrokerServerError = ConsumerErrorTemplate{
+    "error from broker server", ConsumerErrorType::kBrokerServerError
+};
+
+auto const kBrokerServersNotFound = ConsumerErrorTemplate{
+    "cannot find brokers", ConsumerErrorType::kBrokerServersNotFound
+};
+
+
+
+}
+}
+
+#endif //ASAPO_CONSUMER_ERROR_H
+
diff --git a/worker/api/cpp/include/worker/data_broker.h b/consumer/api/cpp/include/consumer/data_broker.h
similarity index 92%
rename from worker/api/cpp/include/worker/data_broker.h
rename to consumer/api/cpp/include/consumer/data_broker.h
index 9629b33898a5a8df3662c42068d01870331fea4b..7c274eca6404e2cefd3fb3c5dbd5c9ecc3d86e7a 100644
--- a/worker/api/cpp/include/worker/data_broker.h
+++ b/consumer/api/cpp/include/consumer/data_broker.h
@@ -11,15 +11,13 @@ namespace asapo {
 
 class DataBroker {
   public:
-    //! Connect to the data source - will scan file folders or connect to the database.
-// TODO: do we need this?
-    virtual Error Connect() = 0;
     //! Reset counter for the specific group.
     /*!
       \param group_id - group id to use.
       \return nullptr of command was successful, otherwise error.
     */
-    virtual Error ResetCounter(std::string group_id) = 0;
+    virtual Error ResetLastReadMarker(std::string group_id) = 0;
+    virtual Error SetLastReadMarker(uint64_t value, std::string group_id) = 0;
 
     //! Set timeout for broker operations. Default - no timeout
     virtual void SetTimeout(uint64_t timeout_ms) = 0;
@@ -30,7 +28,7 @@ class DataBroker {
       \param err - return nullptr of operation succeed, error otherwise.
       \return number of datasets.
     */
-    virtual uint64_t GetNDataSets(Error* err) = 0;
+    virtual uint64_t GetCurrentSize(Error* err) = 0;
 
     //! Generate new GroupID.
     /*!
@@ -125,8 +123,6 @@ class DataBroker {
 /*! A class to create a data broker instance. The class's only function Create is used for this*/
 class DataBrokerFactory {
   public:
-    static std::unique_ptr<DataBroker> CreateFolderBroker(const std::string& source_name,
-            Error* error) noexcept;
     static std::unique_ptr<DataBroker> CreateServerBroker(std::string server_name, std::string source_path,
             SourceCredentials source,
             Error* error) noexcept;
diff --git a/worker/api/cpp/src/data_broker.cpp b/consumer/api/cpp/src/data_broker.cpp
similarity index 79%
rename from worker/api/cpp/src/data_broker.cpp
rename to consumer/api/cpp/src/data_broker.cpp
index e18bbe1334e7631f2ea8e3d73ac35219876f3545..48abcb63e5dc0ed2d694b33ab5291f162fae5aed 100644
--- a/worker/api/cpp/src/data_broker.cpp
+++ b/consumer/api/cpp/src/data_broker.cpp
@@ -1,5 +1,4 @@
-#include "worker/data_broker.h"
-#include "folder_data_broker.h"
+#include "consumer/data_broker.h"
 #include "server_data_broker.h"
 
 
@@ -26,11 +25,6 @@ std::unique_ptr<DataBroker> Create(const std::string& source_name,
 
 }
 
-std::unique_ptr<DataBroker> DataBrokerFactory::CreateFolderBroker(const std::string& source_name,
-        Error* error) noexcept {
-    return Create<FolderDataBroker>(source_name, error);
-};
-
 std::unique_ptr<DataBroker> DataBrokerFactory::CreateServerBroker(std::string server_name, std::string source_path,
         SourceCredentials source,
         Error* error) noexcept {
diff --git a/worker/api/cpp/src/net_client.h b/consumer/api/cpp/src/net_client.h
similarity index 100%
rename from worker/api/cpp/src/net_client.h
rename to consumer/api/cpp/src/net_client.h
diff --git a/worker/api/cpp/src/server_data_broker.cpp b/consumer/api/cpp/src/server_data_broker.cpp
similarity index 73%
rename from worker/api/cpp/src/server_data_broker.cpp
rename to consumer/api/cpp/src/server_data_broker.cpp
index 6f00f55290af55dad743db0cd0c6c8816600f47a..7fe7ce71fe4d7ec48cfaf5502cd9c14c2fab013b 100644
--- a/worker/api/cpp/src/server_data_broker.cpp
+++ b/consumer/api/cpp/src/server_data_broker.cpp
@@ -7,29 +7,59 @@
 #include "http_client/http_error.h"
 #include "tcp_client.h"
 
-
-#include "asapo_worker.h"
+#include "asapo_consumer.h"
 
 using std::chrono::system_clock;
 
 namespace asapo {
 
-Error HttpCodeToWorkerError(const HttpCode& code) {
+Error GetIDsFromJson(const std::string& json_string, uint64_t* id, uint64_t* id_max) {
+    JsonStringParser parser(json_string);
+    Error err;
+    if ((err = parser.GetUInt64("id", id)) || (err = parser.GetUInt64("id_max", id_max))) {
+        return err;
+    }
+    return nullptr;
+}
+
+Error ErrorFromNoDataResponse(const std::string& response) {
+    if (response.find("get_record_by_id") != std::string::npos) {
+        uint64_t id, id_max;
+        auto parse_error = GetIDsFromJson(response, &id, &id_max);
+        if (parse_error) {
+            return ConsumerErrorTemplates::kBrokerServerError.Generate("malformed response - " + response);
+        }
+        Error err;
+        if (id >= id_max ) {
+            err = ConsumerErrorTemplates::kEndOfStream.Generate();
+        } else {
+            err = ConsumerErrorTemplates::kNoData.Generate();
+        }
+        ConsumerErrorData* error_data = new ConsumerErrorData;
+        error_data->id = id;
+        error_data->id_max = id_max;
+        err->SetCustomData(std::unique_ptr<CustomErrorData> {error_data});
+        return err;
+    }
+    return ConsumerErrorTemplates::kNoData.Generate();
+}
+
+Error ErrorFromServerResponce(const std::string& response, const HttpCode& code) {
     switch (code) {
     case HttpCode::OK:
         return nullptr;
     case HttpCode::BadRequest:
-        return WorkerErrorTemplates::kWrongInput.Generate();
+        return ConsumerErrorTemplates::kWrongInput.Generate(response);
     case HttpCode::Unauthorized:
-        return WorkerErrorTemplates::kAuthorizationError.Generate();
+        return ConsumerErrorTemplates::kWrongInput.Generate(response);
     case HttpCode::InternalServerError:
-        return WorkerErrorTemplates::kInternalError.Generate();
+        return ConsumerErrorTemplates::kBrokerServerError.Generate(response);
     case HttpCode::NotFound:
-        return WorkerErrorTemplates::kErrorReadingSource.Generate();
+        return ConsumerErrorTemplates::kBrokerServersNotFound.Generate(response);
     case HttpCode::Conflict:
-        return asapo::ErrorTemplates::kEndOfFile.Generate("No Data");
+        return ErrorFromNoDataResponse(response);
     default:
-        return WorkerErrorTemplates::kUnknownIOError.Generate();
+        return ConsumerErrorTemplates::kBrokerServerError.Generate(response);
     }
 }
 
@@ -46,39 +76,10 @@ server_uri_{std::move(server_uri)}, source_path_{std::move(source_path)}, source
 
 }
 
-Error ServerDataBroker::Connect() {
-    return nullptr;
-}
-
 void ServerDataBroker::SetTimeout(uint64_t timeout_ms) {
     timeout_ms_ = timeout_ms;
 }
 
-std::string GetIDFromJson(const std::string& json_string, Error* err) {
-    JsonStringParser parser(json_string);
-    uint64_t id;
-    if ((*err = parser.GetUInt64("id", &id)) != nullptr) {
-        return "";
-    }
-    return std::to_string(id);
-}
-
-void ServerDataBroker::ProcessServerError(Error* err, const std::string& response, std::string* op) {
-    (*err)->Append(response);
-    if ((*err)->GetErrorType() == asapo::ErrorType::kEndOfFile) {
-        if (response.find("id") != std::string::npos) {
-            Error parse_error;
-            auto id = GetIDFromJson(response, &parse_error);
-            if (parse_error) {
-                (*err)->Append(parse_error->Explain());
-                return;
-            }
-            *op = id;
-        }
-    }
-    return;
-}
-
 std::string ServerDataBroker::RequestWithToken(std::string uri) {
     return std::move(uri) + "?token=" + source_credentials_.user_token;
 }
@@ -87,16 +88,17 @@ Error ServerDataBroker::ProcessRequest(std::string* response, const RequestInfo&
     Error err;
     HttpCode code;
     if (request.post) {
-        *response = httpclient__->Post(RequestWithToken(request.host + request.api) + request.extra_params, request.body, &code,
-                                       &err);
+        *response =
+            httpclient__->Post(RequestWithToken(request.host + request.api) + request.extra_params, request.body, &code,
+                               &err);
     } else {
         *response = httpclient__->Get(RequestWithToken(request.host + request.api) + request.extra_params, &code, &err);
     }
     if (err != nullptr) {
         current_broker_uri_ = "";
-        return err;
+        return ConsumerErrorTemplates::kBrokerServerError.Generate("error processing request: " + err->Explain());
     }
-    return HttpCodeToWorkerError(code);
+    return ErrorFromServerResponce(*response, code);
 }
 
 Error ServerDataBroker::GetBrokerUri() {
@@ -112,11 +114,24 @@ Error ServerDataBroker::GetBrokerUri() {
     err = ProcessRequest(&current_broker_uri_, ri);
     if (err != nullptr || current_broker_uri_.empty()) {
         current_broker_uri_ = "";
-        return TextError("cannot get broker uri from " + server_uri_);
+        return ConsumerErrorTemplates::kBrokerServersNotFound.Generate(" on " + server_uri_
+                + (err != nullptr ? ": " + err->Explain()
+                   : ""));
     }
     return nullptr;
 }
 
+void ServerDataBroker::ProcessServerError(Error* err, const std::string& response, std::string* op) {
+    if (*err == ConsumerErrorTemplates::kNoData) {
+        auto error_data = static_cast<const ConsumerErrorData*>((*err)->GetCustomData());
+        if (error_data == nullptr) {
+            *err = ConsumerErrorTemplates::kBrokerServerError.Generate("malformed response - " + response);
+            return;
+        }
+        *op = std::to_string(error_data->id);
+    }
+    return;
+}
 
 Error ServerDataBroker::GetRecordFromServer(std::string* response, std::string group_id, GetImageServerOperation op,
                                             bool dataset) {
@@ -141,14 +156,16 @@ Error ServerDataBroker::GetRecordFromServer(std::string* response, std::string g
 
         ProcessServerError(&err, *response, &request_suffix);
 
+        if (err == ConsumerErrorTemplates::kBrokerServerError && request_suffix == "next") {
+            return err;
+        }
+
         if (elapsed_ms >= timeout_ms_) {
-            err = IOErrorTemplates::kTimeout.Generate( ", last error: " + err->Explain());
             return err;
         }
         std::this_thread::sleep_for(std::chrono::milliseconds(100));
         elapsed_ms += 100;
     }
-
     return nullptr;
 }
 
@@ -175,7 +192,7 @@ Error ServerDataBroker::GetImageFromServer(GetImageServerOperation op, uint64_t
                                            FileInfo* info,
                                            FileData* data) {
     if (info == nullptr) {
-        return WorkerErrorTemplates::kWrongInput.Generate();
+        return ConsumerErrorTemplates::kWrongInput.Generate();
     }
 
     Error err;
@@ -190,15 +207,15 @@ Error ServerDataBroker::GetImageFromServer(GetImageServerOperation op, uint64_t
     }
 
     if (!info->SetFromJson(response)) {
-        return WorkerErrorTemplates::kErrorReadingSource.Generate(std::string(":") + response);
+        return ConsumerErrorTemplates::kBrokerServerError.Generate(std::string("malformed response:") + response);
     }
 
     return GetDataIfNeeded(info, data);
 }
 
 Error ServerDataBroker::RetrieveData(FileInfo* info, FileData* data) {
-    if (data == nullptr || info == nullptr ) {
-        return TextError("pointers are empty");
+    if (data == nullptr || info == nullptr) {
+        return ConsumerErrorTemplates::kWrongInput.Generate("pointers are empty");
     }
 
     if (DataCanBeInBuffer(info)) {
@@ -211,9 +228,12 @@ Error ServerDataBroker::RetrieveData(FileInfo* info, FileData* data) {
 
     Error error;
     *data = io__->GetDataFromFile(info->FullName(source_path_), &info->size, &error);
-    return error;
-}
+    if (error) {
+        return ConsumerErrorTemplates::kIOError.Generate(error->Explain());
+    }
 
+    return nullptr;
+}
 
 Error ServerDataBroker::GetDataIfNeeded(FileInfo* info, FileData* data) {
     if (data == nullptr) {
@@ -232,7 +252,6 @@ Error ServerDataBroker::TryGetDataFromBuffer(const FileInfo* info, FileData* dat
     return net_client__->GetData(info, data);
 }
 
-
 std::string ServerDataBroker::GenerateNewGroupId(Error* err) {
     RequestInfo ri;
     ri.api = "/creategroup";
@@ -240,13 +259,10 @@ std::string ServerDataBroker::GenerateNewGroupId(Error* err) {
     return BrokerRequestWithTimeout(ri, err);
 }
 
-
 std::string ServerDataBroker::AppendUri(std::string request_string) {
     return current_broker_uri_ + "/" + std::move(request_string);
 }
 
-
-
 std::string ServerDataBroker::BrokerRequestWithTimeout(RequestInfo request, Error* err) {
     uint64_t elapsed_ms = 0;
     std::string response;
@@ -255,21 +271,21 @@ std::string ServerDataBroker::BrokerRequestWithTimeout(RequestInfo request, Erro
         if (*err == nullptr) {
             request.host = current_broker_uri_;
             *err = ProcessRequest(&response, request);
-            if (*err == nullptr || (*err)->GetErrorType() == ErrorType::kEndOfFile || (*err) == WorkerErrorTemplates::kWrongInput) {
+            if (*err == nullptr || (*err) == ConsumerErrorTemplates::kWrongInput) {
                 return response;
             }
         }
         std::this_thread::sleep_for(std::chrono::milliseconds(100));
         elapsed_ms += 100;
     }
-    *err = IOErrorTemplates::kTimeout.Generate( ", last error: " + (*err)->Explain());
     return "";
 }
 
-Error ServerDataBroker::ResetCounter(std::string group_id) {
+Error ServerDataBroker::SetLastReadMarker(uint64_t value, std::string group_id) {
     RequestInfo ri;
     ri.api = "/database/" + source_credentials_.beamtime_id + "/" + source_credentials_.stream + "/" + std::move(
                  group_id) + "/resetcounter";
+    ri.extra_params = "&value=" + std::to_string(value);
     ri.post = true;
 
     Error err;
@@ -277,7 +293,11 @@ Error ServerDataBroker::ResetCounter(std::string group_id) {
     return err;
 }
 
-uint64_t ServerDataBroker::GetNDataSets(Error* err) {
+Error ServerDataBroker::ResetLastReadMarker(std::string group_id) {
+    return SetLastReadMarker(0, group_id);
+}
+
+uint64_t ServerDataBroker::GetCurrentSize(Error* err) {
     RequestInfo ri;
     ri.api = "/database/" + source_credentials_.beamtime_id + "/" + source_credentials_.stream + "/size";
     auto responce = BrokerRequestWithTimeout(ri, err);
@@ -297,13 +317,11 @@ Error ServerDataBroker::GetById(uint64_t id, FileInfo* info, std::string group_i
     return GetImageFromServer(GetImageServerOperation::GetID, id, group_id, info, data);
 }
 
-
 Error ServerDataBroker::GetRecordFromServerById(uint64_t id, std::string* response, std::string group_id,
                                                 bool dataset) {
     RequestInfo ri;
     ri.api = "/database/" + source_credentials_.beamtime_id + "/" + source_credentials_.stream + "/" + std::move(
                  group_id) + "/" + std::to_string(id);
-    ri.extra_params = "&reset=true";
     if (dataset) {
         ri.extra_params += "&dataset=true";
     }
@@ -320,7 +338,6 @@ std::string ServerDataBroker::GetBeamtimeMeta(Error* err) {
     return BrokerRequestWithTimeout(ri, err);
 }
 
-
 DataSet ServerDataBroker::DecodeDatasetFromResponse(std::string response, Error* err) {
     auto parser = JsonStringParser(std::move(response));
 
@@ -330,7 +347,7 @@ DataSet ServerDataBroker::DecodeDatasetFromResponse(std::string response, Error*
     (parse_err = parser.GetArrayRawStrings("images", &vec_fi_endcoded)) ||
     (parse_err = parser.GetUInt64("_id", &id));
     if (parse_err) {
-        *err = WorkerErrorTemplates::kInternalError.Generate("cannot parse response:" + parse_err->Explain());
+        *err = ConsumerErrorTemplates::kBrokerServerError.Generate("malformed response:" + parse_err->Explain());
         return {0, FileInfos{}};
     }
 
@@ -338,7 +355,7 @@ DataSet ServerDataBroker::DecodeDatasetFromResponse(std::string response, Error*
     for (auto fi_encoded : vec_fi_endcoded) {
         FileInfo fi;
         if (!fi.SetFromJson(fi_encoded)) {
-            *err = WorkerErrorTemplates::kInternalError.Generate("cannot parse response:" + fi_encoded);
+            *err = ConsumerErrorTemplates::kBrokerServerError.Generate("malformed response:" + fi_encoded);
             return {0, FileInfos{}};
         }
         res.emplace_back(fi);
@@ -346,7 +363,6 @@ DataSet ServerDataBroker::DecodeDatasetFromResponse(std::string response, Error*
     return {id, std::move(res)};
 }
 
-
 FileInfos ServerDataBroker::QueryImages(std::string query, Error* err) {
     RequestInfo ri;
     ri.api = "/database/" + source_credentials_.beamtime_id + "/" + source_credentials_.stream + "/0/queryimages";
@@ -355,7 +371,6 @@ FileInfos ServerDataBroker::QueryImages(std::string query, Error* err) {
 
     auto response = BrokerRequestWithTimeout(ri, err);
     if (*err) {
-        (*err)->Append(response);
         return FileInfos{};
     }
 
@@ -391,5 +406,4 @@ DataSet ServerDataBroker::GetDatasetById(uint64_t id, std::string group_id, Erro
     return GetDatasetFromServer(GetImageServerOperation::GetID, id, std::move(group_id), err);
 }
 
-
 }
diff --git a/worker/api/cpp/src/server_data_broker.h b/consumer/api/cpp/src/server_data_broker.h
similarity index 88%
rename from worker/api/cpp/src/server_data_broker.h
rename to consumer/api/cpp/src/server_data_broker.h
index e87bb996875ca26d55db715d43da7ae3776f7250..287a398a9d623b9d076d0d0eb7b4d2581c0210c8 100644
--- a/worker/api/cpp/src/server_data_broker.h
+++ b/consumer/api/cpp/src/server_data_broker.h
@@ -1,14 +1,15 @@
 #ifndef ASAPO_SERVER_DATA_BROKER_H
 #define ASAPO_SERVER_DATA_BROKER_H
 
-#include "worker/data_broker.h"
+#include "consumer/data_broker.h"
 #include "io/io.h"
 #include "http_client/http_client.h"
 #include "net_client.h"
 
 namespace asapo {
 
-Error HttpCodeToWorkerError(const HttpCode& code);
+Error ErrorFromServerResponce(const std::string& response, const HttpCode& code);
+Error ErrorFromNoDataResponse(const std::string& response);
 
 enum class GetImageServerOperation {
     GetNext,
@@ -28,13 +29,13 @@ struct RequestInfo {
 class ServerDataBroker final : public asapo::DataBroker {
   public:
     explicit ServerDataBroker(std::string server_uri, std::string source_path, SourceCredentials source);
-    Error Connect() override;
-    Error ResetCounter(std::string group_id) override;
+    Error ResetLastReadMarker(std::string group_id) override;
+    Error SetLastReadMarker(uint64_t value, std::string group_id) override;
     Error GetNext(FileInfo* info, std::string group_id, FileData* data) override;
     Error GetLast(FileInfo* info, std::string group_id, FileData* data) override;
     std::string GenerateNewGroupId(Error* err) override;
     std::string GetBeamtimeMeta(Error* err) override;
-    uint64_t GetNDataSets(Error* err) override;
+    uint64_t GetCurrentSize(Error* err) override;
     Error GetById(uint64_t id, FileInfo* info, std::string group_id, FileData* data) override;
     void SetTimeout(uint64_t timeout_ms) override;
     FileInfos QueryImages(std::string query, Error* err) override;
diff --git a/worker/api/cpp/src/tcp_client.cpp b/consumer/api/cpp/src/tcp_client.cpp
similarity index 100%
rename from worker/api/cpp/src/tcp_client.cpp
rename to consumer/api/cpp/src/tcp_client.cpp
diff --git a/worker/api/cpp/src/tcp_client.h b/consumer/api/cpp/src/tcp_client.h
similarity index 100%
rename from worker/api/cpp/src/tcp_client.h
rename to consumer/api/cpp/src/tcp_client.h
diff --git a/worker/api/cpp/src/tcp_connection_pool.cpp b/consumer/api/cpp/src/tcp_connection_pool.cpp
similarity index 100%
rename from worker/api/cpp/src/tcp_connection_pool.cpp
rename to consumer/api/cpp/src/tcp_connection_pool.cpp
diff --git a/worker/api/cpp/src/tcp_connection_pool.h b/consumer/api/cpp/src/tcp_connection_pool.h
similarity index 100%
rename from worker/api/cpp/src/tcp_connection_pool.h
rename to consumer/api/cpp/src/tcp_connection_pool.h
diff --git a/worker/api/cpp/unittests/mocking.h b/consumer/api/cpp/unittests/mocking.h
similarity index 92%
rename from worker/api/cpp/unittests/mocking.h
rename to consumer/api/cpp/unittests/mocking.h
index 83ba19658be91427342a66bb7eb53760e105cdc2..be0623021ed4c08aca9f4a128232420f0198e9e5 100644
--- a/worker/api/cpp/unittests/mocking.h
+++ b/consumer/api/cpp/unittests/mocking.h
@@ -1,5 +1,5 @@
-#ifndef ASAPO_WORKER_MOCKING_H
-#define ASAPO_WORKER_MOCKING_H
+#ifndef ASAPO_CONSUMER_MOCKING_H
+#define ASAPO_CONSUMER_MOCKING_H
 
 #include <gtest/gtest.h>
 #include <gmock/gmock.h>
@@ -47,4 +47,4 @@ class MockTCPConnectionPool : public asapo::TcpConnectionPool {
 
 
 
-#endif //ASAPO_WORKER_MOCKING_H
+#endif //ASAPO_CONSUMER_MOCKING_H
diff --git a/worker/api/cpp/unittests/test_worker_api.cpp b/consumer/api/cpp/unittests/test_consumer_api.cpp
similarity index 55%
rename from worker/api/cpp/unittests/test_worker_api.cpp
rename to consumer/api/cpp/unittests/test_consumer_api.cpp
index 7c6fa7952f7457ed6910ca6f1686066fa9a42047..9b3037f7baf7669c9f635b1d19b9af8dfedb2a5a 100644
--- a/worker/api/cpp/unittests/test_worker_api.cpp
+++ b/consumer/api/cpp/unittests/test_consumer_api.cpp
@@ -1,13 +1,11 @@
 #include <gmock/gmock.h>
 
-#include "worker/data_broker.h"
-#include "../src/folder_data_broker.h"
+#include "consumer/data_broker.h"
 #include "../src/server_data_broker.h"
 #include "common/error.h"
 
 using asapo::DataBrokerFactory;
 using asapo::DataBroker;
-using asapo::FolderDataBroker;
 using asapo::ServerDataBroker;
 
 using asapo::Error;
@@ -27,22 +25,6 @@ class DataBrokerFactoryTests : public Test {
 };
 
 
-TEST_F(DataBrokerFactoryTests, CreateFolderDataSource) {
-
-    auto data_broker = DataBrokerFactory::CreateFolderBroker("path/to/file", &error);
-
-    ASSERT_THAT(error, Eq(nullptr));
-    ASSERT_THAT(dynamic_cast<FolderDataBroker*>(data_broker.get()), Ne(nullptr));
-}
-
-TEST_F(DataBrokerFactoryTests, FailCreateDataSourceWithEmptySource) {
-
-    auto data_broker = DataBrokerFactory::CreateFolderBroker("", &error);
-
-    ASSERT_THAT(error->Explain(), Eq("Empty Data Source"));
-    ASSERT_THAT(data_broker.get(), Eq(nullptr));
-}
-
 TEST_F(DataBrokerFactoryTests, CreateServerDataSource) {
 
     auto data_broker = DataBrokerFactory::CreateServerBroker("server", "path", asapo::SourceCredentials{"beamtime_id", "", "token"}, &error);
diff --git a/worker/api/cpp/unittests/test_server_broker.cpp b/consumer/api/cpp/unittests/test_server_broker.cpp
similarity index 79%
rename from worker/api/cpp/unittests/test_server_broker.cpp
rename to consumer/api/cpp/unittests/test_server_broker.cpp
index 092aa793be10cfb1de36506bfbd3677eb223e4c4..487546662f3966ccf3d1fc55e7caa8974bc78a92 100644
--- a/worker/api/cpp/unittests/test_server_broker.cpp
+++ b/consumer/api/cpp/unittests/test_server_broker.cpp
@@ -1,8 +1,8 @@
 #include <gmock/gmock.h>
 #include "gtest/gtest.h"
 
-#include "worker/data_broker.h"
-#include "worker/worker_error.h"
+#include "consumer/data_broker.h"
+#include "consumer/consumer_error.h"
 #include "io/io.h"
 #include "../../../../common/cpp/src/system_io/system_io.h"
 #include "../src/server_data_broker.h"
@@ -98,7 +98,6 @@ class ServerDataBrokerTests : public Test {
                     Return("")
                 ));
     }
-
     void MockGetBrokerUri() {
         EXPECT_CALL(mock_http_client, Get_t(HasSubstr(expected_server_uri + "/discovery/broker"), _, _)).WillOnce(DoAll(
                     SetArgPointee<1>(HttpCode::OK),
@@ -125,14 +124,10 @@ class ServerDataBrokerTests : public Test {
     }
 };
 
-TEST_F(ServerDataBrokerTests, CanConnect) {
-    auto return_code = data_broker->Connect();
-    ASSERT_THAT(return_code, Eq(nullptr));
-}
 
 TEST_F(ServerDataBrokerTests, GetImageReturnsErrorOnWrongInput) {
     auto err = data_broker->GetNext(nullptr, "", nullptr);
-    ASSERT_THAT(err, Eq(asapo::WorkerErrorTemplates::kWrongInput));
+    ASSERT_THAT(err, Eq(asapo::ConsumerErrorTemplates::kWrongInput));
 }
 
 TEST_F(ServerDataBrokerTests, DefaultStreamIsDetector) {
@@ -187,18 +182,37 @@ TEST_F(ServerDataBrokerTests, GetLastUsesCorrectUri) {
     data_broker->GetLast(&info, expected_group_id, nullptr);
 }
 
-TEST_F(ServerDataBrokerTests, GetImageReturnsEOFFromHttpClient) {
+TEST_F(ServerDataBrokerTests, GetImageReturnsEndOfStreamFromHttpClient) {
     MockGetBrokerUri();
 
     EXPECT_CALL(mock_http_client, Get_t(HasSubstr("next"), _, _)).WillOnce(DoAll(
                 SetArgPointee<1>(HttpCode::Conflict),
                 SetArgPointee<2>(nullptr),
-                Return("{\"id\":1}")));
+                Return("{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1}")));
 
     auto err = data_broker->GetNext(&info, expected_group_id, nullptr);
 
-    ASSERT_THAT(err, Ne(nullptr));
-    ASSERT_THAT(err->Explain(), HasSubstr("timeout"));
+    auto err_data = static_cast<const asapo::ConsumerErrorData*>(err->GetCustomData());
+
+    ASSERT_THAT(err, Eq(asapo::ConsumerErrorTemplates::kEndOfStream));
+    ASSERT_THAT(err_data->id, Eq(1));
+    ASSERT_THAT(err_data->id_max, Eq(1));
+}
+
+TEST_F(ServerDataBrokerTests, GetImageReturnsNoDataFromHttpClient) {
+    MockGetBrokerUri();
+
+    EXPECT_CALL(mock_http_client, Get_t(HasSubstr("next"), _, _)).WillOnce(DoAll(
+                SetArgPointee<1>(HttpCode::Conflict),
+                SetArgPointee<2>(nullptr),
+                Return("{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":2}")));
+
+    auto err = data_broker->GetNext(&info, expected_group_id, nullptr);
+    auto err_data = static_cast<const asapo::ConsumerErrorData*>(err->GetCustomData());
+
+    ASSERT_THAT(err_data->id, Eq(1));
+    ASSERT_THAT(err_data->id_max, Eq(2));
+    ASSERT_THAT(err, Eq(asapo::ConsumerErrorTemplates::kNoData));
 }
 
 TEST_F(ServerDataBrokerTests, GetImageReturnsNotAuthorized) {
@@ -211,8 +225,7 @@ TEST_F(ServerDataBrokerTests, GetImageReturnsNotAuthorized) {
 
     auto err = data_broker->GetNext(&info, expected_group_id, nullptr);
 
-    ASSERT_THAT(err, Ne(nullptr));
-    ASSERT_THAT(err->Explain(), HasSubstr("Authorization"));
+    ASSERT_THAT(err, Eq(asapo::ConsumerErrorTemplates::kWrongInput));
 }
 
 TEST_F(ServerDataBrokerTests, GetImageReturnsWrongResponseFromHttpClient) {
@@ -226,7 +239,8 @@ TEST_F(ServerDataBrokerTests, GetImageReturnsWrongResponseFromHttpClient) {
 
     auto err = data_broker->GetNext(&info, expected_group_id, nullptr);
 
-    ASSERT_THAT(err->Explain(), HasSubstr("Cannot parse"));
+    ASSERT_THAT(err, Eq(asapo::ConsumerErrorTemplates::kBrokerServerError));
+    ASSERT_THAT(err->Explain(), HasSubstr("malformed"));
 }
 
 TEST_F(ServerDataBrokerTests, GetImageReturnsIfBrokerAddressNotFound) {
@@ -239,7 +253,7 @@ TEST_F(ServerDataBrokerTests, GetImageReturnsIfBrokerAddressNotFound) {
     data_broker->SetTimeout(100);
     auto err = data_broker->GetNext(&info, expected_group_id, nullptr);
 
-    ASSERT_THAT(err->Explain(), AllOf(HasSubstr("broker uri"), HasSubstr("cannot")));
+    ASSERT_THAT(err->Explain(), AllOf(HasSubstr(expected_server_uri), HasSubstr("cannot")));
 }
 
 TEST_F(ServerDataBrokerTests, GetImageReturnsIfBrokerUriEmpty) {
@@ -252,7 +266,7 @@ TEST_F(ServerDataBrokerTests, GetImageReturnsIfBrokerUriEmpty) {
     data_broker->SetTimeout(100);
     auto err = data_broker->GetNext(&info, expected_group_id, nullptr);
 
-    ASSERT_THAT(err->Explain(), AllOf(HasSubstr("broker uri"), HasSubstr("cannot")));
+    ASSERT_THAT(err->Explain(), AllOf(HasSubstr(expected_server_uri), HasSubstr("cannot")));
 }
 
 TEST_F(ServerDataBrokerTests, GetDoNotCallBrokerUriIfAlreadyFound) {
@@ -281,26 +295,34 @@ TEST_F(ServerDataBrokerTests, GetBrokerUriAgainAfterConnectionError) {
     data_broker->GetNext(&info, expected_group_id, nullptr);
 }
 
-TEST_F(ServerDataBrokerTests, GetImageReturnsEOFFromHttpClientUntilTimeout) {
+TEST_F(ServerDataBrokerTests, GetImageReturnsEofStreamFromHttpClientUntilTimeout) {
     MockGetBrokerUri();
 
-    EXPECT_CALL(mock_http_client, Get_t(HasSubstr("next"), _, _)).WillOnce(DoAll(
+    EXPECT_CALL(mock_http_client, Get_t(HasSubstr("next"), _, _)).Times(AtLeast(2)).WillRepeatedly(DoAll(
                 SetArgPointee<1>(HttpCode::Conflict),
                 SetArgPointee<2>(nullptr),
-                Return("{\"id\":1}")));
+                Return("{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1}")));
 
-    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_stream + "/" +
-                                        expected_group_id
-                                        + "/1?token=" + expected_token, _,
-                                        _)).Times(AtLeast(1)).WillRepeatedly(DoAll(
-                                                    SetArgPointee<1>(HttpCode::Conflict),
-                                                    SetArgPointee<2>(nullptr),
-                                                    Return("{\"id\":1}")));
+    data_broker->SetTimeout(300);
+    auto err = data_broker->GetNext(&info, expected_group_id, nullptr);
 
-    data_broker->SetTimeout(100);
+    ASSERT_THAT(err, Eq(asapo::ConsumerErrorTemplates::kEndOfStream));
+}
+
+TEST_F(ServerDataBrokerTests, GetNextImageReturnsImmediatelyOnServerError) {
+    MockGetBrokerUri();
+
+    EXPECT_CALL(mock_http_client, Get_t(HasSubstr("next"), _, _)).WillOnce(DoAll(
+                SetArgPointee<1>(HttpCode::InternalServerError),
+                SetArgPointee<2>(asapo::IOErrorTemplates::kSocketOperationOnNonSocket.Generate("sss").release()),
+                Return("")));
+
+    data_broker->SetTimeout(300);
     auto err = data_broker->GetNext(&info, expected_group_id, nullptr);
 
-    ASSERT_THAT(err->Explain(), HasSubstr("timeout"));
+    ASSERT_THAT(err, Eq(asapo::ConsumerErrorTemplates::kBrokerServerError));
+    ASSERT_THAT(err->Explain(), HasSubstr("sss"));
+
 }
 
 TEST_F(ServerDataBrokerTests, GetImageReturnsFileInfo) {
@@ -327,7 +349,7 @@ TEST_F(ServerDataBrokerTests, GetImageReturnsParseError) {
 
     auto err = data_broker->GetNext(&info, expected_group_id, nullptr);
 
-    ASSERT_THAT(err, Eq(asapo::WorkerErrorTemplates::kErrorReadingSource));
+    ASSERT_THAT(err, Eq(asapo::ConsumerErrorTemplates::kBrokerServerError));
 }
 
 TEST_F(ServerDataBrokerTests, GetImageReturnsIfNoDataNeeded) {
@@ -400,7 +422,7 @@ TEST_F(ServerDataBrokerTests, GenerateNewGroupIdReturnsErrorCreateGroup) {
     data_broker->SetTimeout(100);
     asapo::Error err;
     auto groupid = data_broker->GenerateNewGroupId(&err);
-    ASSERT_THAT(err, Eq(asapo::WorkerErrorTemplates::kWrongInput));
+    ASSERT_THAT(err, Eq(asapo::ConsumerErrorTemplates::kWrongInput));
     ASSERT_THAT(groupid, Eq(""));
 }
 
@@ -421,23 +443,36 @@ TEST_F(ServerDataBrokerTests, GenerateNewGroupIdReturnsGroupID) {
     ASSERT_THAT(groupid, Eq(expected_group_id));
 }
 
+TEST_F(ServerDataBrokerTests, ResetCounterByDefaultUsesCorrectUri) {
+    MockGetBrokerUri();
+    data_broker->SetTimeout(100);
+
+    EXPECT_CALL(mock_http_client, Post_t(expected_broker_uri + "/database/beamtime_id/" + expected_stream + "/" +
+                                         expected_group_id +
+                                         "/resetcounter?token=" + expected_token + "&value=0", _, _, _)).WillOnce(DoAll(
+                                                     SetArgPointee<2>(HttpCode::OK),
+                                                     SetArgPointee<3>(nullptr),
+                                                     Return("")));
+    auto err = data_broker->ResetLastReadMarker(expected_group_id);
+    ASSERT_THAT(err, Eq(nullptr));
+}
+
 TEST_F(ServerDataBrokerTests, ResetCounterUsesCorrectUri) {
     MockGetBrokerUri();
     data_broker->SetTimeout(100);
 
     EXPECT_CALL(mock_http_client, Post_t(expected_broker_uri + "/database/beamtime_id/" + expected_stream + "/" +
                                          expected_group_id +
-                                         "/resetcounter?token="
-                                         + expected_token, _, _, _)).WillOnce(DoAll(
+                                         "/resetcounter?token=" + expected_token + "&value=10", _, _, _)).WillOnce(DoAll(
                                                      SetArgPointee<2>(HttpCode::OK),
                                                      SetArgPointee<3>(nullptr),
                                                      Return("")));
-    auto err = data_broker->ResetCounter(expected_group_id);
+    auto err = data_broker->SetLastReadMarker(10, expected_group_id);
     ASSERT_THAT(err, Eq(nullptr));
 }
 
 
-TEST_F(ServerDataBrokerTests, GetNDataSetsUsesCorrectUri) {
+TEST_F(ServerDataBrokerTests, GetCurrentSizeUsesCorrectUri) {
     MockGetBrokerUri();
     data_broker->SetTimeout(100);
 
@@ -447,13 +482,13 @@ TEST_F(ServerDataBrokerTests, GetNDataSetsUsesCorrectUri) {
                                                     SetArgPointee<2>(nullptr),
                                                     Return("{\"size\":10}")));
     asapo::Error err;
-    auto size = data_broker->GetNDataSets(&err);
+    auto size = data_broker->GetCurrentSize(&err);
     ASSERT_THAT(err, Eq(nullptr));
     ASSERT_THAT(size, Eq(10));
 }
 
 
-TEST_F(ServerDataBrokerTests, GetNDataSetsErrorOnWrongResponce) {
+TEST_F(ServerDataBrokerTests, GetCurrentSizeErrorOnWrongResponse) {
     MockGetBrokerUri();
     data_broker->SetTimeout(100);
 
@@ -463,7 +498,7 @@ TEST_F(ServerDataBrokerTests, GetNDataSetsErrorOnWrongResponce) {
                                                     SetArgPointee<2>(nullptr),
                                                     Return("")));
     asapo::Error err;
-    auto size = data_broker->GetNDataSets(&err);
+    auto size = data_broker->GetCurrentSize(&err);
     ASSERT_THAT(err, Ne(nullptr));
     ASSERT_THAT(size, Eq(0));
 }
@@ -479,7 +514,7 @@ TEST_F(ServerDataBrokerTests, GetNDataErrorOnWrongParse) {
                                                     SetArgPointee<2>(nullptr),
                                                     Return("{\"siz\":10}")));
     asapo::Error err;
-    auto size = data_broker->GetNDataSets(&err);
+    auto size = data_broker->GetCurrentSize(&err);
     ASSERT_THAT(err, Ne(nullptr));
     ASSERT_THAT(size, Eq(0));
 }
@@ -494,7 +529,7 @@ TEST_F(ServerDataBrokerTests, GetByIdUsesCorrectUri) {
                                         expected_group_id
                                         + "/" + std::to_string(
                                             expected_dataset_id) + "?token="
-                                        + expected_token + "&reset=true", _,
+                                        + expected_token, _,
                                         _)).WillOnce(DoAll(
                                                 SetArgPointee<1>(HttpCode::OK),
                                                 SetArgPointee<2>(nullptr),
@@ -504,9 +539,60 @@ TEST_F(ServerDataBrokerTests, GetByIdUsesCorrectUri) {
 
     ASSERT_THAT(err, Eq(nullptr));
     ASSERT_THAT(info.name, Eq(to_send.name));
+}
+
+
+TEST_F(ServerDataBrokerTests, GetByIdTimeouts) {
+    MockGetBrokerUri();
+    data_broker->SetTimeout(10);
+
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_stream + "/"  +
+                                        expected_group_id + "/" + std::to_string(expected_dataset_id) + "?token="
+                                        + expected_token, _, _)).WillOnce(DoAll(
+                                                    SetArgPointee<1>(HttpCode::Conflict),
+                                                    SetArgPointee<2>(nullptr),
+                                                    Return("")));
+
+    auto err = data_broker->GetById(expected_dataset_id, &info, expected_group_id, nullptr);
+
+    ASSERT_THAT(err, Eq(asapo::ConsumerErrorTemplates::kNoData));
+}
+
+TEST_F(ServerDataBrokerTests, GetByIdReturnsEndOfStream) {
+    MockGetBrokerUri();
+    data_broker->SetTimeout(10);
+
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_stream + "/"  +
+                                        expected_group_id + "/" + std::to_string(expected_dataset_id) + "?token="
+                                        + expected_token, _, _)).WillOnce(DoAll(
+                                                    SetArgPointee<1>(HttpCode::Conflict),
+                                                    SetArgPointee<2>(nullptr),
+                                                    Return("{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1}")));
+
+
+    auto err = data_broker->GetById(expected_dataset_id, &info, expected_group_id, nullptr);
+
+    ASSERT_THAT(err, Eq(asapo::ConsumerErrorTemplates::kEndOfStream));
+}
+
+TEST_F(ServerDataBrokerTests, GetByIdReturnsEndOfStreamWhenIdTooLarge) {
+    MockGetBrokerUri();
+    data_broker->SetTimeout(10);
 
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_stream + "/"  +
+                                        expected_group_id + "/" + std::to_string(expected_dataset_id) + "?token="
+                                        + expected_token, _, _)).WillOnce(DoAll(
+                                                    SetArgPointee<1>(HttpCode::Conflict),
+                                                    SetArgPointee<2>(nullptr),
+                                                    Return("{\"op\":\"get_record_by_id\",\"id\":100,\"id_max\":1}")));
+
+
+    auto err = data_broker->GetById(expected_dataset_id, &info, expected_group_id, nullptr);
+
+    ASSERT_THAT(err, Eq(asapo::ConsumerErrorTemplates::kEndOfStream));
 }
 
+
 TEST_F(ServerDataBrokerTests, GetMetaDataOK) {
     MockGetBrokerUri();
     data_broker->SetTimeout(100);
@@ -541,7 +627,7 @@ TEST_F(ServerDataBrokerTests, QueryImagesReturnError) {
     asapo::Error err;
     auto images = data_broker->QueryImages(expected_query_string, &err);
 
-    ASSERT_THAT(err, Eq(asapo::WorkerErrorTemplates::kWrongInput));
+    ASSERT_THAT(err, Eq(asapo::ConsumerErrorTemplates::kWrongInput));
     ASSERT_THAT(err->Explain(), HasSubstr("query"));
     ASSERT_THAT(images.size(), Eq(0));
 }
@@ -692,7 +778,7 @@ TEST_F(ServerDataBrokerTests, GetDataSetReturnsParseError) {
     asapo::Error err;
     auto dataset = data_broker->GetNextDataset(expected_group_id, &err);
 
-    ASSERT_THAT(err, Eq(asapo::WorkerErrorTemplates::kInternalError));
+    ASSERT_THAT(err, Eq(asapo::ConsumerErrorTemplates::kBrokerServerError));
     ASSERT_THAT(dataset.content.size(), Eq(0));
     ASSERT_THAT(dataset.id, Eq(0));
 
@@ -719,7 +805,7 @@ TEST_F(ServerDataBrokerTests, GetDatasetByIdUsesCorrectUri) {
     EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_stream + "/" +
                                         expected_group_id +
                                         "/" + std::to_string(expected_dataset_id) + "?token="
-                                        + expected_token + "&reset=true&dataset=true", _,
+                                        + expected_token + "&dataset=true", _,
                                         _)).WillOnce(DoAll(
                                                 SetArgPointee<1>(HttpCode::OK),
                                                 SetArgPointee<2>(nullptr),
diff --git a/worker/api/cpp/unittests/test_tcp_client.cpp b/consumer/api/cpp/unittests/test_tcp_client.cpp
similarity index 100%
rename from worker/api/cpp/unittests/test_tcp_client.cpp
rename to consumer/api/cpp/unittests/test_tcp_client.cpp
diff --git a/worker/api/cpp/unittests/test_tcp_connection_pool.cpp b/consumer/api/cpp/unittests/test_tcp_connection_pool.cpp
similarity index 100%
rename from worker/api/cpp/unittests/test_tcp_connection_pool.cpp
rename to consumer/api/cpp/unittests/test_tcp_connection_pool.cpp
diff --git a/worker/api/python/CMakeLists.txt b/consumer/api/python/CMakeLists.txt
similarity index 100%
rename from worker/api/python/CMakeLists.txt
rename to consumer/api/python/CMakeLists.txt
diff --git a/worker/api/python/CMakeLists_Linux.cmake b/consumer/api/python/CMakeLists_Linux.cmake
similarity index 77%
rename from worker/api/python/CMakeLists_Linux.cmake
rename to consumer/api/python/CMakeLists_Linux.cmake
index bbc1eaade3d161a7f31ee6c3c779de53e098f386..511ac676aafbdaeee66ac92398eab2a92ecba33b 100644
--- a/worker/api/python/CMakeLists_Linux.cmake
+++ b/consumer/api/python/CMakeLists_Linux.cmake
@@ -9,9 +9,9 @@ else()
     set (EXTRA_LINK_ARGS "[]")
 ENDIF()
 
-get_property(ASAPO_WORKER_LIB TARGET asapo-worker PROPERTY LOCATION)
+get_property(ASAPO_CONSUMER_LIB TARGET asapo-consumer PROPERTY LOCATION)
 
-set (ASAPO_WORKER_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../cpp/include)
+set (ASAPO_CONSUMER_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../cpp/include)
 
 configure_files(${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR} @ONLY)
 
@@ -21,7 +21,7 @@ ADD_CUSTOM_TARGET(python-lib2 ALL
 ADD_CUSTOM_TARGET(python-lib3 ALL
         COMMAND python3 setup.py build_ext --inplace --force)
 
-ADD_DEPENDENCIES(python-lib2 asapo-worker)
-ADD_DEPENDENCIES(python-lib3 asapo-worker)
+ADD_DEPENDENCIES(python-lib2 asapo-consumer)
+ADD_DEPENDENCIES(python-lib3 asapo-consumer)
 
 add_subdirectory(source_dist_linux)
diff --git a/worker/api/python/CMakeLists_Windows.cmake b/consumer/api/python/CMakeLists_Windows.cmake
similarity index 79%
rename from worker/api/python/CMakeLists_Windows.cmake
rename to consumer/api/python/CMakeLists_Windows.cmake
index c02b010ec5ffa1d268bbb49f91b485344bef04e4..c37ee4180b2f7b61816e70ef10cca0d35722b9ae 100644
--- a/worker/api/python/CMakeLists_Windows.cmake
+++ b/consumer/api/python/CMakeLists_Windows.cmake
@@ -7,15 +7,15 @@ message ("   Python includes:" ${Python3_INCLUDE_DIRS})
 message ("   Numpy:" ${PYTHON_NUMPY_INCLUDE_DIR})
 
 
-add_custom_command(OUTPUT asapo_worker.cpp
+add_custom_command(OUTPUT asapo_consumer.cpp
         COMMAND ${Python3_EXECUTABLE} cythonize.py
-        DEPENDS asapo-worker)
+        DEPENDS asapo-consumer)
 
 
-set(TARGET_NAME asapo_worker)
+set(TARGET_NAME asapo_consumer)
 
 set(SOURCE_FILES
-        asapo_worker.cpp)
+        asapo_consumer.cpp)
 
 add_library(${TARGET_NAME} SHARED ${SOURCE_FILES})
 set_target_properties(${TARGET_NAME} PROPERTIES SUFFIX ".pyd")
@@ -23,7 +23,7 @@ set_target_properties(${TARGET_NAME} PROPERTIES RUNTIME_OUTPUT_DIRECTORY
         ${CMAKE_CURRENT_BINARY_DIR}$<$<CONFIG:Debug>:>
         )
 
-target_link_libraries(${TARGET_NAME}  asapo-worker ${Python3_LIBRARIES})
+target_link_libraries(${TARGET_NAME}  asapo-consumer ${Python3_LIBRARIES})
 target_include_directories(${TARGET_NAME} PUBLIC include  ${Python3_INCLUDE_DIRS} ${PYTHON_NUMPY_INCLUDE_DIR})
 
 
diff --git a/worker/api/python/asapo_worker.pxd b/consumer/api/python/asapo_consumer.pxd
similarity index 54%
rename from worker/api/python/asapo_worker.pxd
rename to consumer/api/python/asapo_consumer.pxd
index 3146e51f797daf4e841d3956bdd2d39cefe36261..811d1099f1b0eb46b158c2a9fe2792630c83237a 100644
--- a/worker/api/python/asapo_worker.pxd
+++ b/consumer/api/python/asapo_consumer.pxd
@@ -7,20 +7,28 @@ from libcpp cimport bool
 ctypedef unsigned char uint8_t
 ctypedef unsigned long uint64_t
 
+ctypedef unique_ptr[ErrorInterface] Error
 
-cdef extern from "asapo_worker.h" namespace "asapo":
-  cppclass Error:
+cdef extern from "asapo_consumer.h" namespace "asapo":
+  cppclass CustomErrorData:
     pass
+  cppclass ErrorInterface:
+    string Explain()
+    const CustomErrorData* GetCustomData()
+  cppclass ErrorTemplateInterface:
+    pass
+  cdef bool operator==(Error lhs, ErrorTemplateInterface rhs)
+
 
 cdef extern from "asapo_wrappers.h" namespace "asapo":
   cdef string GetErrorString(Error* err)
 
-cdef extern from "asapo_worker.h" namespace "asapo":
+cdef extern from "asapo_consumer.h" namespace "asapo":
   cppclass FileData:
     unique_ptr[uint8_t[]] release()
     pass
 
-cdef extern from "asapo_worker.h" namespace "asapo":
+cdef extern from "asapo_consumer.h" namespace "asapo":
   cppclass FileInfo:
     string Json()
     bool SetFromJson(string json_str)
@@ -35,15 +43,16 @@ cdef extern from "asapo_worker.h" namespace "asapo":
     string stream
     string user_token
 
-cdef extern from "asapo_worker.h" namespace "asapo" nogil:
+cdef extern from "asapo_consumer.h" namespace "asapo" nogil:
     cdef cppclass DataBroker:
         DataBroker() except +
         void SetTimeout(uint64_t timeout_ms)
         Error GetNext(FileInfo* info, string group_id, FileData* data)
         Error GetLast(FileInfo* info, string group_id, FileData* data)
         Error GetById(uint64_t id, FileInfo* info, string group_id, FileData* data)
-        uint64_t GetNDataSets(Error* err)
-        Error ResetCounter(string group_id)
+        uint64_t GetCurrentSize(Error* err)
+        Error SetLastReadMarker(uint64_t value, string group_id)
+        Error ResetLastReadMarker(string group_id)
         string GenerateNewGroupId(Error* err)
         string GetBeamtimeMeta(Error* err)
         FileInfos QueryImages(string query, Error* err)
@@ -53,9 +62,20 @@ cdef extern from "asapo_worker.h" namespace "asapo" nogil:
         Error RetrieveData(FileInfo* info, FileData* data)
 
 
-cdef extern from "asapo_worker.h" namespace "asapo" nogil:
+cdef extern from "asapo_consumer.h" namespace "asapo" nogil:
     cdef cppclass DataBrokerFactory:
         DataBrokerFactory() except +
         unique_ptr[DataBroker] CreateServerBroker(string server_name,string source_path,SourceCredentials source,Error* error)
 
 
+cdef extern from "asapo_consumer.h" namespace "asapo":
+  ErrorTemplateInterface kNoData "asapo::ConsumerErrorTemplates::kNoData"
+  ErrorTemplateInterface kEndOfStream "asapo::ConsumerErrorTemplates::kEndOfStream"
+  ErrorTemplateInterface kBrokerServersNotFound "asapo::ConsumerErrorTemplates::kBrokerServersNotFound"
+  ErrorTemplateInterface kBrokerServerError "asapo::ConsumerErrorTemplates::kBrokerServerError"
+  ErrorTemplateInterface kIOError "asapo::ConsumerErrorTemplates::kIOError"
+  ErrorTemplateInterface kWrongInput "asapo::ConsumerErrorTemplates::kWrongInput"
+  cdef cppclass ConsumerErrorData:
+    uint64_t id
+    uint64_t id_max
+
diff --git a/worker/api/python/asapo_worker.pyx.in b/consumer/api/python/asapo_consumer.pyx.in
similarity index 65%
rename from worker/api/python/asapo_worker.pyx.in
rename to consumer/api/python/asapo_consumer.pyx.in
index cd19e16d5304de8fceef47e88edced7c12041bbb..fd7d3047e61011eef2497c2276f77c3fd4f2d2e6 100644
--- a/worker/api/python/asapo_worker.pyx.in
+++ b/consumer/api/python/asapo_consumer.pyx.in
@@ -1,6 +1,6 @@
 #distutils: language=c++
 
-cimport asapo_worker
+cimport asapo_consumer
 import numpy as np
 cimport numpy as np
 import json
@@ -28,6 +28,43 @@ cdef bytes _bytes(s):
     else:
         raise TypeError("Could not convert to unicode.")
 
+class AsapoConsumerError(Exception):
+  pass
+
+class AsapoWrongInputError(AsapoConsumerError):
+  pass
+
+class AsapoEndOfStreamError(AsapoConsumerError):
+  def __init__(self,message,id_max=None):
+    AsapoConsumerError.__init__(self,message)
+    self.id_max = id_max
+
+class AsapoNoDataError(AsapoConsumerError):
+  def __init__(self,message,id=None,id_max=None):
+    AsapoConsumerError.__init__(self,message)
+    self.id_max = id_max
+    self.id = id
+
+
+cdef throw_exception(Error& err):
+    cdef ConsumerErrorData* data
+    if err == kEndOfStream:
+            data=<ConsumerErrorData*>err.get().GetCustomData()
+            if data != NULL:
+                raise AsapoEndOfStreamError(err.get().Explain(),data.id_max)
+            else:
+                raise AsapoEndOfStreamError(err.get().Explain())
+    elif err == kNoData:
+            data=<ConsumerErrorData*>err.get().GetCustomData()
+            if data != NULL:
+                raise AsapoNoDataError(err.get().Explain(),data.id,data.id_max)
+            else:
+                raise AsapoNoDataError(err.get().Explain())
+    elif err == kWrongInput:
+            raise AsapoWrongInputError(err.get().Explain())
+    else:
+        raise AsapoConsumerError(err.get().Explain())
+
 
 cdef class PyDataBroker:
     cdef DataBroker* c_broker
@@ -47,18 +84,17 @@ cdef class PyDataBroker:
         elif op == "id":
             with nogil:
                 err =  self.c_broker.GetById(id, &info, b_group_id, p_data)
-        err_str = _str(GetErrorString(&err))
-        if err_str.strip():
-            return None,None,err_str
+        if err:
+            throw_exception(err)
         info_str = _str(info.Json())
         meta = json.loads(info_str)
         if meta_only:
-            return None,meta,None
+            return None,meta
         cdef char* ptr = <char*> data.release()
         dims[0] = meta['size']
         arr =  np.PyArray_SimpleNewFromData(1, dims, np.NPY_BYTE, ptr)
         PyArray_ENABLEFLAGS(arr,np.NPY_OWNDATA)
-        return arr,meta,None
+        return arr,meta
     def get_next(self, group_id, meta_only = True):
         return self._op("next",group_id,meta_only,0)
     def get_last(self, group_id, meta_only = True):
@@ -69,64 +105,65 @@ cdef class PyDataBroker:
         json_str = json.dumps(meta)
         cdef FileInfo info
         if not info.SetFromJson(_bytes(json_str)):
-            return None,"wrong metadata"
+            raise AsapoWrongInputError("wrong metadata")
         cdef Error err
         cdef FileData data
         with nogil:
             err =  self.c_broker.RetrieveData(&info, &data)
-        err_str = _str(GetErrorString(&err))
-        if err_str.strip():
-            return None,err_str
+        if err:
+            throw_exception(err)
         cdef np.npy_intp dims[1]
         dims[0] = meta['size']
         cdef char* ptr = <char*> data.release()
         arr =  np.PyArray_SimpleNewFromData(1, dims, np.NPY_BYTE, ptr)
         PyArray_ENABLEFLAGS(arr,np.NPY_OWNDATA)
-        return arr,None
-    def get_ndatasets(self):
+        return arr
+    def get_current_size(self):
         cdef Error err
         cdef uint64_t size
         with nogil:
-            size =  self.c_broker.GetNDataSets(&err)
+            size =  self.c_broker.GetCurrentSize(&err)
         err_str = _str(GetErrorString(&err))
-        if err_str.strip():
-            return None,err_str
-        else:
-            return size,None
-    def reset_counter(self,group_id):
+        if err:
+            throw_exception(err)
+        return size
+    def set_lastread_marker(self,value,group_id):
         cdef string b_group_id = _bytes(group_id)
         cdef Error err
+        cdef uint64_t id = value
         with nogil:
-            err =  self.c_broker.ResetCounter(b_group_id)
-        err_str = _str(GetErrorString(&err))
-        if err_str.strip():
-            return err_str
-        else:
-            return None
+            err =  self.c_broker.SetLastReadMarker(id,b_group_id)
+        if err:
+            throw_exception(err)
+        return
+    def reset_lastread_marker(self,group_id):
+        cdef string b_group_id = _bytes(group_id)
+        cdef Error err
+        with nogil:
+            err =  self.c_broker.ResetLastReadMarker(b_group_id)
+        if err:
+            throw_exception(err)
+        return
     def generate_group_id(self):
         cdef Error err
         cdef string group_id
         with nogil:
             group_id = self.c_broker.GenerateNewGroupId(&err)
-        err_str = _str(GetErrorString(&err))
-        if err_str.strip():
-            return None, err_str
-        else:
-            return _str(group_id), None
+        if err:
+            throw_exception(err)
+        return _str(group_id)
     def query_images(self,query):
         cdef string b_query = _bytes(query)
         cdef Error err
         cdef FileInfos file_infos
         with nogil:
             file_infos = self.c_broker.QueryImages(b_query,&err)
-        err_str = _str(GetErrorString(&err))
-        if err_str.strip():
-            return None, err_str
-        else:
-            json_list = []
-            for fi in file_infos:
-                json_list.append(json.loads(_str(fi.Json())))
-            return json_list, None
+        if err:
+            throw_exception(err)
+        json_list = []
+        for fi in file_infos:
+            json_list.append(json.loads(_str(fi.Json())))
+        return json_list
     def _op_dataset(self, op, group_id, uint64_t id):
         cdef string b_group_id = _bytes(group_id)
         cdef FileInfos file_infos
@@ -141,36 +178,28 @@ cdef class PyDataBroker:
         elif op == "id":
             with nogil:
                 dataset = self.c_broker.GetDatasetById(id, b_group_id, &err)
-        err_str = _str(GetErrorString(&err))
-        if err_str.strip():
-            return None, None, err_str
-        else:
-            json_list = []
-            for fi in dataset.content:
-                json_list.append(json.loads(_str(fi.Json())))
-            return dataset.id, json_list, None
-
-
+        if err:
+            throw_exception(err)
+        json_list = []
+        for fi in dataset.content:
+            json_list.append(json.loads(_str(fi.Json())))
+        return dataset.id, json_list
     def get_next_dataset(self, group_id):
         return self._op_dataset("next",group_id,0)
     def get_last_dataset(self, group_id):
         return self._op_dataset("last",group_id,0)
     def get_dataset_by_id(self, id, group_id):
         return self._op_dataset("id",group_id,id)
-
-
     def get_beamtime_meta(self):
-            cdef Error err
-            cdef string meta_str
-            with nogil:
-                meta_str = self.c_broker.GetBeamtimeMeta(&err)
-            err_str = _str(GetErrorString(&err))
-            if err_str.strip():
-                return None, err_str
-            else:
-                meta = json.loads(_str(meta_str))
-                del meta['_id']
-                return meta, None
+        cdef Error err
+        cdef string meta_str
+        with nogil:
+            meta_str = self.c_broker.GetBeamtimeMeta(&err)
+        if err:
+            throw_exception(err)
+        meta = json.loads(_str(meta_str))
+        del meta['_id']
+        return meta
 
 cdef class __PyDataBrokerFactory:
     cdef DataBrokerFactory c_factory
@@ -191,11 +220,9 @@ cdef class __PyDataBrokerFactory:
         broker = PyDataBroker()
         broker.c_broker =  c_broker.release()
         broker.c_broker.SetTimeout(timeout)
-        err_str = GetErrorString(&err)
-        if err_str.strip():
-            return None,err_str
-        else:
-            return broker,None
+        if err:
+            throw_exception(err)
+        return broker
 
 def create_server_broker(server_name,source_path,beamtime_id,stream,token,timeout_ms):
     """
diff --git a/worker/api/python/asapo_wrappers.h b/consumer/api/python/asapo_wrappers.h
similarity index 100%
rename from worker/api/python/asapo_wrappers.h
rename to consumer/api/python/asapo_wrappers.h
diff --git a/worker/api/python/binary_dist_windows/CMakeLists.txt b/consumer/api/python/binary_dist_windows/CMakeLists.txt
similarity index 61%
rename from worker/api/python/binary_dist_windows/CMakeLists.txt
rename to consumer/api/python/binary_dist_windows/CMakeLists.txt
index ac46814f2f72398700d58ad9f3758dfe857fbb00..726eaa6dde696f79472e6f97e57d6c0d8ee3cbe1 100644
--- a/worker/api/python/binary_dist_windows/CMakeLists.txt
+++ b/consumer/api/python/binary_dist_windows/CMakeLists.txt
@@ -6,11 +6,11 @@ ADD_CUSTOM_TARGET(windows-bdist ALL
 
 
 ADD_CUSTOM_TARGET(copy_python_bdist ALL
-        COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_BINARY_DIR}/asapo_worker
-        COMMAND ${CMAKE_COMMAND} -E copy $<TARGET_FILE:asapo_worker> ${CMAKE_CURRENT_BINARY_DIR}/asapo_worker/
+        COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_BINARY_DIR}/asapo_consumer
+        COMMAND ${CMAKE_COMMAND} -E copy $<TARGET_FILE:asapo_consumer> ${CMAKE_CURRENT_BINARY_DIR}/asapo_consumer/
         )
 
 
-ADD_DEPENDENCIES(copy_python_bdist asapo_worker)
+ADD_DEPENDENCIES(copy_python_bdist asapo_consumer)
 
 ADD_DEPENDENCIES(windows-bdist copy_python_bdist)
diff --git a/worker/api/python/binary_dist_windows/setup.py.in b/consumer/api/python/binary_dist_windows/setup.py.in
similarity index 60%
rename from worker/api/python/binary_dist_windows/setup.py.in
rename to consumer/api/python/binary_dist_windows/setup.py.in
index cc896317e695130123fc6862fe575147698422fe..d2f1e438360b71ff5ef7f694ce5b589a10c99151 100644
--- a/worker/api/python/binary_dist_windows/setup.py.in
+++ b/consumer/api/python/binary_dist_windows/setup.py.in
@@ -1,12 +1,12 @@
 import setuptools
 
 setuptools.setup(
-      name ="asapo_worker",
+      name ="asapo_consumer",
       version = "@ASAPO_VERSION_PYTHON@",
       install_requires=['numpy'],
       include_package_data=True,
-      packages=['asapo_worker'],
+      packages=['asapo_consumer'],
       package_data={
-        'asapo_worker': ['asapo_worker.pyd'],
+        'asapo_consumer': ['asapo_consumer.pyd'],
       },
 )
diff --git a/worker/api/python/cythonize.py b/consumer/api/python/cythonize.py
similarity index 51%
rename from worker/api/python/cythonize.py
rename to consumer/api/python/cythonize.py
index f0c0ae871b4d742ec7f4324d96100e722bb52198..d98836b8632a2658b71b48db4b00429acdc16a40 100644
--- a/worker/api/python/cythonize.py
+++ b/consumer/api/python/cythonize.py
@@ -1,3 +1,3 @@
 from Cython.Build import cythonize
 
-cythonize(["asapo_worker.pyx"])
+cythonize(["asapo_consumer.pyx"])
diff --git a/worker/api/python/setup.py.in b/consumer/api/python/setup.py.in
similarity index 74%
rename from worker/api/python/setup.py.in
rename to consumer/api/python/setup.py.in
index b2815861f2983e18f31171082368afaf5bb51383..b70319b080b519e0a2dd2a4eafc78764ca3be067 100644
--- a/worker/api/python/setup.py.in
+++ b/consumer/api/python/setup.py.in
@@ -5,10 +5,10 @@ from Cython.Build import cythonize
 
 import numpy
 
-module = Extension("asapo_worker", ["asapo_worker.pyx"],
-                       extra_objects=['@ASAPO_WORKER_LIB@',
+module = Extension("asapo_consumer", ["asapo_consumer.pyx"],
+                       extra_objects=['@ASAPO_CONSUMER_LIB@',
                                       '@CURL_LIBRARIES@'],
-                       include_dirs=["@ASAPO_CXX_COMMON_INCLUDE_DIR@","@ASAPO_WORKER_INCLUDE_DIR@",numpy.get_include()],
+                       include_dirs=["@ASAPO_CXX_COMMON_INCLUDE_DIR@","@ASAPO_CONSUMER_INCLUDE_DIR@",numpy.get_include()],
                        extra_compile_args=@EXTRA_COMPILE_ARGS@,
                        extra_link_args=@EXTRA_LINK_ARGS@,
                        language="c++",
diff --git a/worker/api/python/source_dist_linux/CMakeLists.txt b/consumer/api/python/source_dist_linux/CMakeLists.txt
similarity index 76%
rename from worker/api/python/source_dist_linux/CMakeLists.txt
rename to consumer/api/python/source_dist_linux/CMakeLists.txt
index 48fe09828008db7dde6c425146f05ff5e1aa6c2f..526146c65aaca82983e6b48cc16f5f1427a9b46a 100644
--- a/worker/api/python/source_dist_linux/CMakeLists.txt
+++ b/consumer/api/python/source_dist_linux/CMakeLists.txt
@@ -6,17 +6,17 @@ ADD_CUSTOM_TARGET(python-dist ALL
         COMMAND python setup.py sdist
         )
 
-#get_property(ASAPO_WORKER_LIB TARGET asapo-worker PROPERTY LOCATION)
+#get_property(ASAPO_CONSUMER_LIB TARGET asapo-consumer PROPERTY LOCATION)
 
 ADD_CUSTOM_TARGET(copy_python_dist ALL
         COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_BINARY_DIR}/../asapo_wrappers.h ${CMAKE_CURRENT_BINARY_DIR}/.
-        COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_BINARY_DIR}/../asapo_worker.cpp ${CMAKE_CURRENT_BINARY_DIR}/.
-        COMMAND ${CMAKE_COMMAND} -E copy_directory ${CMAKE_SOURCE_DIR}/worker/api/cpp/include ${CMAKE_CURRENT_BINARY_DIR}/include
+        COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_BINARY_DIR}/../asapo_consumer.cpp ${CMAKE_CURRENT_BINARY_DIR}/.
+        COMMAND ${CMAKE_COMMAND} -E copy_directory ${CMAKE_SOURCE_DIR}/consumer/api/cpp/include ${CMAKE_CURRENT_BINARY_DIR}/include
         COMMAND ${CMAKE_COMMAND} -E copy_directory ${ASAPO_CXX_COMMON_INCLUDE_DIR}/common ${CMAKE_CURRENT_BINARY_DIR}/include/common
         COMMAND ${CMAKE_COMMAND} -E remove ${CMAKE_CURRENT_BINARY_DIR}/include/common/version.h.in
         COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_BINARY_DIR}/lib
         COMMAND ${CMAKE_COMMAND} -E copy ${CURL_LIBRARIES} ${CMAKE_CURRENT_BINARY_DIR}/lib
-        COMMAND ${CMAKE_COMMAND} -E copy $<TARGET_FILE:asapo-worker> ${CMAKE_CURRENT_BINARY_DIR}/lib
+        COMMAND ${CMAKE_COMMAND} -E copy $<TARGET_FILE:asapo-consumer> ${CMAKE_CURRENT_BINARY_DIR}/lib
 
         )
 
diff --git a/worker/api/python/source_dist_linux/MANIFEST.in b/consumer/api/python/source_dist_linux/MANIFEST.in
similarity index 100%
rename from worker/api/python/source_dist_linux/MANIFEST.in
rename to consumer/api/python/source_dist_linux/MANIFEST.in
diff --git a/worker/api/python/source_dist_linux/setup.py.in b/consumer/api/python/source_dist_linux/setup.py.in
similarity index 78%
rename from worker/api/python/source_dist_linux/setup.py.in
rename to consumer/api/python/source_dist_linux/setup.py.in
index d4df8d1c82987c0e14dff5091f3755eec6aa8fd4..43c0c9eb44ba6dd1732d83efab96b9f7829cae64 100644
--- a/worker/api/python/source_dist_linux/setup.py.in
+++ b/consumer/api/python/source_dist_linux/setup.py.in
@@ -3,8 +3,8 @@ from distutils.core import Extension
 import numpy
 
 ext_modules = [
-    Extension("asapo_worker", ["asapo_worker.cpp"],
-              extra_objects=['lib/libasapo-worker.a'],
+    Extension("asapo_consumer", ["asapo_consumer.cpp"],
+              extra_objects=['lib/libasapo-consumer.a'],
 					   libraries = ['curl'],
                        include_dirs=["include/common","include",numpy.get_include()],
                        extra_compile_args=@EXTRA_COMPILE_ARGS@,
@@ -13,7 +13,7 @@ ext_modules = [
 ]
 
 setup(
-      name ="asapo_worker",
+      name ="asapo_consumer",
       ext_modules = ext_modules,
       version = "@ASAPO_VERSION_PYTHON@",
       setup_requires=["numpy"],
diff --git a/worker/tools/CMakeLists.txt b/consumer/tools/CMakeLists.txt
similarity index 100%
rename from worker/tools/CMakeLists.txt
rename to consumer/tools/CMakeLists.txt
diff --git a/worker/tools/folder_to_db/CMakeLists.txt b/consumer/tools/folder_to_db/CMakeLists.txt
similarity index 100%
rename from worker/tools/folder_to_db/CMakeLists.txt
rename to consumer/tools/folder_to_db/CMakeLists.txt
diff --git a/worker/tools/folder_to_db/src/folder_db_importer.cpp b/consumer/tools/folder_to_db/src/folder_db_importer.cpp
similarity index 100%
rename from worker/tools/folder_to_db/src/folder_db_importer.cpp
rename to consumer/tools/folder_to_db/src/folder_db_importer.cpp
diff --git a/worker/tools/folder_to_db/src/folder_db_importer.h b/consumer/tools/folder_to_db/src/folder_db_importer.h
similarity index 100%
rename from worker/tools/folder_to_db/src/folder_db_importer.h
rename to consumer/tools/folder_to_db/src/folder_db_importer.h
diff --git a/worker/tools/folder_to_db/src/main.cpp b/consumer/tools/folder_to_db/src/main.cpp
similarity index 100%
rename from worker/tools/folder_to_db/src/main.cpp
rename to consumer/tools/folder_to_db/src/main.cpp
diff --git a/worker/tools/folder_to_db/unittests/test_folder_to_db.cpp b/consumer/tools/folder_to_db/unittests/test_folder_to_db.cpp
similarity index 100%
rename from worker/tools/folder_to_db/unittests/test_folder_to_db.cpp
rename to consumer/tools/folder_to_db/unittests/test_folder_to_db.cpp
diff --git a/deploy/docker/build_env/Ubuntu16.04/build.sh b/deploy/docker/build_env/Ubuntu16.04/build.sh
index edebc5038a65d919b7dbb10f9db3973c97d12c8f..40318cb213fe0c5090b888e67c2e8a6db996f000 100755
--- a/deploy/docker/build_env/Ubuntu16.04/build.sh
+++ b/deploy/docker/build_env/Ubuntu16.04/build.sh
@@ -2,7 +2,7 @@
 
 cd /asapo/build
 cmake -DCMAKE_BUILD_TYPE="Release" -DLIBCURL_DIR=/curl -DBUILD_PYTHON_DOCS=ON ..
-cd worker && make
+cd consumer && make
 cd ../producer && make
 cd ../sphinx && make
 
diff --git a/doxygen.ini b/doxygen.ini
index 10658938915284fdca9482b028fc5cd7af86d400..a4689cde6bad318094c346b35669311b50858d05 100644
--- a/doxygen.ini
+++ b/doxygen.ini
@@ -774,7 +774,7 @@ WARN_LOGFILE           =
 INPUT                  = common \
                          lib \
                          producer \
-                         worker \
+                         consumer \
                          receiver
 
 # This tag can be used to specify the character encoding of the source files
diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt
index beb4a38d87dce9beea4b529653a1452df56b52c0..20deaabb0ece834782b16cf32b11dd9c3f0ca783 100644
--- a/examples/CMakeLists.txt
+++ b/examples/CMakeLists.txt
@@ -1,4 +1,4 @@
-add_subdirectory(worker)
+add_subdirectory(consumer)
 add_subdirectory(pipeline)
 add_subdirectory(producer)
 
diff --git a/examples/worker/CMakeLists.txt b/examples/consumer/CMakeLists.txt
similarity index 82%
rename from examples/worker/CMakeLists.txt
rename to examples/consumer/CMakeLists.txt
index f3b694a1306779543ffeb0931f7fbf5ad59f14eb..f06f7029345755ff87348de4e3ae721116895b3e 100644
--- a/examples/worker/CMakeLists.txt
+++ b/examples/consumer/CMakeLists.txt
@@ -1,7 +1,5 @@
 find_package(Threads)
 
-add_subdirectory(process_folder)
-
 add_subdirectory(getnext_broker)
 
 if (UNIX OR CMAKE_BUILD_TYPE STREQUAL "Release")
diff --git a/examples/worker/getnext_broker/CMakeLists.txt b/examples/consumer/getnext_broker/CMakeLists.txt
similarity index 91%
rename from examples/worker/getnext_broker/CMakeLists.txt
rename to examples/consumer/getnext_broker/CMakeLists.txt
index 19796f976678b2127e7bdcbeb15b2e9c900739ec..aeda6a8ce710d51b8d5c303be88d671b6bf9c9d0 100644
--- a/examples/worker/getnext_broker/CMakeLists.txt
+++ b/examples/consumer/getnext_broker/CMakeLists.txt
@@ -2,7 +2,7 @@ set(TARGET_NAME getnext_broker)
 set(SOURCE_FILES getnext_broker.cpp)
 
 add_executable(${TARGET_NAME} ${SOURCE_FILES})
-target_link_libraries(${TARGET_NAME} asapo-worker)
+target_link_libraries(${TARGET_NAME} asapo-consumer)
 
 #use expression generator to get rid of VS adding Debug/Release folders
 set_target_properties(${TARGET_NAME} PROPERTIES RUNTIME_OUTPUT_DIRECTORY
@@ -17,7 +17,7 @@ prepare_asapo()
 configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/broker_settings.json settings.json COPYONLY)
 add_script_test("${TARGET_NAME}" "${CMAKE_CURRENT_BINARY_DIR}/${TARGET_NAME}")
 
-set (dir examples/worker/${TARGET_NAME})
+set (dir examples/consumer/${TARGET_NAME})
 install(TARGETS ${TARGET_NAME} DESTINATION "${dir}")
 install(FILES ${SOURCE_FILES} DESTINATION "${dir}")
 
diff --git a/examples/worker/getnext_broker/CMakeLists_separate.in b/examples/consumer/getnext_broker/CMakeLists_separate.in
similarity index 89%
rename from examples/worker/getnext_broker/CMakeLists_separate.in
rename to examples/consumer/getnext_broker/CMakeLists_separate.in
index 5bcf1c56fc63ba39f0968b4d6f2e4e67279927b6..b4202e55288dbebbbc0435a615677a37ac1c36e7 100644
--- a/examples/worker/getnext_broker/CMakeLists_separate.in
+++ b/examples/consumer/getnext_broker/CMakeLists_separate.in
@@ -31,4 +31,4 @@ link_directories(@CMAKE_INSTALL_PREFIX@/lib)
 
 add_executable(${TARGET_NAME} ${SOURCE_FILES})
 target_include_directories(${TARGET_NAME} PUBLIC @CMAKE_INSTALL_PREFIX@/include ${CURL_INCLUDE_DIRS})
-target_link_libraries(${TARGET_NAME} asapo-worker ${CURL_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
+target_link_libraries(${TARGET_NAME} asapo-consumer ${CURL_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
diff --git a/examples/worker/getnext_broker/Makefile.in b/examples/consumer/getnext_broker/Makefile.in
similarity index 83%
rename from examples/worker/getnext_broker/Makefile.in
rename to examples/consumer/getnext_broker/Makefile.in
index ae61c2ed41f2448088fbda0b24b71bd6ba5e7bd6..845611464d7267b11a64603da1a76a188455d297 100644
--- a/examples/worker/getnext_broker/Makefile.in
+++ b/examples/consumer/getnext_broker/Makefile.in
@@ -3,7 +3,7 @@ PROGRAM=@TARGET_NAME@
 CXX=g++
 CXXFLAGS=-std=c++11
 LDFLAGS=-pthread  -static-libgcc -static-libstdc++
-LIBS=-L @CMAKE_INSTALL_PREFIX@/lib -lasapo-worker -L ${LIBCURL_DIR}/lib -lcurl
+LIBS=-L @CMAKE_INSTALL_PREFIX@/lib -lasapo-consumer -L ${LIBCURL_DIR}/lib -lcurl
 INCLUDE=-I @CMAKE_INSTALL_PREFIX@/include -I ${LIBCURL_DIR}/include
 RM=rm -f
 
diff --git a/examples/worker/getnext_broker/check_linux.sh b/examples/consumer/getnext_broker/check_linux.sh
similarity index 100%
rename from examples/worker/getnext_broker/check_linux.sh
rename to examples/consumer/getnext_broker/check_linux.sh
diff --git a/examples/worker/getnext_broker/check_windows.bat b/examples/consumer/getnext_broker/check_windows.bat
similarity index 100%
rename from examples/worker/getnext_broker/check_windows.bat
rename to examples/consumer/getnext_broker/check_windows.bat
diff --git a/examples/worker/getnext_broker/getnext_broker.cpp b/examples/consumer/getnext_broker/getnext_broker.cpp
similarity index 97%
rename from examples/worker/getnext_broker/getnext_broker.cpp
rename to examples/consumer/getnext_broker/getnext_broker.cpp
index 871ae2c64597353d8662d9321e3ef81077d84df5..5e11c3aab9106ab8331c4b46d7ac698110e43ed8 100644
--- a/examples/worker/getnext_broker/getnext_broker.cpp
+++ b/examples/consumer/getnext_broker/getnext_broker.cpp
@@ -11,7 +11,7 @@
 #include <sstream>
 
 
-#include "asapo_worker.h"
+#include "asapo_consumer.h"
 
 using std::chrono::system_clock;
 using asapo::Error;
@@ -40,7 +40,7 @@ void WaitThreads(std::vector<std::thread>* threads) {
 int ProcessError(const Error& err) {
     if (err == nullptr) return 0;
     std::cout << err->Explain() << std::endl;
-    return err == asapo::IOErrorTemplates::kTimeout ? 0 : 1;
+    return err == asapo::ConsumerErrorTemplates::kEndOfStream ? 0 : 1;
 }
 
 std::vector<std::thread> StartThreads(const Args& params,
@@ -58,7 +58,6 @@ std::vector<std::thread> StartThreads(const Args& params,
         asapo::FileData data;
 
         lock.lock();
-
         if (group_id.empty()) {
             group_id = broker->GenerateNewGroupId(&err);
             if (err) {
@@ -96,9 +95,10 @@ std::vector<std::thread> StartThreads(const Args& params,
                     }
                 }
             }
+
             if (err) {
                 (*errors)[i] += ProcessError(err);
-                if (err == asapo::IOErrorTemplates::kTimeout) {
+                if (err != asapo::ConsumerErrorTemplates::kNoData) {
                     break;
                 }
             }
diff --git a/examples/worker/getnext_broker_python/CMakeLists.txt b/examples/consumer/getnext_broker_python/CMakeLists.txt
similarity index 75%
rename from examples/worker/getnext_broker_python/CMakeLists.txt
rename to examples/consumer/getnext_broker_python/CMakeLists.txt
index 3964a2a0eb38860566fd739df4e3b00eec3dfa12..2e13a9d90ec9b5b6d278fd3d257a2542a38fd0f8 100644
--- a/examples/worker/getnext_broker_python/CMakeLists.txt
+++ b/examples/consumer/getnext_broker_python/CMakeLists.txt
@@ -6,12 +6,12 @@ prepare_asapo()
 if (UNIX)
     get_target_property(PYTHON_LIBS python-lib2 BINARY_DIR)
 else()
-    get_target_property(PYTHON_LIBS asapo_worker BINARY_DIR)
+    get_target_property(PYTHON_LIBS asapo_consumer BINARY_DIR)
 endif()
 
 add_script_test("${TARGET_NAME}" ${PYTHON_LIBS} nomem)
 
 configure_file(getnext.py getnext.py COPYONLY)
 
-set (dir examples/worker/${TARGET_NAME})
+set (dir examples/consumer/${TARGET_NAME})
 install(FILES ${CMAKE_CURRENT_BINARY_DIR}/getnext.py DESTINATION "${dir}")
diff --git a/examples/worker/getnext_broker_python/check_linux.sh b/examples/consumer/getnext_broker_python/check_linux.sh
similarity index 100%
rename from examples/worker/getnext_broker_python/check_linux.sh
rename to examples/consumer/getnext_broker_python/check_linux.sh
diff --git a/examples/worker/getnext_broker_python/check_windows.bat b/examples/consumer/getnext_broker_python/check_windows.bat
similarity index 100%
rename from examples/worker/getnext_broker_python/check_windows.bat
rename to examples/consumer/getnext_broker_python/check_windows.bat
diff --git a/examples/consumer/getnext_broker_python/getnext.py b/examples/consumer/getnext_broker_python/getnext.py
new file mode 100644
index 0000000000000000000000000000000000000000..a17fe4f85ea0a3e6eafa0c5988c1de7b4b3fa107
--- /dev/null
+++ b/examples/consumer/getnext_broker_python/getnext.py
@@ -0,0 +1,26 @@
+from __future__ import print_function
+
+import asapo_consumer
+import json
+import sys
+
+source, path, beamtime, token, group_id = sys.argv[1:]
+
+broker = asapo_consumer.create_server_broker(source,path, beamtime,"",token,1000)
+
+
+if group_id == "new":
+    group_id_new = broker.generate_group_id()
+    print ('generated group id: ', group_id_new)
+else:
+    group_id_new = group_id
+
+_, meta = broker.get_next(group_id_new, meta_only=True)
+print ('filename: ', meta['name'])
+print ('meta: ', json.dumps(meta, indent=4, sort_keys=True))
+
+try:
+    beamtime_meta = broker.get_beamtime_meta()
+    print ('beamtime meta: ', json.dumps(beamtime_meta, indent=4, sort_keys=True))
+except asapo_consumer.AsapoError as err:
+    print ('error getting beamtime meta: ', err)
diff --git a/examples/pipeline/in_to_out/CMakeLists.txt b/examples/pipeline/in_to_out/CMakeLists.txt
index d8579506c2829c7de64668d58d2f4ed90b8bbf78..efe0d0f8a6f0e87758fef1c4fa683fa469068fa0 100644
--- a/examples/pipeline/in_to_out/CMakeLists.txt
+++ b/examples/pipeline/in_to_out/CMakeLists.txt
@@ -2,7 +2,7 @@ set(TARGET_NAME pipeline_inout)
 set(SOURCE_FILES in_to_out.cpp)
 
 add_executable(${TARGET_NAME} ${SOURCE_FILES})
-target_link_libraries(${TARGET_NAME} asapo-worker asapo-producer)
+target_link_libraries(${TARGET_NAME} asapo-consumer asapo-producer)
 
 #use expression generator to get rid of VS adding Debug/Release folders
 set_target_properties(${TARGET_NAME} PROPERTIES RUNTIME_OUTPUT_DIRECTORY
diff --git a/examples/pipeline/in_to_out/CMakeLists_separate.in b/examples/pipeline/in_to_out/CMakeLists_separate.in
index 5bcf1c56fc63ba39f0968b4d6f2e4e67279927b6..b4202e55288dbebbbc0435a615677a37ac1c36e7 100644
--- a/examples/pipeline/in_to_out/CMakeLists_separate.in
+++ b/examples/pipeline/in_to_out/CMakeLists_separate.in
@@ -31,4 +31,4 @@ link_directories(@CMAKE_INSTALL_PREFIX@/lib)
 
 add_executable(${TARGET_NAME} ${SOURCE_FILES})
 target_include_directories(${TARGET_NAME} PUBLIC @CMAKE_INSTALL_PREFIX@/include ${CURL_INCLUDE_DIRS})
-target_link_libraries(${TARGET_NAME} asapo-worker ${CURL_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
+target_link_libraries(${TARGET_NAME} asapo-consumer ${CURL_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
diff --git a/examples/pipeline/in_to_out/Makefile.in b/examples/pipeline/in_to_out/Makefile.in
index ae61c2ed41f2448088fbda0b24b71bd6ba5e7bd6..845611464d7267b11a64603da1a76a188455d297 100644
--- a/examples/pipeline/in_to_out/Makefile.in
+++ b/examples/pipeline/in_to_out/Makefile.in
@@ -3,7 +3,7 @@ PROGRAM=@TARGET_NAME@
 CXX=g++
 CXXFLAGS=-std=c++11
 LDFLAGS=-pthread  -static-libgcc -static-libstdc++
-LIBS=-L @CMAKE_INSTALL_PREFIX@/lib -lasapo-worker -L ${LIBCURL_DIR}/lib -lcurl
+LIBS=-L @CMAKE_INSTALL_PREFIX@/lib -lasapo-consumer -L ${LIBCURL_DIR}/lib -lcurl
 INCLUDE=-I @CMAKE_INSTALL_PREFIX@/include -I ${LIBCURL_DIR}/include
 RM=rm -f
 
diff --git a/examples/pipeline/in_to_out/in_to_out.cpp b/examples/pipeline/in_to_out/in_to_out.cpp
index ac56be7218948e738480e5a73d03ccbfa89b1135..ac2ee3e5c4e899e37b7493d150afce9379b4e9d5 100644
--- a/examples/pipeline/in_to_out/in_to_out.cpp
+++ b/examples/pipeline/in_to_out/in_to_out.cpp
@@ -10,7 +10,7 @@
 #include <string>
 #include <sstream>
 
-#include "asapo_worker.h"
+#include "asapo_consumer.h"
 #include "asapo_producer.h"
 
 using std::chrono::system_clock;
@@ -59,7 +59,7 @@ void WaitConsumerThreadsFinished(std::vector<std::thread>* threads) {
 int ProcessError(const Error& err) {
     if (err == nullptr) return 0;
     std::cout << err->Explain() << std::endl;
-    return err == asapo::IOErrorTemplates::kTimeout ? 0 : 1;
+    return err == asapo::ConsumerErrorTemplates::kEndOfStream ? 0 : 1;
 }
 
 BrokerPtr CreateBrokerAndGroup(const Args& args, Error* err) {
@@ -149,7 +149,7 @@ std::vector<std::thread> StartConsumerThreads(const Args& args, const ProducerPt
             auto err = ProcessNextEvent(args, broker, producer);
             if (err) {
                 (*errors)[i] += ProcessError(err);
-                if (err == asapo::IOErrorTemplates::kTimeout) {
+                if (err == asapo::ConsumerErrorTemplates::kEndOfStream) {
                     break;
                 }
             }
diff --git a/examples/pipeline/in_to_out_python/CMakeLists.txt b/examples/pipeline/in_to_out_python/CMakeLists.txt
index 2aecf0dfabbc62f1e29cf62a4e7e966dd8a73e62..c52a878b077c79429ef7a56da9ebcd2ac3e16536 100644
--- a/examples/pipeline/in_to_out_python/CMakeLists.txt
+++ b/examples/pipeline/in_to_out_python/CMakeLists.txt
@@ -3,14 +3,14 @@ set(TARGET_NAME pipeline_inout_python)
 prepare_asapo()
 
 if (UNIX)
-    get_target_property(PYTHON_LIBS_WORKER python-lib2 BINARY_DIR)
+    get_target_property(PYTHON_LIBS_CONSUMER python-lib2 BINARY_DIR)
     get_target_property(PYTHON_LIBS_PRODUCER python-lib2-producer BINARY_DIR)
 else()
-    get_target_property(PYTHON_LIBS_WORKER asapo_worker BINARY_DIR)
+    get_target_property(PYTHON_LIBS_CONSUMER asapo_consumer BINARY_DIR)
     get_target_property(PYTHON_LIBS_PRODUCER asapo_producer BINARY_DIR)
 endif()
 
 file(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/in_to_out.py TEST_SCRIPT )
 
-add_script_test("${TARGET_NAME}" "python ${PYTHON_LIBS_WORKER} ${PYTHON_LIBS_PRODUCER} ${TEST_SCRIPT} " nomem)
+add_script_test("${TARGET_NAME}" "python ${PYTHON_LIBS_CONSUMER} ${PYTHON_LIBS_PRODUCER} ${TEST_SCRIPT} " nomem)
 
diff --git a/examples/pipeline/in_to_out_python/in_to_out.py b/examples/pipeline/in_to_out_python/in_to_out.py
index 11b3c5cf7012d83bbcba06bafdbe763323cc3d7d..c8d5aed3fcfd9f9d0d0d38c45ecfc068c37cb981 100644
--- a/examples/pipeline/in_to_out_python/in_to_out.py
+++ b/examples/pipeline/in_to_out_python/in_to_out.py
@@ -1,6 +1,6 @@
 from __future__ import print_function
 
-import asapo_worker
+import asapo_consumer
 import asapo_producer
 import json
 import sys
@@ -29,30 +29,16 @@ def wait_send(n_files, timeout_s):
             break
         time.sleep(1)
 
-def assert_err(err):
-    if err is not None:
-        print(err)
-        sys.stdout.flush()
-        sys.exit(1)
-
-
-
-
-
 source, path, beamtime,stream_in, stream_out, token, timeout_s,nthreads, transfer_data = sys.argv[1:]
 timeout_s=int(timeout_s)
 nthreads=int(nthreads)
 transfer_data=int(transfer_data)>0
 
-broker, err = asapo_worker.create_server_broker(source,path, beamtime,stream_in,token,timeout_s*1000)
-assert_err(err)
+broker = asapo_consumer.create_server_broker(source,path, beamtime,stream_in,token,timeout_s*1000)
 
+producer = asapo_producer.create_producer(source,beamtime, stream_out, token, nthreads)
 
-producer, err = asapo_producer.create_producer(source,beamtime, stream_out, token, nthreads)
-assert_err(err)
-
-group_id, err = broker.generate_group_id()
-assert_err(err)
+group_id = broker.generate_group_id()
 
 n_recv = 0
 
@@ -62,19 +48,16 @@ else:
     ingest_mode = asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY
 
 while True:
-    data, meta, err = broker.get_next(group_id, meta_only=not transfer_data)
-    if err is None:
+    try:
+        data, meta = broker.get_next(group_id, meta_only=not transfer_data)
         print ("received: ",meta)
         n_recv = n_recv + 1
-        err = producer.send_data(meta['_id'],meta['name']+"_"+stream_out ,data,
+        producer.send_data(meta['_id'],meta['name']+"_"+stream_out ,data,
                              ingest_mode = ingest_mode, callback = callback)
-        assert_err(err)
-    elif 'timeout' in err:
-            break
-    else:
-        assert_err(err)
-
-
+    except asapo_consumer.AsapoEndOfStreamError:
+        break
+    except asapo_producer.AsapoProducerError:
+        break
 
 
 wait_send(n_recv,timeout_s)
diff --git a/examples/worker/getnext_broker_python/getnext.py b/examples/worker/getnext_broker_python/getnext.py
deleted file mode 100644
index f3ec1454f76eeb6faef67dff6829051895898ca9..0000000000000000000000000000000000000000
--- a/examples/worker/getnext_broker_python/getnext.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from __future__ import print_function
-
-import asapo_worker
-import json
-import sys
-
-source, path, beamtime, token, group_id = sys.argv[1:]
-
-broker, err = asapo_worker.create_server_broker(source,path, beamtime,"",token,1000)
-
-
-if group_id == "new":
-    group_id_new, err = broker.generate_group_id()
-    if err != None:
-        print ('cannot generate group id, err: ', err)
-    else:
-        print ('generated group id: ', group_id_new)
-else:
-    group_id_new = group_id
-
-_, meta, err = broker.get_next(group_id_new, meta_only=True)
-if err != None:
-    print ('err: ', err)
-else:
-    print ('filename: ', meta['name'])
-    print ('meta: ', json.dumps(meta, indent=4, sort_keys=True))
-
-
-beamtime_meta,err = broker.get_beamtime_meta()
-if err != None:
-    print ('error getting beamtime meta: ', err)
-else:
-    print ('beamtime meta: ', json.dumps(beamtime_meta, indent=4, sort_keys=True))
diff --git a/examples/worker/process_folder/CMakeLists.txt b/examples/worker/process_folder/CMakeLists.txt
deleted file mode 100644
index db51972d2d3dfa5d1079fb93927aef6bb8f7520f..0000000000000000000000000000000000000000
--- a/examples/worker/process_folder/CMakeLists.txt
+++ /dev/null
@@ -1,34 +0,0 @@
-set(TARGET_NAME worker_processfolder)
-set(SOURCE_FILES process_folder.cpp)
-
-add_executable(${TARGET_NAME} ${SOURCE_FILES})
-
-#Add all necessary common libraries
-GET_PROPERTY(ASAPO_COMMON_IO_LIBRARIES GLOBAL PROPERTY ASAPO_COMMON_IO_LIBRARIES)
-target_link_libraries(${TARGET_NAME} ${ASAPO_COMMON_IO_LIBRARIES})
-
-target_link_libraries(${TARGET_NAME} asapo-worker)
-set_target_properties(${TARGET_NAME} PROPERTIES LINKER_LANGUAGE CXX)
-#use expression generator to get rid of VS adding Debug/Release folders
-set_target_properties(${TARGET_NAME} PROPERTIES RUNTIME_OUTPUT_DIRECTORY
-        ${CMAKE_CURRENT_BINARY_DIR}$<$<CONFIG:Debug>:>
-        )
-
-get_target_property(VAR ${TARGET_NAME} RUNTIME_OUTPUT_DIRECTORY)
-
-if (CMAKE_COMPILER_IS_GNUCXX)
-    set_target_properties(${TARGET_NAME} PROPERTIES LINK_FLAGS_DEBUG "--coverage")
-endif()
-
-add_script_test("${TARGET_NAME}" ${CMAKE_CURRENT_BINARY_DIR}/${TARGET_NAME})
-
-
-set (dir examples/worker/process_folder)
-install(TARGETS ${TARGET_NAME} DESTINATION "${dir}")
-install(FILES ${SOURCE_FILES} DESTINATION "${dir}")
-
-configure_file(CMakeLists_separate.in CMakeLists_separate.txt @ONLY)
-install(FILES ${CMAKE_CURRENT_BINARY_DIR}/CMakeLists_separate.txt DESTINATION "${dir}" RENAME CMakeLists.txt)
-
-configure_file(Makefile.in Makefile_LINUX @ONLY)
-install(FILES ${CMAKE_CURRENT_BINARY_DIR}/Makefile_LINUX DESTINATION "${dir}")
diff --git a/examples/worker/process_folder/CMakeLists_separate.in b/examples/worker/process_folder/CMakeLists_separate.in
deleted file mode 100644
index a250ea32fa680b894ecd3651ad2a82e741afba14..0000000000000000000000000000000000000000
--- a/examples/worker/process_folder/CMakeLists_separate.in
+++ /dev/null
@@ -1,22 +0,0 @@
-cmake_minimum_required(VERSION 2.8)
-
-project(worker_processfolder)
-
-set(CMAKE_CXX_STANDARD 11)
-
-IF(CMAKE_C_COMPILER_ID STREQUAL "GNU")
-    SET( CMAKE_EXE_LINKER_FLAGS  "${CMAKE_EXE_LINKER_FLAGS} -static-libgcc -static-libstdc++")
-ENDIF()
-
-find_package (Threads)
-
-set(TARGET_NAME ${CMAKE_PROJECT_NAME})
-
-set(SOURCE_FILES process_folder.cpp)
-
-link_directories(@CMAKE_INSTALL_PREFIX@/lib)
-
-add_executable(${TARGET_NAME} ${SOURCE_FILES})
-target_include_directories(${TARGET_NAME} PUBLIC @CMAKE_INSTALL_PREFIX@/include)
-target_link_libraries(${TARGET_NAME} asapo-worker ${CMAKE_THREAD_LIBS_INIT})
-
diff --git a/examples/worker/process_folder/Makefile.in b/examples/worker/process_folder/Makefile.in
deleted file mode 100644
index 5f7b7a258757cbad3b17264b5eec2532250911a2..0000000000000000000000000000000000000000
--- a/examples/worker/process_folder/Makefile.in
+++ /dev/null
@@ -1,25 +0,0 @@
-PROGRAM=worker_processfolder
-
-CXX=g++
-CXXFLAGS=-std=c++11
-LDFLAGS=-pthread  -static-libgcc -static-libstdc++
-LIBS=-L @CMAKE_INSTALL_PREFIX@/lib -lasapo-worker
-INCLUDE=@CMAKE_INSTALL_PREFIX@/include
-RM=rm -f
-
-SRCS=process_folder.cpp
-OBJS=$(subst .cpp,.o,$(SRCS))
-
-all: $(PROGRAM)
-
-$(PROGRAM): $(OBJS)
-	$(CXX) $(LDFLAGS) -o $@ $^ $(LIBS)
-
-%.o: %.cpp
-	$(CXX) $(CXXFLAGS) -I$(INCLUDE) -c -o $@ $<
-
-clean:
-	$(RM) $(OBJS)
-
-distclean: clean
-	$(RM) $(PROGRAM)
diff --git a/examples/worker/process_folder/check_linux.sh b/examples/worker/process_folder/check_linux.sh
deleted file mode 100755
index 905190558f4918d431128169c854a48ba91c564c..0000000000000000000000000000000000000000
--- a/examples/worker/process_folder/check_linux.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-mkdir -p test
-touch test/1
-
-$@ test | grep "Processed 1 file(s)"
-
-rm -rf test
diff --git a/examples/worker/process_folder/check_windows.bat b/examples/worker/process_folder/check_windows.bat
deleted file mode 100644
index 27e5525f9b9f2dc38fd2bd566748da488c08b44c..0000000000000000000000000000000000000000
--- a/examples/worker/process_folder/check_windows.bat
+++ /dev/null
@@ -1,6 +0,0 @@
-mkdir test
-echo "" > test/1
-
-.\worker_processfolder test | findstr /c:"Processed 1 file(s)"
-
-rmdir /S /Q test
diff --git a/examples/worker/process_folder/process_folder.cpp b/examples/worker/process_folder/process_folder.cpp
deleted file mode 100644
index a5b836ca886c0412b4abefdb0550232915a0f5d3..0000000000000000000000000000000000000000
--- a/examples/worker/process_folder/process_folder.cpp
+++ /dev/null
@@ -1,98 +0,0 @@
-#include <iostream>
-#include <memory>
-#include <vector>
-#include <algorithm>
-#include <thread>
-#include <chrono>
-#include <iomanip>
-
-#include "asapo_worker.h"
-
-
-using std::chrono::system_clock;
-using asapo::Error;
-
-struct Statistics {
-    std::chrono::milliseconds duration_scan;
-    std::chrono::milliseconds duration_read;
-    int nfiles;
-    double size_gb;
-    double bandwidth;
-};
-std::string ProcessCommandArguments(int argc, char* argv[]) {
-    if (argc != 2) {
-        std::cout << "Usage: " + std::string{argv[0]} +" <path to folder>" << std::endl;
-        exit(EXIT_FAILURE);
-    }
-    return argv[1];
-}
-
-std::unique_ptr<asapo::DataBroker> CreateBroker(const std::string& folder) {
-    Error err;
-    auto broker = asapo::DataBrokerFactory::CreateFolderBroker(folder, &err);
-    if (err != nullptr) {
-        std::cout << "Cannot create broker" << std::endl;
-        exit(EXIT_FAILURE);
-    }
-
-    return broker;
-}
-
-void ConnectToBrocker(std::unique_ptr<asapo::DataBroker>* broker, Statistics* statistics) {
-    system_clock::time_point t1 = system_clock::now();
-    Error err = (*broker)->Connect();
-    if (err != nullptr) {
-        std::cout << err->Explain() << std::endl;
-        exit(EXIT_FAILURE);
-    }
-    system_clock::time_point t2 = system_clock::now();
-    statistics->duration_scan = std::chrono::duration_cast<std::chrono::milliseconds>( t2 - t1 );
-}
-
-void ReadAllData(std::unique_ptr<asapo::DataBroker>* broker, Statistics* statistics) {
-    Error err;
-    asapo::FileInfo file_info;
-    asapo::FileData file_data;
-    system_clock::time_point t1 = system_clock::now();
-
-    int nfiles = 0;
-    uint64_t size = 0;
-    while ((err = (*broker)->GetNext(&file_info, "", &file_data)) == nullptr) {
-        nfiles++;
-        size += file_info.size;
-    }
-    if (err->GetErrorType() != asapo::ErrorType::kEndOfFile) {
-        std::cout << err->Explain() << std::endl;
-        exit(EXIT_FAILURE);
-    }
-
-    system_clock::time_point t2 = system_clock::now();
-    statistics->nfiles = nfiles;
-    statistics->size_gb = double(size) / 1000 / 1000 / 1000;
-    statistics->duration_read = std::chrono::duration_cast<std::chrono::milliseconds>( t2 - t1 );
-    statistics->bandwidth = statistics->size_gb / statistics->duration_read.count() * 1000;
-}
-
-
-void PrintStatistics(const Statistics& statistics) {
-    std::cout << "Processed " << statistics.nfiles << " file(s)" << std::endl;
-    std::cout << "Total size: " << std::setprecision(2) << statistics.size_gb << "GB" << std::endl;
-    std::cout << "Elapsed scan : " << statistics.duration_scan.count() << "ms" << std::endl;
-    std::cout << "Elapsed read : " << statistics.duration_read.count() << "ms" << std::endl;
-    std::cout << "Bandwidth: " << std::setprecision(2) << statistics.bandwidth << "GB/sec" << std::endl;
-}
-
-
-int main(int argc, char* argv[]) {
-    asapo::ExitAfterPrintVersionIfNeeded("Process Folder Broker Example", argc, argv);
-
-    std::string folder = ProcessCommandArguments(argc, argv);
-    auto broker = CreateBroker(folder);
-
-    Statistics statistics;
-    ConnectToBrocker(&broker, &statistics);
-    ReadAllData(&broker, &statistics);
-    PrintStatistics(statistics);
-
-    return 0;
-}
diff --git a/producer/api/cpp/include/asapo_producer.h b/producer/api/cpp/include/asapo_producer.h
index 1d9ea25d79275075942ec2ba0184be24d6335691..218334b9ab9d1fa80849fc15f8d57cf3576e2502 100644
--- a/producer/api/cpp/include/asapo_producer.h
+++ b/producer/api/cpp/include/asapo_producer.h
@@ -5,6 +5,7 @@
 #include "common/version.h"
 
 #include "producer/producer.h"
+#include "producer/producer_error.h"
 
 
 #endif //ASAPO_ASAPO_PRODUCER_H
diff --git a/producer/api/cpp/include/producer/producer_error.h b/producer/api/cpp/include/producer/producer_error.h
index a77e6c47af158042a35e641fae46fecad6da79e8..23bbfd81daedde0c40cb633f39d13adbe0e38bab 100644
--- a/producer/api/cpp/include/producer/producer_error.h
+++ b/producer/api/cpp/include/producer/producer_error.h
@@ -6,8 +6,6 @@
 namespace asapo {
 
 enum class ProducerErrorType {
-    kAlreadyConnected,
-    kConnectionNotReady,
     kFileTooLarge,
     kFileNameTooLong,
     kEmptyFileName,
@@ -28,12 +26,7 @@ enum class ProducerErrorType {
 using ProducerErrorTemplate = ServiceErrorTemplate<ProducerErrorType, ErrorType::kProducerError>;
 
 namespace ProducerErrorTemplates {
-auto const kAlreadyConnected = ProducerErrorTemplate {
-    "Already connected", ProducerErrorType::kAlreadyConnected
-};
-auto const kConnectionNotReady = ProducerErrorTemplate {
-    "Connection not ready", ProducerErrorType::kConnectionNotReady
-};
+
 
 auto const kWrongIngestMode = ProducerErrorTemplate {
     "wrong ingest mode", ProducerErrorType::kWrongIngestMode
diff --git a/producer/api/python/asapo_producer.pxd b/producer/api/python/asapo_producer.pxd
index bfd4a043f9cb56e5d15bf2e9773394bbcc19834f..0d1831147b8a2b119b587a176d197b0a46e56c6b 100644
--- a/producer/api/python/asapo_producer.pxd
+++ b/producer/api/python/asapo_producer.pxd
@@ -6,14 +6,33 @@ from libcpp cimport bool
 ctypedef unsigned char uint8_t
 ctypedef unsigned long uint64_t
 
+ctypedef unique_ptr[ErrorInterface] Error
+
 cdef extern from "asapo_producer.h" namespace "asapo":
-  cppclass Error:
+  cppclass CustomErrorData:
     pass
+  cppclass ErrorInterface:
+    string Explain()
+  cppclass ErrorTemplateInterface:
+    pass
+  cdef bool operator==(Error lhs, ErrorTemplateInterface rhs)
 
-
-cdef extern from "asapo_wrappers.h" namespace "asapo":
-    string GetErrorString(Error* err)
-
+cdef extern from "asapo_producer.h" namespace "asapo":
+  ErrorTemplateInterface kFileTooLarge "asapo::ProducerErrorTemplates::kFileTooLarge"
+  ErrorTemplateInterface kFileNameTooLong "asapo::ProducerErrorTemplates::kFileNameTooLong"
+  ErrorTemplateInterface kEmptyFileName "asapo::ProducerErrorTemplates::kEmptyFileName"
+  ErrorTemplateInterface kNoData "asapo::ProducerErrorTemplates::kNoData"
+  ErrorTemplateInterface kZeroDataSize "asapo::ProducerErrorTemplates::kZeroDataSize"
+  ErrorTemplateInterface kBeamtimeIdTooLong "asapo::ProducerErrorTemplates::kBeamtimeIdTooLong"
+  ErrorTemplateInterface kBeamtimeAlreadySet "asapo::ProducerErrorTemplates::kBeamtimeAlreadySet"
+  ErrorTemplateInterface kFileIdAlreadyInUse "asapo::ProducerErrorTemplates::kFileIdAlreadyInUse"
+  ErrorTemplateInterface kErrorInMetadata "asapo::ProducerErrorTemplates::kErrorInMetadata"
+  ErrorTemplateInterface kErrorSubsetSize "asapo::ProducerErrorTemplates::kErrorSubsetSize"
+  ErrorTemplateInterface kAuthorizationFailed "asapo::ProducerErrorTemplates::kAuthorizationFailed"
+  ErrorTemplateInterface kInternalServerError "asapo::ProducerErrorTemplates::kInternalServerError"
+  ErrorTemplateInterface kCannotSendDataToReceivers "asapo::ProducerErrorTemplates::kCannotSendDataToReceivers"
+  ErrorTemplateInterface kRequestPoolIsFull "asapo::ProducerErrorTemplates::kRequestPoolIsFull"
+  ErrorTemplateInterface kWrongIngestMode "asapo::ProducerErrorTemplates::kWrongIngestMode"
 
 cdef extern from "asapo_producer.h" namespace "asapo":
   cppclass FileData:
diff --git a/producer/api/python/asapo_producer.pyx.in b/producer/api/python/asapo_producer.pyx.in
index d3c884afff427085c70f0b9ebdd1e8345f94709d..1e60d18d8b90470ef061c539b049f303de01641b 100644
--- a/producer/api/python/asapo_producer.pyx.in
+++ b/producer/api/python/asapo_producer.pyx.in
@@ -34,6 +34,21 @@ cdef bytes _bytes(s):
         raise TypeError("Could not convert to unicode.")
 
 
+class AsapoProducerError(Exception):
+  pass
+
+class AsapoWrongInputError(AsapoProducerError):
+  pass
+
+# TODO: map remaining producer error templates to dedicated Python exception types
+cdef python_exception_from_error(Error& err):
+    return AsapoProducerError(err.get().Explain())
+
+
+cdef throw_exception(Error& err):
+    raise python_exception_from_error(err)
+
+
 cdef void* data_pointer_nparray(data):
   if data is None:
     return <void*>NULL
@@ -77,16 +92,14 @@ cdef class PyProducer:
         err = self.c_producer.get().SendData_(event_header, data_pointer_nparray(data), ingest_mode,
             unwrap_callback_with_memory(<RequestCallbackCythonMemory>self.c_callback_ndarr,
              <void*>self,<void*>callback if callback != None else NULL, <void*>data))
-        cdef err_str = GetErrorString(&err)
-        if err_str.strip():
-            return err_str
-        else:
-          if data is not None:
-            if data.base is not None:
-              Py_XINCREF(<PyObject*>data.base)
-            else:
-              Py_XINCREF(<PyObject*>data)
-          return None
+        if err:
+            throw_exception(err)
+        if data is not None:
+          if data.base is not None:
+            Py_XINCREF(<PyObject*>data.base)
+          else:
+            Py_XINCREF(<PyObject*>data)
+        return
     cdef EventHeader create_event_header(self,int id, exposed_path,user_meta,subset,ingest_mode):
         cdef EventHeader event_header
         event_header.file_id = id
@@ -106,12 +119,10 @@ cdef class PyProducer:
         err = self.c_producer.get().SendData_(event_header, data_pointer_bytes(data), ingest_mode,
             unwrap_callback_with_memory(<RequestCallbackCythonMemory>self.c_callback_bytesaddr,
              <void*>self,<void*>callback if callback != None else NULL, <void*>data))
-        cdef err_str = GetErrorString(&err)
-        if err_str.strip():
-            return err_str
-        else:
-            Py_XINCREF(<PyObject*>data)
-            return None
+        if err:
+            throw_exception(err)
+        Py_XINCREF(<PyObject*>data)
+        return
 
     def send_data(self, id, exposed_path,data, user_meta=None,subset=None,ingest_mode = DEFAULT_INGEST_MODE,callback=None):
         """
@@ -129,16 +140,16 @@ cdef class PyProducer:
          :type ingest_mode: int
          :param callback: callback function, default None
          :type callback: callback(info,err), where info - json string with event header that was used to send data, err - error string or None
-         :return: error, None if success.
+         :raises: AsapoProducerError
          :rtype: string
         """
 
         if type(data) == np.ndarray or data == None:
-            return self.__send_np_array(id,exposed_path,data,user_meta,subset,ingest_mode,callback)
+            self.__send_np_array(id,exposed_path,data,user_meta,subset,ingest_mode,callback)
         elif type(data) == bytes:
-            return self.__send_bytes(id,exposed_path,data,user_meta,subset,ingest_mode,callback)
+            self.__send_bytes(id,exposed_path,data,user_meta,subset,ingest_mode,callback)
         else:
-            return "wrong data type: " + str(type(data))
+            raise(AsapoProducerError("wrong data type: " + str(type(data))))
 
     def send_file(self, id, local_path, exposed_path,user_meta=None,subset=None,ingest_mode = DEFAULT_INGEST_MODE,callback=None):
         """
@@ -156,7 +167,7 @@ cdef class PyProducer:
          :type ingest_mode: int
          :param callback: callback function, default None
          :type callback: callback(info,err), where info - json string with event header that was used to send data, err - error string or None
-         :return: error, None if success.
+         :raises: AsapoProducerError
          :rtype: string
         """
 
@@ -164,24 +175,21 @@ cdef class PyProducer:
         event_header.file_size = 0
         err = self.c_producer.get().SendFile(event_header, _bytes(local_path), ingest_mode,
             unwrap_callback(<RequestCallbackCython>self.c_callback, <void*>self,<void*>callback if callback != None else NULL))
-        cdef err_str = GetErrorString(&err)
-        if err_str.strip():
-            return err_str
-        else:
-            return None
-    cdef void c_callback_python(self,py_callback, GenericRequestHeader header, string err_str):
+        if err:
+            throw_exception(err)
+        return
+    cdef void c_callback_python(self,py_callback, GenericRequestHeader header, Error& err):
         if py_callback != None:
           info_str = _str(header.Json())
           info = json.loads(info_str)
-          if err_str.strip():
-             py_err = err_str
+          if err:
+            py_err = python_exception_from_error(err)
           else:
-             py_err = None
+            py_err = None
           py_callback(info,py_err)
 
     cdef void c_callback(self,py_callback, GenericRequestHeader header, Error err) with gil:
-        cdef err_str = GetErrorString(&err)
-        self.c_callback_python(py_callback,header,"")
+        self.c_callback_python(py_callback,header,err)
 
     cdef void c_callback_ndarr(self,py_callback,nd_array,GenericRequestHeader header, Error err) with gil:
         if nd_array is not None:
@@ -189,12 +197,12 @@ cdef class PyProducer:
             Py_XDECREF(<PyObject*>nd_array.base)
           else:
             Py_XDECREF(<PyObject*>nd_array)
-        self.c_callback_python(py_callback,header,GetErrorString(&err))
+        self.c_callback_python(py_callback,header,err)
 
     cdef void c_callback_bytesaddr(self,py_callback,bytes_array,GenericRequestHeader header, Error err) with gil:
         if bytes_array is not None:
             Py_XDECREF(<PyObject*>bytes_array)
-        self.c_callback_python(py_callback,header,GetErrorString(&err))
+        self.c_callback_python(py_callback,header,err)
 
     @staticmethod
     def __create_producer(endpoint,beamtime_id,stream,token,nthreads):
@@ -205,11 +213,9 @@ cdef class PyProducer:
         source.user_token = token
         source.stream = stream
         pyProd.c_producer = Producer.Create(endpoint,nthreads,RequestHandlerType_Tcp,source,&err)
-        cdef err_str = GetErrorString(&err)
-        if err_str.strip():
-            return None,err_str
-        else:
-            return pyProd,None
+        if err:
+            throw_exception(err)
+        return pyProd
 
 def create_producer(endpoint,beamtime_id,stream,token,nthreads):
     return PyProducer.__create_producer(_bytes(endpoint),_bytes(beamtime_id),_bytes(stream),_bytes(token),nthreads)
diff --git a/receiver/src/receiver_data_server/tcp_server.cpp b/receiver/src/receiver_data_server/tcp_server.cpp
index 5e45ac6698963b6775357ba85e49090336a33595..4a924745f9cab11081790b2b52b2fcb6b6e88776 100644
--- a/receiver/src/receiver_data_server/tcp_server.cpp
+++ b/receiver/src/receiver_data_server/tcp_server.cpp
@@ -95,7 +95,7 @@ Error TcpServer::SendData(uint64_t source_id, void* buf, uint64_t size) const no
     Error err;
     io__->Send(source_id, buf, size, &err);
     if (err) {
-        log__->Error("cannot send to worker" + err->Explain());
+        log__->Error("cannot send to consumer: " + err->Explain());
     }
     return err;
 }
diff --git a/sphinx/source/conf.py b/sphinx/source/conf.py
index 7bcf43111ca77b773438b90d1857a8826c1660dc..c7cb69f0c94a93647596a605daa90c3e086fabc3 100644
--- a/sphinx/source/conf.py
+++ b/sphinx/source/conf.py
@@ -19,7 +19,7 @@
 import os
 import sys
 
-sys.path.insert(0, os.path.abspath('../../../worker/api/python'))
+sys.path.insert(0, os.path.abspath('../../../consumer/api/python'))
 sys.path.insert(0, os.path.abspath('../../../producer/api/python'))
 
 
diff --git a/sphinx/source/consumer.rst b/sphinx/source/consumer.rst
index 4bda1c1a0f323091a120ed025b110a4790c7d53b..451c070feb1f6b1c357c06f0027493226ceab1c1 100644
--- a/sphinx/source/consumer.rst
+++ b/sphinx/source/consumer.rst
@@ -3,7 +3,7 @@ Consumer
 ##################
 
 
-.. automodule:: asapo_worker
+.. automodule:: asapo_consumer
    :members:
    :undoc-members:
    :show-inheritance:
diff --git a/tests/automatic/CMakeLists.txt b/tests/automatic/CMakeLists.txt
index ea37ce600f63e9b4fe92fa165313b08a91f9aa11..2f73aa174c687d7ea3102923a9af08b706ad8d85 100644
--- a/tests/automatic/CMakeLists.txt
+++ b/tests/automatic/CMakeLists.txt
@@ -17,7 +17,7 @@ endif()
 
 add_subdirectory(authorizer)
 
-add_subdirectory(worker)
+add_subdirectory(consumer)
 
 add_subdirectory(curl_http_client)
 
diff --git a/tests/automatic/broker/get_next/check_linux.sh b/tests/automatic/broker/get_next/check_linux.sh
index 0b0b2507597ad113aac68ff2341a3ab2a36ee846..760c2402b28bff4b7faa074f71a72f821d244b34 100644
--- a/tests/automatic/broker/get_next/check_linux.sh
+++ b/tests/automatic/broker/get_next/check_linux.sh
@@ -25,7 +25,7 @@ brokerid=`echo $!`
 groupid=`curl -d '' --silent 127.0.0.1:5005/creategroup`
 curl -v  --silent 127.0.0.1:5005/database/data/stream/${groupid}/next?token=$token --stderr - | tee /dev/stderr  | grep '"_id":1'
 curl -v  --silent 127.0.0.1:5005/database/data/stream/${groupid}/next?token=$token --stderr - | tee /dev/stderr  | grep '"_id":2'
-curl -v  --silent 127.0.0.1:5005/database/data/stream/${groupid}/next?token=$token --stderr - | tee /dev/stderr  | grep "not found"
+curl -v  --silent 127.0.0.1:5005/database/data/stream/${groupid}/next?token=$token --stderr - | tee /dev/stderr  | grep '"id_max":2'
 
 # with a new group
 groupid=`curl -d '' --silent 127.0.0.1:5005/creategroup`
diff --git a/tests/automatic/broker/get_next/check_windows.bat b/tests/automatic/broker/get_next/check_windows.bat
index 02733449e6873d4b21aeb0c27e55c656a409e80a..9575988a038ac53b06299328c732f064a31060de 100644
--- a/tests/automatic/broker/get_next/check_windows.bat
+++ b/tests/automatic/broker/get_next/check_windows.bat
@@ -18,7 +18,7 @@ C:\Curl\curl.exe -d '' --silent 127.0.0.1:5005/creategroup > groupid
 set /P groupid=< groupid
 C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/database/data/stream/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":1  || goto :error
 C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/database/data/stream/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":2  || goto :error
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/database/data/stream/%groupid%/next?token=%token% --stderr - | findstr  /c:"not found"  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/database/data/stream/%groupid%/next?token=%token% --stderr - | findstr  /c:\"id_max\":2  || goto :error
 
 C:\Curl\curl.exe -d '' --silent 127.0.0.1:5005/creategroup > groupid
 set /P groupid=< groupid
diff --git a/tests/automatic/bug_fixes/CMakeLists.txt b/tests/automatic/bug_fixes/CMakeLists.txt
index 6f881e0f1aa18143112861f8e4cc2fb4c454a167..720ea565917838813a4631cf1db4e19cc8f8cc20 100644
--- a/tests/automatic/bug_fixes/CMakeLists.txt
+++ b/tests/automatic/bug_fixes/CMakeLists.txt
@@ -1,6 +1,6 @@
 if (UNIX)
     add_subdirectory(receiver_cpu_usage)
-    add_subdirectory(worker_python_memleak)
+    add_subdirectory(consumer_python_memleak)
 endif()
 
 add_subdirectory(producer_send_after_restart)
\ No newline at end of file
diff --git a/tests/automatic/bug_fixes/worker_python_memleak/CMakeLists.txt b/tests/automatic/bug_fixes/consumer_python_memleak/CMakeLists.txt
similarity index 66%
rename from tests/automatic/bug_fixes/worker_python_memleak/CMakeLists.txt
rename to tests/automatic/bug_fixes/consumer_python_memleak/CMakeLists.txt
index 333fea40e98f9f54801a3d58aa64d8a624e56f87..b139ef5ebdc798d675f3e90f6da10c1b8e2b764f 100644
--- a/tests/automatic/bug_fixes/worker_python_memleak/CMakeLists.txt
+++ b/tests/automatic/bug_fixes/consumer_python_memleak/CMakeLists.txt
@@ -1,4 +1,4 @@
-set(TARGET_NAME worker_python_memleak)
+set(TARGET_NAME consumer_python_memleak)
 
 
 prepare_asapo()
@@ -6,7 +6,7 @@ prepare_asapo()
 if (UNIX)
     get_target_property(PYTHON_LIBS python-lib2 BINARY_DIR)
 else()
-    get_target_property(PYTHON_LIBS asapo_worker BINARY_DIR)
+    get_target_property(PYTHON_LIBS asapo_consumer BINARY_DIR)
 endif()
 
 add_script_test("${TARGET_NAME}" ${PYTHON_LIBS} nomem)
diff --git a/tests/automatic/bug_fixes/worker_python_memleak/check_linux.sh b/tests/automatic/bug_fixes/consumer_python_memleak/check_linux.sh
similarity index 96%
rename from tests/automatic/bug_fixes/worker_python_memleak/check_linux.sh
rename to tests/automatic/bug_fixes/consumer_python_memleak/check_linux.sh
index 747870cd4917556882b172584cb80cf3189a5637..75a7320e5e5d06fa9177c8614f77cbce7964007c 100644
--- a/tests/automatic/bug_fixes/worker_python_memleak/check_linux.sh
+++ b/tests/automatic/bug_fixes/consumer_python_memleak/check_linux.sh
@@ -32,7 +32,7 @@ dd if=/dev/zero of=$fname bs=$size count=1
 
 export PYTHONPATH=$1:${PYTHONPATH}
 
-export PYTHONPATH=/home/yakubov/projects/asapo/cmake-build-debug/worker/api/python:${PYTHONPATH}
+export PYTHONPATH=/home/yakubov/projects/asapo/cmake-build-debug/consumer/api/python:${PYTHONPATH}
 
 
 python memleak.py $endpoint $path $beamtime_id $token &> out &
diff --git a/tests/automatic/bug_fixes/worker_python_memleak/jira_issue.txt b/tests/automatic/bug_fixes/consumer_python_memleak/jira_issue.txt
similarity index 100%
rename from tests/automatic/bug_fixes/worker_python_memleak/jira_issue.txt
rename to tests/automatic/bug_fixes/consumer_python_memleak/jira_issue.txt
diff --git a/tests/automatic/bug_fixes/consumer_python_memleak/memleak.py b/tests/automatic/bug_fixes/consumer_python_memleak/memleak.py
new file mode 100644
index 0000000000000000000000000000000000000000..0ff83df0d938ce8dec651b8a979dd0a78c2643af
--- /dev/null
+++ b/tests/automatic/bug_fixes/consumer_python_memleak/memleak.py
@@ -0,0 +1,22 @@
+import asapo_consumer
+import time
+import sys
+
+source, path, beamtime, token = sys.argv[1:]
+
+broker = asapo_consumer.create_server_broker(
+    source, path, beamtime, "stream", token, 1000)
+
+group_id  = broker.generate_group_id()
+print('generated group id: ', group_id)
+
+while True:
+    try:
+        data, meta  = broker.get_last(group_id, meta_only=False)
+        print('filename: ', meta['name'])
+    except Exception as err:
+        print('err: ', err)
+
+    sys.stdout.flush()
+    time.sleep(1)
+
diff --git a/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat b/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat
index b9bcfd2b526fe57c088edef6ba9d5f0b7004b4e1..5c85570c9514e1011622585f61ffec64ca941111 100644
--- a/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat
+++ b/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat
@@ -50,7 +50,7 @@ echo hello > c:\tmp\asapo\test_in\test1\file3
 ping 1.0.0.0 -n 10 -w 100 > nul
 
 
-REM worker
+REM consumer
 "%2" %proxy_address% %receiver_folder% %beamtime_id% 2 %token% 3000 1 | findstr /c:"Processed 3 file(s)"  || goto :error
 
 
diff --git a/tests/automatic/bug_fixes/worker_python_memleak/memleak.py b/tests/automatic/bug_fixes/worker_python_memleak/memleak.py
deleted file mode 100644
index 7f1480550da93e73562e040d93ab76870f424e7d..0000000000000000000000000000000000000000
--- a/tests/automatic/bug_fixes/worker_python_memleak/memleak.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import asapo_worker
-import time
-import sys
-
-source, path, beamtime, token = sys.argv[1:]
-
-broker, err = asapo_worker.create_server_broker(
-    source, path, beamtime, "stream", token, 1000)
-
-group_id, err = broker.generate_group_id()
-if err is not None:
-    print('cannot generate group id, err: ', err)
-else:
-    print('generated group id: ', group_id)
-
-while True:
-    data, meta, err = broker.get_last(group_id, meta_only=False)
-    if err is not None:
-        print('err: ', err)
-    else:
-        print('filename: ', meta['name'])
-    sys.stdout.flush()
-    time.sleep(1)
diff --git a/tests/automatic/worker/CMakeLists.txt b/tests/automatic/consumer/CMakeLists.txt
similarity index 54%
rename from tests/automatic/worker/CMakeLists.txt
rename to tests/automatic/consumer/CMakeLists.txt
index 9028c6af84dda26971bf2bf3cbbc58d70cbaf925..1d4de1fa0d3a9b724ca3f2ff095b50b371f01052 100644
--- a/tests/automatic/worker/CMakeLists.txt
+++ b/tests/automatic/consumer/CMakeLists.txt
@@ -1,15 +1,13 @@
 CMAKE_MINIMUM_REQUIRED(VERSION 3.7) # needed for fixtures
 
-add_subdirectory(next_multithread_folder)
 add_subdirectory(next_multithread_broker)
-add_subdirectory(connect_multithread)
-add_subdirectory(worker_api)
+add_subdirectory(consumer_api)
 
 if (UNIX OR CMAKE_BUILD_TYPE STREQUAL "Release")
-    add_subdirectory(worker_api_python)
+    add_subdirectory(consumer_api_python)
 endif()
 
-if(BUILD_WORKER_TOOLS)
+if(BUILD_CONSUMER_TOOLS)
     add_subdirectory(folder_to_db)
 endif()
 
diff --git a/tests/automatic/worker/worker_api/CMakeLists.txt b/tests/automatic/consumer/consumer_api/CMakeLists.txt
similarity index 70%
rename from tests/automatic/worker/worker_api/CMakeLists.txt
rename to tests/automatic/consumer/consumer_api/CMakeLists.txt
index ddef8469695638d686ba469c7317b31ebd92cff9..f31804954685641b614a0ac0c8f38d53f2a984ca 100644
--- a/tests/automatic/worker/worker_api/CMakeLists.txt
+++ b/tests/automatic/consumer/consumer_api/CMakeLists.txt
@@ -1,12 +1,12 @@
-set(TARGET_NAME worker_api)
-set(SOURCE_FILES worker_api.cpp)
+set(TARGET_NAME consumer_api)
+set(SOURCE_FILES consumer_api.cpp)
 
 
 ################################
 # Executable and link
 ################################
 add_executable(${TARGET_NAME} ${SOURCE_FILES})
-target_link_libraries(${TARGET_NAME} test_common asapo-worker)
+target_link_libraries(${TARGET_NAME} test_common asapo-consumer)
 
 ################################
 # Testing
diff --git a/tests/automatic/worker/worker_api/check_linux.sh b/tests/automatic/consumer/consumer_api/check_linux.sh
similarity index 100%
rename from tests/automatic/worker/worker_api/check_linux.sh
rename to tests/automatic/consumer/consumer_api/check_linux.sh
diff --git a/tests/automatic/worker/worker_api/check_windows.bat b/tests/automatic/consumer/consumer_api/check_windows.bat
similarity index 100%
rename from tests/automatic/worker/worker_api/check_windows.bat
rename to tests/automatic/consumer/consumer_api/check_windows.bat
diff --git a/tests/automatic/worker/worker_api/worker_api.cpp b/tests/automatic/consumer/consumer_api/consumer_api.cpp
similarity index 84%
rename from tests/automatic/worker/worker_api/worker_api.cpp
rename to tests/automatic/consumer/consumer_api/consumer_api.cpp
index 82683d07ba4238ba9dd7ccb07b32afe7729ad45e..5ce5b98e141182318ef73174d061179a43772e80 100644
--- a/tests/automatic/worker/worker_api/worker_api.cpp
+++ b/tests/automatic/consumer/consumer_api/consumer_api.cpp
@@ -2,7 +2,8 @@
 #include <vector>
 #include <thread>
 #include <algorithm>
-#include "worker/data_broker.h"
+#include <asapo_consumer.h>
+#include "consumer/data_broker.h"
 #include "testing.h"
 
 using asapo::M_AssertEq;
@@ -54,25 +55,42 @@ void TestSingle(const std::unique_ptr<asapo::DataBroker>& broker, const std::str
     M_AssertTrue(fi.metadata == "{\"test\":10}", "GetLast metadata");
 
     err = broker->GetNext(&fi, group_id, nullptr);
-    M_AssertTrue(err != nullptr, "GetNext2 error");
+    M_AssertTrue(err == asapo::ConsumerErrorTemplates::kEndOfStream, "GetNext2 error");
+    auto error_data = static_cast<const asapo::ConsumerErrorData*>(err->GetCustomData());
+    M_AssertTrue(error_data->id_max == 10, "GetNext2 id max");
+
+
+    err = broker->SetLastReadMarker(2, group_id);
+    M_AssertTrue(err == nullptr, "SetLastReadMarker no error");
 
-    err = broker->GetLast(&fi, group_id, nullptr);
-    M_AssertTrue(err == nullptr, "GetLast2 no error");
 
     err = broker->GetById(8, &fi, group_id, nullptr);
     M_AssertTrue(err == nullptr, "GetById error");
     M_AssertTrue(fi.name == "8", "GetById filename");
 
+    err = broker->GetNext(&fi, group_id, nullptr);
+    M_AssertTrue(err == nullptr, "GetNext After GetById  no error");
+    M_AssertTrue(fi.name == "3", "GetNext After GetById filename");
+
+
+    err = broker->GetLast(&fi, group_id, nullptr);
+    M_AssertTrue(err == nullptr, "GetLast2 no error");
+
+
+    err = broker->SetLastReadMarker(8, group_id);
+    M_AssertTrue(err == nullptr, "SetLastReadMarker 2 no error");
+
+
     err = broker->GetNext(&fi, group_id, nullptr);
     M_AssertTrue(err == nullptr, "GetNext3 no error");
     M_AssertTrue(fi.name == "9", "GetNext3 filename");
 
-    auto size = broker->GetNDataSets(&err);
-    M_AssertTrue(err == nullptr, "GetNDataSets no error");
-    M_AssertTrue(size == 10, "GetNDataSets size");
+    auto size = broker->GetCurrentSize(&err);
+    M_AssertTrue(err == nullptr, "GetCurrentSize no error");
+    M_AssertTrue(size == 10, "GetCurrentSize size");
 
-    err = broker->ResetCounter(group_id);
-    M_AssertTrue(err == nullptr, "ResetCounter");
+    err = broker->ResetLastReadMarker(group_id);
+    M_AssertTrue(err == nullptr, "ResetLastReadMarker");
 
     err = broker->GetNext(&fi, group_id, nullptr);
     M_AssertTrue(err == nullptr, "GetNext4 no error");
diff --git a/tests/automatic/worker/worker_api_python/CMakeLists.txt b/tests/automatic/consumer/consumer_api_python/CMakeLists.txt
similarity index 50%
rename from tests/automatic/worker/worker_api_python/CMakeLists.txt
rename to tests/automatic/consumer/consumer_api_python/CMakeLists.txt
index 7e6342c6f157ec21ef186c1d0a8b15873d978e0e..02e91a3f390528430184f56f349813025b4a09d6 100644
--- a/tests/automatic/worker/worker_api_python/CMakeLists.txt
+++ b/tests/automatic/consumer/consumer_api_python/CMakeLists.txt
@@ -1,4 +1,4 @@
-set(TARGET_NAME worker_api_python)
+set(TARGET_NAME consumer_api_python)
 
 
 prepare_asapo()
@@ -6,9 +6,9 @@ prepare_asapo()
 if (UNIX)
     get_target_property(PYTHON_LIBS python-lib2 BINARY_DIR)
 else()
-    get_target_property(PYTHON_LIBS asapo_worker BINARY_DIR)
+    get_target_property(PYTHON_LIBS asapo_consumer BINARY_DIR)
 endif()
 
 add_script_test("${TARGET_NAME}" ${PYTHON_LIBS} nomem)
-configure_file(worker_api.py worker_api.py COPYONLY)
+configure_file(consumer_api.py consumer_api.py COPYONLY)
 
diff --git a/tests/automatic/worker/worker_api_python/check_linux.sh b/tests/automatic/consumer/consumer_api_python/check_linux.sh
similarity index 85%
rename from tests/automatic/worker/worker_api_python/check_linux.sh
rename to tests/automatic/consumer/consumer_api_python/check_linux.sh
index c8a409864fa55f7b1e23eaea6fc400a31f60fe37..2851220aaefacaf8378080f8fd2b055ccc5c2f58 100644
--- a/tests/automatic/worker/worker_api_python/check_linux.sh
+++ b/tests/automatic/consumer/consumer_api_python/check_linux.sh
@@ -37,7 +37,7 @@ sleep 1
 
 export PYTHONPATH=$1:${PYTHONPATH}
 
-python worker_api.py 127.0.0.1:8400 $source_path $beamtime_id $token_test_run single
+python consumer_api.py 127.0.0.1:8400 $source_path $beamtime_id $token_test_run single
 
 
 #check datasets
@@ -57,4 +57,4 @@ do
 done
 
 
-python worker_api.py 127.0.0.1:8400 $source_path $beamtime_id $token_test_run datasets
+python consumer_api.py 127.0.0.1:8400 $source_path $beamtime_id $token_test_run datasets
diff --git a/tests/automatic/worker/worker_api_python/check_windows.bat b/tests/automatic/consumer/consumer_api_python/check_windows.bat
similarity index 85%
rename from tests/automatic/worker/worker_api_python/check_windows.bat
rename to tests/automatic/consumer/consumer_api_python/check_windows.bat
index fdb572831ee2fda48eec8eb38eae30d1aa7bacc5..8e41ab16c826198968f4603748f9ae3dda69aa09 100644
--- a/tests/automatic/worker/worker_api_python/check_windows.bat
+++ b/tests/automatic/consumer/consumer_api_python/check_windows.bat
@@ -21,13 +21,13 @@ echo hello1 > 1
 echo hello1 > 1_1
 
 
-python worker_api.py 127.0.0.1:8400 %source_path% %beamtime_id%  %token_test_run%  single || goto :error
+python consumer_api.py 127.0.0.1:8400 %source_path% %beamtime_id%  %token_test_run%  single || goto :error
 
 echo db.dropDatabase() | %mongo_exe% %database_name%
 
 for /l %%x in (1, 1, 10) do echo db.data.insert({"_id":%%x,"size":3,"images":[{"_id":1, "size":6,"name":"%%x_1","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}},{"_id":2, "size":6,"name":"%%x_2","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}},{"_id":3, "size":6,"name":"%%x_3","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}}]}) | %mongo_exe% %database_name%  || goto :error
 
-python worker_api.py 127.0.0.1:8400 %source_path% %beamtime_id%  %token_test_run% datasets || goto :error
+python consumer_api.py 127.0.0.1:8400 %source_path% %beamtime_id%  %token_test_run% datasets || goto :error
 
 
 goto :clean
diff --git a/tests/automatic/consumer/consumer_api_python/consumer_api.py b/tests/automatic/consumer/consumer_api_python/consumer_api.py
new file mode 100644
index 0000000000000000000000000000000000000000..dcbf21bd35406ee694796e9baed776f76c125abb
--- /dev/null
+++ b/tests/automatic/consumer/consumer_api_python/consumer_api.py
@@ -0,0 +1,155 @@
+from __future__ import print_function
+
+import asapo_consumer
+import json
+import sys
+
+def exit_on_noerr(name):
+    print (name)
+    sys.exit(1)
+
+
+def assert_metaname(meta,compare,name):
+    if meta['name'] != compare:
+        print ("error at "+name)
+        print ('meta: ', json.dumps(meta, indent=4, sort_keys=True))
+        sys.exit(1)
+
+def assert_usermetadata(meta,name):
+    if meta['meta']['test'] != 10:
+        print ('meta: ', json.dumps(meta, indent=4, sort_keys=True))
+        print ("error at "+name)
+        print ('meta: ', json.dumps(meta, indent=4, sort_keys=True))
+        sys.exit(1)
+
+
+def assert_eq(val,expected,name):
+    if val != expected:
+        print ("error at "+name)
+        print ('val: ', val,' expected: ',expected)
+        sys.exit(1)
+
+def check_single(broker,group_id_new):
+
+    _, meta = broker.get_next(group_id_new, meta_only=True)
+    assert_metaname(meta,"1","get next1")
+    assert_usermetadata(meta,"get next1")
+
+    data = broker.retrieve_data(meta)
+    assert_eq(data.tostring().decode("utf-8"),"hello1","retrieve_data data")
+
+    _, meta = broker.get_next(group_id_new, meta_only=True)
+    assert_metaname(meta,"2","get next2")
+    assert_usermetadata(meta,"get next2")
+
+    _, meta = broker.get_last(group_id_new, meta_only=True)
+    assert_metaname(meta,"5","get last1")
+    assert_usermetadata(meta,"get last1")
+
+    try:
+        _, meta = broker.get_next(group_id_new, meta_only=True)
+    except:
+        pass
+    else:
+        exit_on_noerr("get_next3")
+
+    size = broker.get_current_size()
+    assert_eq(size,5,"get_current_size")
+
+
+    broker.reset_lastread_marker(group_id_new)
+
+    _, meta = broker.get_next(group_id_new, meta_only=True)
+    assert_metaname(meta,"1","get next4")
+    assert_usermetadata(meta,"get next4")
+
+
+    _, meta = broker.get_by_id(3, group_id_new, meta_only=True)
+    assert_metaname(meta,"3","get get_by_id")
+    assert_usermetadata(meta,"get get_by_id")
+
+    _, meta = broker.get_next(group_id_new, meta_only=True)
+    assert_metaname(meta,"2","get next5")
+    assert_usermetadata(meta,"get next5")
+
+
+    broker.set_lastread_marker(4, group_id_new)
+
+    _, meta = broker.get_next(group_id_new, meta_only=True)
+    assert_metaname(meta,"5","get next6")
+    assert_usermetadata(meta,"get next6")
+
+
+    images = broker.query_images("meta.test = 10")
+    assert_eq(len(images),5,"size of query answer 1")
+    for image in images:
+        assert_usermetadata(image,"query_images")
+
+
+    images =  broker.query_images("meta.test = 10 AND name='1'")
+    assert_eq(len(images),1,"size of query answer 2 ")
+
+    for image in images:
+        assert_usermetadata(image,"query_images")
+
+    images = broker.query_images("meta.test = 11")
+    assert_eq(len(images),0,"size of query answer 3 ")
+
+    try:
+        images = broker.query_images("bla")
+    except:
+        pass
+    else:
+        exit_on_noerr("wrong query")
+
+def check_dataset(broker,group_id_new):
+    id, metas = broker.get_next_dataset(group_id_new)
+    assert_eq(id,1,"get_next_dataset1")
+    assert_metaname(metas[0],"1_1","get nextdataset1 name1")
+    assert_metaname(metas[1],"1_2","get nextdataset1 name2")
+    assert_usermetadata(metas[0],"get nextdataset1 meta")
+
+    data = broker.retrieve_data(metas[0])
+    assert_eq(data.tostring().decode("utf-8"),"hello1","retrieve_data from dataset data")
+
+
+    id, metas = broker.get_next_dataset(group_id_new)
+    assert_eq(id,2,"get_next_dataset2")
+    assert_metaname(metas[0],"2_1","get nextdataset2 name1")
+
+    id, metas = broker.get_last_dataset(group_id_new)
+    assert_eq(id,10,"get_last_dataset1")
+    assert_metaname(metas[2],"10_3","get get_last_dataset1 name3")
+
+    try:
+        id, metas = broker.get_next_dataset(group_id_new)
+    except asapo_consumer.AsapoEndOfStreamError as err:
+        assert_eq(err.id_max,10,"get_next_dataset3 id_max")
+        pass
+    else:
+        exit_on_noerr("get_next_dataset3 err")
+
+    id, metas = broker.get_dataset_by_id(8,group_id_new)
+    assert_eq(id,8,"get_dataset_by_id1 id")
+    assert_metaname(metas[2],"8_3","get get_dataset_by_id1 name3")
+
+    try:
+        id, metas = broker.get_next_dataset(group_id_new)
+    except:
+        pass
+    else:
+        exit_on_noerr("get_next_dataset4 err")
+
+source, path, beamtime, token, mode = sys.argv[1:]
+
+broker = asapo_consumer.create_server_broker(source,path, beamtime,"",token,1000)
+
+group_id_new = broker.generate_group_id()
+
+
+if mode == "single":
+    check_single(broker,group_id_new)
+
+if mode == "datasets":
+    check_dataset(broker,group_id_new)
+
diff --git a/tests/automatic/worker/folder_to_db/CMakeLists.txt b/tests/automatic/consumer/folder_to_db/CMakeLists.txt
similarity index 100%
rename from tests/automatic/worker/folder_to_db/CMakeLists.txt
rename to tests/automatic/consumer/folder_to_db/CMakeLists.txt
diff --git a/tests/automatic/worker/folder_to_db/check_linux.sh b/tests/automatic/consumer/folder_to_db/check_linux.sh
similarity index 100%
rename from tests/automatic/worker/folder_to_db/check_linux.sh
rename to tests/automatic/consumer/folder_to_db/check_linux.sh
diff --git a/tests/automatic/worker/folder_to_db/check_windows.bat b/tests/automatic/consumer/folder_to_db/check_windows.bat
similarity index 100%
rename from tests/automatic/worker/folder_to_db/check_windows.bat
rename to tests/automatic/consumer/folder_to_db/check_windows.bat
diff --git a/tests/automatic/worker/next_multithread_broker/CMakeLists.txt b/tests/automatic/consumer/next_multithread_broker/CMakeLists.txt
similarity index 85%
rename from tests/automatic/worker/next_multithread_broker/CMakeLists.txt
rename to tests/automatic/consumer/next_multithread_broker/CMakeLists.txt
index 69750c8f47ce37a2e8e7b4a5adccd022a6373328..6e0ef20c094b35d6b80e5cb4c422b29386bfe577 100644
--- a/tests/automatic/worker/next_multithread_broker/CMakeLists.txt
+++ b/tests/automatic/consumer/next_multithread_broker/CMakeLists.txt
@@ -6,7 +6,7 @@ set(SOURCE_FILES next_multithread_broker.cpp)
 # Executable and link
 ################################
 add_executable(${TARGET_NAME} ${SOURCE_FILES})
-target_link_libraries(${TARGET_NAME} test_common asapo-worker)
+target_link_libraries(${TARGET_NAME} test_common asapo-consumer)
 
 ################################
 # Testing
diff --git a/tests/automatic/worker/next_multithread_broker/check_linux.sh b/tests/automatic/consumer/next_multithread_broker/check_linux.sh
similarity index 100%
rename from tests/automatic/worker/next_multithread_broker/check_linux.sh
rename to tests/automatic/consumer/next_multithread_broker/check_linux.sh
diff --git a/tests/automatic/worker/next_multithread_broker/check_windows.bat b/tests/automatic/consumer/next_multithread_broker/check_windows.bat
similarity index 100%
rename from tests/automatic/worker/next_multithread_broker/check_windows.bat
rename to tests/automatic/consumer/next_multithread_broker/check_windows.bat
diff --git a/tests/automatic/worker/next_multithread_broker/next_multithread_broker.cpp b/tests/automatic/consumer/next_multithread_broker/next_multithread_broker.cpp
similarity index 98%
rename from tests/automatic/worker/next_multithread_broker/next_multithread_broker.cpp
rename to tests/automatic/consumer/next_multithread_broker/next_multithread_broker.cpp
index 54291d7fc64c59b09bdfe13e5b6c9121684663b0..021507b53b8519ee2f354038b1d0a3fd2b00ca26 100644
--- a/tests/automatic/worker/next_multithread_broker/next_multithread_broker.cpp
+++ b/tests/automatic/consumer/next_multithread_broker/next_multithread_broker.cpp
@@ -2,7 +2,7 @@
 #include <vector>
 #include <thread>
 #include <algorithm>
-#include "worker/data_broker.h"
+#include "consumer/data_broker.h"
 #include "testing.h"
 
 using asapo::M_AssertEq;
diff --git a/tests/automatic/curl_http_client/curl_http_client_command/CMakeLists.txt b/tests/automatic/curl_http_client/curl_http_client_command/CMakeLists.txt
index c52597f1ba80ad0367cdef0a9df3bf7b31f8abcd..94651979681b9fb6c0f2d4a577d5c307d6fd5fca 100644
--- a/tests/automatic/curl_http_client/curl_http_client_command/CMakeLists.txt
+++ b/tests/automatic/curl_http_client/curl_http_client_command/CMakeLists.txt
@@ -6,7 +6,7 @@ set(SOURCE_FILES curl_httpclient_command.cpp)
 # Executable and link
 ################################
 add_executable(${TARGET_NAME} ${SOURCE_FILES})
-target_link_libraries(${TARGET_NAME} test_common asapo-worker)
+target_link_libraries(${TARGET_NAME} test_common asapo-consumer)
 
 #set_target_properties(${TARGET_NAME} PROPERTIES LINKER_LANGUAGE CXX)
 #if (CMAKE_COMPILER_IS_GNUCXX)
diff --git a/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp b/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp
index e77a9024d6d66ed1fe8c6567e82d7b2e59abbc3c..8fb1df7623077f4f0644fd0135bde61f85184906 100644
--- a/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp
+++ b/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp
@@ -1,8 +1,8 @@
 #include <iostream>
 #include <vector>
-#include "worker/data_broker.h"
+#include "consumer/data_broker.h"
 #include "testing.h"
-#include "../../../worker/api/cpp/src/server_data_broker.h"
+#include "../../../consumer/api/cpp/src/server_data_broker.h"
 
 using asapo::M_AssertEq;
 using asapo::M_AssertContains;
diff --git a/tests/automatic/full_chain/simple_chain/check_linux.sh b/tests/automatic/full_chain/simple_chain/check_linux.sh
index b350400129e716f6be2bce05d10ff226dd7adde0..581805ec3978d3223c6c6b891c1acf0c6aa70d64 100644
--- a/tests/automatic/full_chain/simple_chain/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain/check_linux.sh
@@ -30,6 +30,9 @@ Cleanup() {
 
 echo "db.${beamtime_id}_detector.insert({dummy:1})" | mongo ${beamtime_id}_detector
 
+echo "db.dropDatabase()" | mongo ${beamtime_id}_detector
+
+
 nomad run nginx.nmd
 nomad run authorizer.nmd
 nomad run receiver.nmd
diff --git a/tests/automatic/full_chain/simple_chain/check_windows.bat b/tests/automatic/full_chain/simple_chain/check_windows.bat
index 4e1959e26946825bfe83a72e468adcba39c1018a..06292ee28b6f3f5afd8a387802837d11e49192b3 100644
--- a/tests/automatic/full_chain/simple_chain/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain/check_windows.bat
@@ -25,7 +25,7 @@ mkdir %receiver_folder%
 start /B "" "%1" %proxy_address% %beamtime_id% 100 1000 4 0 100
 ping 1.0.0.0 -n 1 -w 100 > nul
 
-REM worker
+REM consumer
 "%2" %proxy_address% %receiver_folder% %beamtime_id% 2 %token% 5000  1 > out.txt
 type out.txt
 findstr /i /l /c:"Processed 1000 file(s)"  out.txt || goto :error
diff --git a/tests/automatic/full_chain/simple_chain_dataset/check_windows.bat b/tests/automatic/full_chain/simple_chain_dataset/check_windows.bat
index 5df75776f5c10e3f84e4299faac4e238af162750..0d49c33bc160562575b619e2669e630fda1aefba 100644
--- a/tests/automatic/full_chain/simple_chain_dataset/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_dataset/check_windows.bat
@@ -25,7 +25,7 @@ mkdir %receiver_folder%
 start /B "" "%1" %proxy_address% %beamtime_id% 100 100 4 0 100 5
 ping 1.0.0.0 -n 1 -w 100 > nul
 
-REM worker
+REM consumer
 "%2" %proxy_address% %receiver_folder% %beamtime_id% 2 %token% 5000 1 1 > out.txt
 type out.txt
 findstr /i /l /c:"Processed 100 dataset(s)"  out.txt || goto :error
diff --git a/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat
index 26c82bfe9eb3dd09c4ab264bc97b68e91fe7e7f4..370b73d063067c5d509f5c33ed847c99366ca44e 100644
--- a/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat
@@ -39,7 +39,7 @@ echo hello > c:\tmp\asapo\test_in\test2\file2
 ping 1.0.0.0 -n 10 -w 100 > nul
 
 
-REM worker
+REM consumer
 "%2" %proxy_address% %receiver_folder% %beamtime_id% 2 %token% 1000  1 > out.txt
 type out.txt
 findstr /i /l /c:"Processed 3 file(s)"  out.txt || goto :error
diff --git a/tests/automatic/full_chain/simple_chain_filegen_batches/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen_batches/check_windows.bat
index 110f3fd0b5c3998f0a3802de8a5c79049d6b15e9..0c61e42c6deba79dba564c82283d73ae450d79d6 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_batches/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen_batches/check_windows.bat
@@ -39,7 +39,7 @@ echo hello > c:\tmp\asapo\test_in\test2\file2
 ping 1.0.0.0 -n 10 -w 100 > nul
 
 
-REM worker
+REM consumer
 "%2" %proxy_address% %receiver_folder% %beamtime_id% 2 %token% 1000 1 1 > out.txt
 type out.txt
 findstr /i /l /c:"Processed 1 dataset(s)"  out.txt || goto :error
diff --git a/tests/automatic/full_chain/simple_chain_filegen_multisource/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen_multisource/check_windows.bat
index 8afceea409cc0f4141b6a1c2c22faeff58ac63c5..10a8fdc571dc2dc8649a7bb171ed8741a38357f4 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_multisource/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen_multisource/check_windows.bat
@@ -45,7 +45,7 @@ echo hello > c:\tmp\asapo\test_in\test2\file2
 ping 1.0.0.0 -n 10 -w 100 > nul
 
 
-REM worker
+REM consumer
 "%2" %proxy_address% %receiver_folder% %beamtime_id% 2 %token% 1000 1 1 > out.txt
 type out.txt
 findstr /i /l /c:"Processed 2 dataset(s)"  out.txt || goto :error
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_windows.bat
index c72e4822c04055731432675c11b9b424513e750a..f83190aa8ec10646c0166c6aa4dd5894d50e5259 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_windows.bat
@@ -39,7 +39,7 @@ echo hello3 > c:\tmp\asapo\test_in\test2\file2
 ping 1.0.0.0 -n 10 -w 100 > nul
 
 
-REM worker
+REM consumer
 "%2" %proxy_address%  %receiver_folder% %beamtime_id% 2 %token% 1000 0 > out.txt
 type out.txt
 findstr /i /l /c:"Processed 3 file(s)" out.txt || goto :error
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_windows.bat
index 34affc93cfd7026fb2866cbe75bc4dbed19cbfe6..b26d2359f225daf36464d3c8ed0f4396b3893c6a 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_windows.bat
@@ -39,7 +39,7 @@ echo hello3 > c:\tmp\asapo\test_in\test2\file2
 ping 1.0.0.0 -n 10 -w 100 > nul
 
 
-REM worker
+REM consumer
 "%2" %proxy_address% %receiver_folder% %beamtime_id% 2 %token% 1000 0 > out.txt
 type out.txt
 findstr /i /l /c:"Processed 3 file(s)" out.txt || goto :error
diff --git a/tests/automatic/full_chain/simple_chain_metadata/check_windows.bat b/tests/automatic/full_chain/simple_chain_metadata/check_windows.bat
index 3601898833ca4505a90f6248c803da60ce704db4..e6256f932f03fefb4399e259934220c74298b09f 100644
--- a/tests/automatic/full_chain/simple_chain_metadata/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_metadata/check_windows.bat
@@ -24,7 +24,7 @@ REM producer
 mkdir %receiver_folder%
 "%1" %proxy_address% %beamtime_id% 100 0 1 0 100
 
-REM worker
+REM consumer
 "%2" %proxy_address% %receiver_folder% %beamtime_id% 2 %token% 0  1 > out.txt
 type out.txt
 findstr /i /l /c:"dummy_meta"  out.txt || goto :error
diff --git a/tests/automatic/full_chain/simple_chain_usermeta_python/CMakeLists.txt b/tests/automatic/full_chain/simple_chain_usermeta_python/CMakeLists.txt
index e11f1a5e978ed7649333954ca20860c58bc825f0..abb01d4f6cce9ed9c9e31682177cc82a720c5257 100644
--- a/tests/automatic/full_chain/simple_chain_usermeta_python/CMakeLists.txt
+++ b/tests/automatic/full_chain/simple_chain_usermeta_python/CMakeLists.txt
@@ -8,7 +8,7 @@ prepare_asapo()
 if (UNIX)
     get_target_property(PYTHON_LIBS python-lib2 BINARY_DIR)
 else()
-    get_target_property(PYTHON_LIBS asapo_worker BINARY_DIR)
+    get_target_property(PYTHON_LIBS asapo_consumer BINARY_DIR)
 endif()
 
 
diff --git a/tests/automatic/full_chain/simple_chain_usermeta_python/check_windows.bat b/tests/automatic/full_chain/simple_chain_usermeta_python/check_windows.bat
index a913528261b2df19797207e9f2d492d98db93f7e..c136540637c036254f993b0b8769afe801672bcf 100644
--- a/tests/automatic/full_chain/simple_chain_usermeta_python/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_usermeta_python/check_windows.bat
@@ -24,7 +24,7 @@ REM producer
 mkdir %receiver_folder%
 "%1" %proxy_address% %beamtime_id% 100 100 4 0 100
 
-REM worker
+REM consumer
 set PYTHONPATH=%4
 
 python3 %3/get_user_meta.py %proxy_address%  %receiver_folder% %beamtime_id%  %token% new > out
diff --git a/tests/automatic/full_chain/simple_chain_usermeta_python/get_user_meta.py b/tests/automatic/full_chain/simple_chain_usermeta_python/get_user_meta.py
index a89841dd6c78ac48ff83f5da69c6db39e6190af0..af8730388987c3e4d6357da00c332c82a33e2b50 100644
--- a/tests/automatic/full_chain/simple_chain_usermeta_python/get_user_meta.py
+++ b/tests/automatic/full_chain/simple_chain_usermeta_python/get_user_meta.py
@@ -1,21 +1,17 @@
 from __future__ import print_function
 
-import asapo_worker
+import asapo_consumer
 import json
 import sys
 
 source, path, beamtime, token, group_id = sys.argv[1:]
 
-broker, err = asapo_worker.create_server_broker(source,path, beamtime,"",token,1000)
+broker = asapo_consumer.create_server_broker(source,path, beamtime,"",token,1000)
 
+images = broker.query_images("meta.user_meta regexp 'test*' order by _id")
 
-images,err = broker.query_images("meta.user_meta regexp 'test*' order by _id")
-
-if err != None:
-    print ('err: ', err)
-else:
-    print ('found images:',len(images))
-    print (images[99]['meta']['user_meta'])
+print ('found images:',len(images))
+print (images[99]['meta']['user_meta'])
 
 
 
diff --git a/tests/automatic/full_chain/two_beamlines/check_linux.sh b/tests/automatic/full_chain/two_beamlines/check_linux.sh
index 2bc02dff355baa00183e0808938f2dd4185b0bf0..6fb9fd9181cefd28a9c1a9cd3397a3f06e58dc8f 100644
--- a/tests/automatic/full_chain/two_beamlines/check_linux.sh
+++ b/tests/automatic/full_chain/two_beamlines/check_linux.sh
@@ -53,6 +53,6 @@ $1 localhost:8400 ${beamtime_id1} 100 1000 4 0 100 &
 $1 localhost:8400 ${beamtime_id2} 100 900 4 0 100 &
 #producerid=`echo $!`
 
-#workers
+#consumers
 $2 ${proxy_address} ${receiver_folder1} ${beamtime_id1} 2 $token1 10000 0  | tee /dev/stderr | grep "Processed 1000 file(s)"
 $2 ${proxy_address} ${receiver_folder2} ${beamtime_id2} 2 $token2 10000 0 | tee /dev/stderr | grep "Processed 900 file(s)"
diff --git a/tests/automatic/full_chain/two_beamlines/check_windows.bat b/tests/automatic/full_chain/two_beamlines/check_windows.bat
index 26799acadf9dc0e458aad7669d2c9bf493282737..e70e50f841e89e11b5c04b2266522f4266968c60 100644
--- a/tests/automatic/full_chain/two_beamlines/check_windows.bat
+++ b/tests/automatic/full_chain/two_beamlines/check_windows.bat
@@ -35,7 +35,7 @@ start /B "" "%1" %proxy_address% %beamtime_id1% 100 1000 4 0 100
 start /B "" "%1" %proxy_address% %beamtime_id2% 100 900 4 0 100
 ping 1.0.0.0 -n 1 -w 100 > nul
 
-REM worker
+REM consumer
 "%2" %proxy_address% %receiver_folder1% %beamtime_id1% 2 %token1% 10000  0 > out1.txt
 type out1.txt
 findstr /i /l /c:"Processed 1000 file(s)"  out1.txt || goto :error
diff --git a/tests/automatic/full_chain/two_streams/check_linux.sh b/tests/automatic/full_chain/two_streams/check_linux.sh
index 060821551fde1c99de68a595d8eb62b3442c90ec..a1a6153efd0cfd027b4aaa5f2eb4369f5bd9e54d 100644
--- a/tests/automatic/full_chain/two_streams/check_linux.sh
+++ b/tests/automatic/full_chain/two_streams/check_linux.sh
@@ -46,6 +46,6 @@ $1 localhost:8400 ${beamtime_id}%${stream1} 100 1000 4 0 100 &
 $1 localhost:8400 ${beamtime_id}%${stream2} 100 900 4 0 100 &
 
 
-#workers
+#consumers
 $2 ${proxy_address} ${receiver_folder} ${beamtime_id}%${stream1} 2 $token 10000 0  | tee /dev/stderr | grep "Processed 1000 file(s)"
 $2 ${proxy_address} ${receiver_folder} ${beamtime_id}%${stream2} 2 $token 10000 0 | tee /dev/stderr | grep "Processed 900 file(s)"
diff --git a/tests/automatic/full_chain/two_streams/check_windows.bat b/tests/automatic/full_chain/two_streams/check_windows.bat
index 9ab2ceffa343be1308f80e7b3a3d02c53e85b424..3bc90da7c238d441e0099d55dc063151e3ae51d9 100644
--- a/tests/automatic/full_chain/two_streams/check_windows.bat
+++ b/tests/automatic/full_chain/two_streams/check_windows.bat
@@ -30,7 +30,7 @@ start /B "" "%1" %proxy_address% %beamtime_id%%%%stream1% 100 1000 4 0 100
 start /B "" "%1" %proxy_address% %beamtime_id%%%%stream2% 100 900 4 0 100
 ping 1.0.0.0 -n 1 -w 100 > nul
 
-REM worker
+REM consumer
 "%2" %proxy_address% %receiver_folder% %beamtime_id%%%%stream1% 2 %token% 10000  0 > out1.txt
 type out1.txt
 findstr /i /l /c:"Processed 1000 file(s)"  out1.txt || goto :error
diff --git a/tests/automatic/high_avail/receiver_mongo_restart/check_linux.sh b/tests/automatic/high_avail/receiver_mongo_restart/check_linux.sh
index f8f9a763921735a77136281a3021d4b8e0b91b46..93204f1016010d698793b8afc7323a1b761142ab 100644
--- a/tests/automatic/high_avail/receiver_mongo_restart/check_linux.sh
+++ b/tests/automatic/high_avail/receiver_mongo_restart/check_linux.sh
@@ -51,7 +51,7 @@ start_mongo
 wait_mongo
 
 
-# create db before worker starts reading it. todo: git rid of it
+# create db before consumer starts reading it. todo: get rid of it
 echo "db.${beamtime_id}_detector.insert({dummy:1})" | mongo --port 27016 ${beamtime_id}_detector
 
 sed -i 's/27017/27016/g' discovery.json.tpl
diff --git a/tests/automatic/high_avail/services_restart/check_linux.sh b/tests/automatic/high_avail/services_restart/check_linux.sh
index c68d798463f82ea41c5c86a90061a6d203db8330..f295356e92aa5ef4e07ff04ffcadc251b8b628e0 100644
--- a/tests/automatic/high_avail/services_restart/check_linux.sh
+++ b/tests/automatic/high_avail/services_restart/check_linux.sh
@@ -42,7 +42,7 @@ $1 localhost:8400 ${beamtime_id} 100 $5 4 0 100 &
 #producerid=`echo $!`
 
 
-#worker
+#consumer
 $2 ${proxy_address} dummy_path ${beamtime_id} 2 $token 30000 1 &> output.txt &
 
 sleep 1
diff --git a/tests/automatic/json_parser/parse_config_file/CMakeLists.txt b/tests/automatic/json_parser/parse_config_file/CMakeLists.txt
index 10872462b4280234addbc9d7c48367b0d4b4d0f1..371e3517e54c3ee15b21f7de27be7205a0a67809 100644
--- a/tests/automatic/json_parser/parse_config_file/CMakeLists.txt
+++ b/tests/automatic/json_parser/parse_config_file/CMakeLists.txt
@@ -6,7 +6,7 @@ set(SOURCE_FILES parse_config_file.cpp)
 # Executable and link
 ################################
 add_executable(${TARGET_NAME} ${SOURCE_FILES})
-target_link_libraries(${TARGET_NAME} test_common asapo-worker)
+target_link_libraries(${TARGET_NAME} test_common asapo-consumer)
 target_include_directories(${TARGET_NAME} PUBLIC ${ASAPO_CXX_COMMON_INCLUDE_DIR})
 
 ################################
diff --git a/tests/automatic/producer/python_api/check_linux.sh b/tests/automatic/producer/python_api/check_linux.sh
index a3262ca32c1767f8db99bda8a9ab3b4653ffbc9b..af3fdd8e8c39a408d234d4ce0b25434567cbccc9 100644
--- a/tests/automatic/producer/python_api/check_linux.sh
+++ b/tests/automatic/producer/python_api/check_linux.sh
@@ -13,21 +13,21 @@ receiver_folder=${receiver_root_folder}/${beamline}/${beamtime_id}
 Cleanup() {
 	echo cleanup
 	rm -rf ${receiver_root_folder}
-    nomad stop receiver
-    nomad stop discovery
-    nomad stop authorizer
-    nomad stop nginx
-    echo "db.dropDatabase()" | mongo ${beamtime_id}_${stream}
+    nomad stop receiver >/dev/null
+    nomad stop discovery >/dev/null
+    nomad stop authorizer >/dev/null
+    nomad stop nginx >/dev/null
+    echo "db.dropDatabase()" | mongo ${beamtime_id}_${stream} >/dev/null
 }
 
 export PYTHONPATH=$2:${PYTHONPATH}
 
-echo "db.${beamtime_id}_${stream}.insert({dummy:1})" | mongo ${beamtime_id}_${stream}
+echo "db.${beamtime_id}_${stream}.insert({dummy:1})" | mongo ${beamtime_id}_${stream} >/dev/null
 
-nomad run authorizer.nmd
-nomad run nginx.nmd
-nomad run receiver.nmd
-nomad run discovery.nmd
+nomad run authorizer.nmd >/dev/null
+nomad run nginx.nmd >/dev/null
+nomad run receiver.nmd >/dev/null
+nomad run discovery.nmd >/dev/null
 
 mkdir -p ${receiver_folder}
 
diff --git a/tests/automatic/producer/python_api/producer_api.py b/tests/automatic/producer/python_api/producer_api.py
index 8e6ccb7427241941e25a59d64f0cfbd932f186f9..a83a6b5e42c6c2201b425a822f0298c343330f45 100644
--- a/tests/automatic/producer/python_api/producer_api.py
+++ b/tests/automatic/producer/python_api/producer_api.py
@@ -25,45 +25,47 @@ def callback(header,err):
         print ("successfuly sent: ",header)
     lock.release()
 
-def assert_err(err):
-    if err is not None:
-        print(err)
-        sys.exit(1)
+producer  = asapo_producer.create_producer(endpoint,beamtime, stream, token, nthreads)
 
-producer, err = asapo_producer.create_producer(endpoint,beamtime, stream, token, nthreads)
-assert_err(err)
 
 producer.set_log_level("info")
 
 #send single file
-err = producer.send_file(1, local_path = "./file1", exposed_path = stream+"/"+"file1", user_meta = '{"test_key":"test_val"}', callback = callback)
-assert_err(err)
+producer.send_file(1, local_path = "./file1", exposed_path = stream+"/"+"file1", user_meta = '{"test_key":"test_val"}', callback = callback)
 
 #send subsets
 producer.send_file(2, local_path = "./file1", exposed_path = stream+"/"+"file2",subset=(2,2),user_meta = '{"test_key":"test_val"}', callback = callback)
 producer.send_file(3, local_path = "./file1", exposed_path = stream+"/"+"file3",subset=(2,2),user_meta = '{"test_key":"test_val"}', callback = callback)
 
 #send meta only
-err = producer.send_file(3, local_path = "./not_exist",exposed_path = "./whatever",
+producer.send_file(3, local_path = "./not_exist",exposed_path = "./whatever",
                          ingest_mode = asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback = callback)
-assert_err(err)
 
 data = np.arange(10,dtype=np.float64)
 
 #send data from array
 err = producer.send_data(4, stream+"/"+"file5",data,
                          ingest_mode = asapo_producer.DEFAULT_INGEST_MODE, callback = callback)
-assert_err(err)
 
 #send data from string
 err = producer.send_data(5, stream+"/"+"file6",b"hello",
                          ingest_mode = asapo_producer.DEFAULT_INGEST_MODE, callback = callback)
-assert_err(err)
 
 #send metadata only
 err = producer.send_data(6, stream+"/"+"file7",None,
                          ingest_mode = asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback = callback)
-assert_err(err)
+
+
+# create with error
+try:
+    producer  = asapo_producer.create_producer(endpoint,beamtime, stream, token, 0)
+except Exception as e:
+    print(e)
+else:
+    print("should be error")
+    sys.exit(1)
+
+
 
 time.sleep(5)
 
diff --git a/tests/automatic/producer_receiver/transfer_datasets/check_linux.sh b/tests/automatic/producer_receiver/transfer_datasets/check_linux.sh
index 0d3c46ef044b59b1be9409b80d9e95480b11ee08..8e7bde41349fe78330d12c13c72d5021d536962b 100644
--- a/tests/automatic/producer_receiver/transfer_datasets/check_linux.sh
+++ b/tests/automatic/producer_receiver/transfer_datasets/check_linux.sh
@@ -24,7 +24,7 @@ Cleanup() {
 echo "db.dropDatabase()" | mongo ${beamtime_id}_detector
 
 
-# create db before worker starts reading it. todo: git rid of it
+# create db before consumer starts reading it. todo: get rid of it
 echo "db.${beamtime_id}_detector.insert({dummy:1})" | mongo ${beamtime_id}_detector
 
 nomad run authorizer.nmd
diff --git a/tests/automatic/producer_receiver/transfer_single_file/check_linux.sh b/tests/automatic/producer_receiver/transfer_single_file/check_linux.sh
index 2b44d752171c64de9df817f8015fd3fce0171f21..4f14e3c3a3d7842480a7d0a263512f381aefa6ae 100644
--- a/tests/automatic/producer_receiver/transfer_single_file/check_linux.sh
+++ b/tests/automatic/producer_receiver/transfer_single_file/check_linux.sh
@@ -21,7 +21,7 @@ Cleanup() {
     influx -execute "drop database ${database_name}"
 }
 
-# create db before worker starts reading it. todo: git rid of it
+# create db before consumer starts reading it. todo: get rid of it
 echo "db.${beamtime_id}_detector.insert({dummy:1})" | mongo ${beamtime_id}_detector
 
 nomad run authorizer.nmd
diff --git a/tests/automatic/worker/connect_multithread/CMakeLists.txt b/tests/automatic/worker/connect_multithread/CMakeLists.txt
deleted file mode 100644
index bdc974e10b7c45b0eb303b5b242057c80ff3d9d3..0000000000000000000000000000000000000000
--- a/tests/automatic/worker/connect_multithread/CMakeLists.txt
+++ /dev/null
@@ -1,21 +0,0 @@
-set(TARGET_NAME content_multithread)
-set(SOURCE_FILES content_multithread.cpp)
-
-
-################################
-# Executable and link
-################################
-add_executable(${TARGET_NAME} ${SOURCE_FILES})
-target_link_libraries(${TARGET_NAME} test_common asapo-worker ${ASAPO_COMMON_IO_LIBRARIES})
-if (CMAKE_COMPILER_IS_GNUCXX)
-    set_target_properties(${TARGET_NAME} PROPERTIES LINK_FLAGS_DEBUG "--coverage")
-endif()
-
-
-################################
-# Testing
-################################
-
-add_test_setup_cleanup(${TARGET_NAME})
-add_integration_test(${TARGET_NAME} connected "test 10")
-
diff --git a/tests/automatic/worker/connect_multithread/cleanup_linux.sh b/tests/automatic/worker/connect_multithread/cleanup_linux.sh
deleted file mode 100644
index b402b53d5454d24484eb66c05809dc02c7695b6d..0000000000000000000000000000000000000000
--- a/tests/automatic/worker/connect_multithread/cleanup_linux.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env bash
-
-rm -rf test
diff --git a/tests/automatic/worker/connect_multithread/cleanup_windows.bat b/tests/automatic/worker/connect_multithread/cleanup_windows.bat
deleted file mode 100644
index a11f20594f8db9314cc4fbf8cb21feebc0c9feb6..0000000000000000000000000000000000000000
--- a/tests/automatic/worker/connect_multithread/cleanup_windows.bat
+++ /dev/null
@@ -1 +0,0 @@
-rmdir /S /Q test
diff --git a/tests/automatic/worker/connect_multithread/content_multithread.cpp b/tests/automatic/worker/connect_multithread/content_multithread.cpp
deleted file mode 100644
index 070c9c8905cf65d6d27538bed26879781b12e553..0000000000000000000000000000000000000000
--- a/tests/automatic/worker/connect_multithread/content_multithread.cpp
+++ /dev/null
@@ -1,65 +0,0 @@
-#include <iostream>
-#include <vector>
-#include <thread>
-#include <algorithm>
-#include "worker/data_broker.h"
-#include "worker/worker_error.h"
-#include "testing.h"
-
-using asapo::M_AssertEq;
-
-void Assert(std::vector<asapo::Error>& errors, int nthreads) {
-    int count_ok = (int) std::count(std::begin(errors),
-                                    std::end(errors),
-                                    nullptr);
-
-    int count_already_connected = 0;
-    for (auto& error : errors) {
-        if (!error) continue;
-        if (error == asapo::WorkerErrorTemplates::kSourceAlreadyConnected)
-            count_already_connected++;
-    }
-
-    M_AssertEq(1, count_ok);
-    M_AssertEq(nthreads - 1, count_already_connected);
-}
-
-struct Args {
-    std::string folder;
-    int nthreads;
-};
-
-Args GetArgs(int argc, char* argv[]) {
-    if (argc != 3) {
-        std::cout << "Wrong number of arguments" << std::endl;
-        exit(EXIT_FAILURE);
-    }
-    std::string folder{argv[1]};
-    int nthreads = std::stoi(argv[2]);
-    return Args{folder, nthreads};
-}
-
-int main(int argc, char* argv[]) {
-
-    auto args = GetArgs(argc, argv);
-
-    asapo::Error err;
-    auto broker = asapo::DataBrokerFactory::CreateFolderBroker(args.folder, &err);
-
-    std::vector<asapo::Error>errors(args.nthreads);
-
-    std::vector<std::thread> threads;
-    for (int i = 0; i < args.nthreads; i++) {
-        threads.emplace_back(std::thread([&, i] {
-            errors[i] = broker->Connect();
-        }));
-    }
-
-    for (auto& thread : threads) {
-        thread.join();
-    }
-
-    Assert(errors, args.nthreads);
-
-    return 0;
-}
diff --git a/tests/automatic/worker/connect_multithread/setup_linux.sh b/tests/automatic/worker/connect_multithread/setup_linux.sh
deleted file mode 100644
index 7b3353007e20e34b97f89026cb1623c616def71f..0000000000000000000000000000000000000000
--- a/tests/automatic/worker/connect_multithread/setup_linux.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env bash
-
-mkdir -p test
-cd test
-touch 1 2 3 4 5 6 8 9 10
-
diff --git a/tests/automatic/worker/connect_multithread/setup_windows.bat b/tests/automatic/worker/connect_multithread/setup_windows.bat
deleted file mode 100644
index 7a4fb63dd89dc2b96d84ba37951c6d1e5919a220..0000000000000000000000000000000000000000
--- a/tests/automatic/worker/connect_multithread/setup_windows.bat
+++ /dev/null
@@ -1,5 +0,0 @@
-mkdir test
-for /l %%x in (1, 1, 10) do (
-   type nul > test\%%x
-)
-
diff --git a/tests/automatic/worker/next_multithread_folder/CMakeLists.txt b/tests/automatic/worker/next_multithread_folder/CMakeLists.txt
deleted file mode 100644
index 949b8c48c151837404d89b1b38f19f038c21cc0f..0000000000000000000000000000000000000000
--- a/tests/automatic/worker/next_multithread_folder/CMakeLists.txt
+++ /dev/null
@@ -1,26 +0,0 @@
-set(TARGET_NAME next_multithread_folder)
-set(SOURCE_FILES next_multithread_folder.cpp)
-
-
-################################
-# Executable and link
-################################
-add_executable(${TARGET_NAME} ${SOURCE_FILES})
-
-#Add all necessary common libraries
-GET_PROPERTY(ASAPO_COMMON_IO_LIBRARIES GLOBAL PROPERTY ASAPO_COMMON_IO_LIBRARIES)
-target_link_libraries(${TARGET_NAME} ${ASAPO_COMMON_IO_LIBRARIES})
-
-target_link_libraries(${TARGET_NAME} test_common asapo-worker)
-if (CMAKE_COMPILER_IS_GNUCXX)
-    set_target_properties(${TARGET_NAME} PROPERTIES LINK_FLAGS_DEBUG "--coverage")
-endif()
-
-
-################################
-# Testing
-################################
-
-add_test_setup_cleanup(${TARGET_NAME})
-add_integration_test(${TARGET_NAME} readnext "test 50 500" "test 50 1")
-
diff --git a/tests/automatic/worker/next_multithread_folder/cleanup_linux.sh b/tests/automatic/worker/next_multithread_folder/cleanup_linux.sh
deleted file mode 100644
index b402b53d5454d24484eb66c05809dc02c7695b6d..0000000000000000000000000000000000000000
--- a/tests/automatic/worker/next_multithread_folder/cleanup_linux.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env bash
-
-rm -rf test
diff --git a/tests/automatic/worker/next_multithread_folder/cleanup_windows.bat b/tests/automatic/worker/next_multithread_folder/cleanup_windows.bat
deleted file mode 100644
index a11f20594f8db9314cc4fbf8cb21feebc0c9feb6..0000000000000000000000000000000000000000
--- a/tests/automatic/worker/next_multithread_folder/cleanup_windows.bat
+++ /dev/null
@@ -1 +0,0 @@
-rmdir /S /Q test
diff --git a/tests/automatic/worker/next_multithread_folder/next_multithread_folder.cpp b/tests/automatic/worker/next_multithread_folder/next_multithread_folder.cpp
deleted file mode 100644
index f9e4930eaa5d123e2017359f76a8506ccf4d5a89..0000000000000000000000000000000000000000
--- a/tests/automatic/worker/next_multithread_folder/next_multithread_folder.cpp
+++ /dev/null
@@ -1,70 +0,0 @@
-#include <iostream>
-#include <vector>
-#include <thread>
-#include <algorithm>
-#include "worker/data_broker.h"
-#include "testing.h"
-
-using asapo::M_AssertEq;
-using asapo::M_AssertTrue;
-
-void Assert(std::vector<asapo::FileInfo> file_infos, int nthreads) {
-    int nfiles = (int) file_infos.size();
-    M_AssertEq(nthreads, nfiles);
-
-    std::vector<std::string> expect, result;
-    for (int i = 0; i < nthreads; i++) {
-        expect.push_back(std::to_string(i));
-        result.push_back(file_infos[i].name);
-    }
-    // file names created by setup.sh should be '0','1',... Each thread should access different file.
-    M_AssertTrue(std::is_permutation(expect.begin(), expect.end(), result.begin()));
-}
-
-struct Args {
-    std::string folder;
-    int nthreads;
-    int nattempts;
-};
-
-Args GetArgs(int argc, char* argv[]) {
-    if (argc != 4) {
-        std::cout << "Wrong number of arguments" << std::endl;
-        exit(EXIT_FAILURE);
-    }
-    std::string folder{argv[1]};
-    int nthreads = std::stoi(argv[2]);
-    int nattempts = std::stoi(argv[3]);
-    return Args{folder, nthreads, nattempts};
-}
-
-void TestAll(const Args& args) {
-    asapo::Error err;
-    auto broker = asapo::DataBrokerFactory::CreateFolderBroker(args.folder, &err);
-    broker->Connect();
-
-    std::vector<asapo::FileInfo>file_infos(args.nthreads);
-    auto exec_next = [&](int i) {
-        broker->GetNext(&file_infos[i], "", nullptr);
-    };
-
-    std::vector<std::thread> threads;
-    for (int i = 0; i < args.nthreads; i++) {
-        threads.emplace_back(std::thread(exec_next, i));
-    }
-
-    for (auto& thread : threads) {
-        thread.join();
-    }
-    Assert(file_infos, args.nthreads);
-}
-
-int main(int argc, char* argv[]) {
-
-    auto args = GetArgs(argc, argv);
-
-    for (int nattempt = 0; nattempt < args.nattempts; nattempt++) {
-        TestAll(args);
-    }
-    return 0;
-}
diff --git a/tests/automatic/worker/next_multithread_folder/setup_linux.sh b/tests/automatic/worker/next_multithread_folder/setup_linux.sh
deleted file mode 100644
index 0ce6625eb935428909869fd2dad55bc2f1816216..0000000000000000000000000000000000000000
--- a/tests/automatic/worker/next_multithread_folder/setup_linux.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env bash
-
-mkdir -p test
-cd test
-for i in `seq 0 49`;
-do
-    echo $i > $i
-done
diff --git a/tests/automatic/worker/next_multithread_folder/setup_windows.bat b/tests/automatic/worker/next_multithread_folder/setup_windows.bat
deleted file mode 100644
index 6806f381ac8d88d2666dbe43d85722e4ffa8912d..0000000000000000000000000000000000000000
--- a/tests/automatic/worker/next_multithread_folder/setup_windows.bat
+++ /dev/null
@@ -1,5 +0,0 @@
-mkdir test
-for /l %%x in (0, 1, 49) do (
-   echo %%x > test\%%x
-)
-
diff --git a/tests/automatic/worker/worker_api_python/worker_api.py b/tests/automatic/worker/worker_api_python/worker_api.py
deleted file mode 100644
index c7086e9b3db9c20737597e56ba782ea6aabc6cd0..0000000000000000000000000000000000000000
--- a/tests/automatic/worker/worker_api_python/worker_api.py
+++ /dev/null
@@ -1,157 +0,0 @@
-from __future__ import print_function
-
-import asapo_worker
-import json
-import sys
-
-def assert_noterr(err, name):
-    if err != None:
-        print (name + ' err: ', err)
-        sys.exit(1)
-
-def assert_err(err, name):
-    if err == None:
-        print (name + ' err: ', err)
-        sys.exit(1)
-
-
-def assert_metaname(meta,compare,name):
-    if meta['name'] != compare:
-        print ("error at "+name)
-        print ('meta: ', json.dumps(meta, indent=4, sort_keys=True))
-        sys.exit(1)
-
-def assert_usermetadata(meta,name):
-    if meta['meta']['test'] != 10:
-        print ('meta: ', json.dumps(meta, indent=4, sort_keys=True))
-        print ("error at "+name)
-        print ('meta: ', json.dumps(meta, indent=4, sort_keys=True))
-        sys.exit(1)
-
-
-def assert_eq(val,expected,name):
-    if val != expected:
-        print ("error at "+name)
-        print ('val: ', val,' expected: ',expected)
-        sys.exit(1)
-
-def check_single(broker,group_id_new):
-    _, meta, err = broker.get_next(group_id_new, meta_only=True)
-    assert_noterr(err, "get_next")
-    assert_metaname(meta,"1","get next1")
-    assert_usermetadata(meta,"get next1")
-
-    data, err = broker.retrieve_data(meta)
-    assert_eq(data.tostring().decode("utf-8"),"hello1","retrieve_data data")
-    assert_noterr(err, "retrieve_data err")
-
-    _, meta, err = broker.get_next(group_id_new, meta_only=True)
-    assert_noterr(err, "get_next2")
-    assert_metaname(meta,"2","get next2")
-    assert_usermetadata(meta,"get next2")
-
-    _, meta, err = broker.get_last(group_id_new, meta_only=True)
-    assert_noterr(err, "get_last1")
-    assert_metaname(meta,"5","get last1")
-    assert_usermetadata(meta,"get last1")
-
-    _, meta, err = broker.get_next(group_id_new, meta_only=True)
-    assert_err(err, "get_next3")
-
-    size,err = broker.get_ndatasets()
-    assert_noterr(err, "get_ndatasets")
-    assert_eq(size,5,"get_ndatasets")
-
-
-    err = broker.reset_counter(group_id_new)
-    assert_noterr(err, "reset_counter")
-
-    _, meta, err = broker.get_next(group_id_new, meta_only=True)
-    assert_noterr(err, "get_next4")
-    assert_metaname(meta,"1","get next4")
-    assert_usermetadata(meta,"get next4")
-
-
-    _, meta, err = broker.get_by_id(3, group_id_new, meta_only=True)
-    assert_noterr(err, "get_by_id")
-    assert_metaname(meta,"3","get get_by_id")
-    assert_usermetadata(meta,"get get_by_id")
-
-    _, meta, err = broker.get_next(group_id_new, meta_only=True)
-    assert_noterr(err, "get_next5")
-    assert_metaname(meta,"4","get next5")
-    assert_usermetadata(meta,"get next5")
-
-
-    images,err = broker.query_images("meta.test = 10")
-    assert_noterr(err, "query1")
-    assert_eq(len(images),5,"size of query answer 1")
-    for image in images:
-        assert_usermetadata(image,"query_images")
-
-
-    images,err = broker.query_images("meta.test = 10 AND name='1'")
-    assert_eq(len(images),1,"size of query answer 2 ")
-    assert_noterr(err, "query2")
-
-    for image in images:
-        assert_usermetadata(image,"query_images")
-
-    images,err = broker.query_images("meta.test = 11")
-    assert_eq(len(images),0,"size of query answer 3 ")
-    assert_noterr(err, "query3")
-
-    images,err = broker.query_images("bla")
-    assert_err(err, "wrong query")
-
-def check_dataset(broker,group_id_new):
-    id, metas, err = broker.get_next_dataset(group_id_new)
-    assert_noterr(err, "get_next_dataset1")
-    assert_eq(id,1,"get_next_dataset1")
-    assert_metaname(metas[0],"1_1","get nextdataset1 name1")
-    assert_metaname(metas[1],"1_2","get nextdataset1 name2")
-    assert_usermetadata(metas[0],"get nextdataset1 meta")
-
-    data, err = broker.retrieve_data(metas[0])
-    assert_eq(data.tostring().decode("utf-8"),"hello1","retrieve_data from dataset data")
-    assert_noterr(err, "retrieve_data from dataset err")
-
-
-    id, metas, err = broker.get_next_dataset(group_id_new)
-    assert_noterr(err, "get_next_dataset2")
-    assert_eq(id,2,"get_next_dataset2")
-    assert_metaname(metas[0],"2_1","get nextdataset2 name1")
-
-    id, metas, err = broker.get_last_dataset(group_id_new)
-    assert_noterr(err, "get_last_dataset1")
-    assert_eq(id,10,"get_last_dataset1")
-    assert_metaname(metas[2],"10_3","get get_last_dataset1 name3")
-
-    id, metas, err = broker.get_next_dataset(group_id_new)
-    assert_eq(id,None,"get_next_dataset3 id")
-    assert_eq(id,metas,"get_next_dataset3 metas")
-    assert_err(err, "get_next_dataset3 err")
-
-    id, metas, err = broker.get_dataset_by_id(8,group_id_new)
-    assert_eq(id,8,"get_dataset_by_id1 id")
-    assert_noterr(err, "get_dataset_by_id1 err")
-    assert_metaname(metas[2],"8_3","get get_dataset_by_id1 name3")
-
-    id, metas, err = broker.get_next_dataset(group_id_new)
-    assert_eq(id,9,"get_next_dataset4 id")
-
-
-source, path, beamtime, token, mode = sys.argv[1:]
-
-broker, err = asapo_worker.create_server_broker(source,path, beamtime,"",token,1000)
-
-group_id_new, err = broker.generate_group_id()
-assert_noterr(err, "generate_group")
-
-
-if mode == "single":
-    check_single(broker,group_id_new)
-
-if mode == "datasets":
-    check_dataset(broker,group_id_new)
-
diff --git a/tests/manual/performance_broker/test.sh b/tests/manual/performance_broker/test.sh
index d1685e15ec3e78b8a0265ecb03096520d87c04e5..1091938f10fcb0e655306e4cbe1a5e36450a81b2 100755
--- a/tests/manual/performance_broker/test.sh
+++ b/tests/manual/performance_broker/test.sh
@@ -2,7 +2,7 @@
 
 # starts broker, mongodb on $service_node
 # reads fileset into database
-# calls getnext_broker example from $worker_node
+# calls getnext_broker example from $consumer_node
 
 nthreads=1
 # a directory with many files in it
@@ -15,10 +15,10 @@ service_node=max-wgs
 monitor_node=zitpcx27016
 monitor_port=8086
 
-worker_node=max-display
-#worker_node=max-wgs
+consumer_node=max-display
+#consumer_node=max-wgs
 
-worker_dir=~/broker_test
+consumer_dir=~/broker_test
 service_dir=~/broker_test
 
 
@@ -33,7 +33,7 @@ ssh ${service_node} docker run -d -p 27017:27017 --name mongo mongo
 #ssh ${service_node} docker run -d -p 8086 -p 8086 --name influxdb influxdb
 
 ssh ${service_node} mkdir -p ${service_dir}
-ssh ${worker_node} mkdir -p ${worker_dir}
+ssh ${consumer_node} mkdir -p ${consumer_dir}
 
 
 scp ../../../cmake-build-release/discovery/asapo-discovery ${service_node}:${service_dir}
@@ -53,13 +53,13 @@ rm settings_tmp.json
 scp ../../../cmake-build-release/broker/asapo-broker ${service_node}:${service_dir}
 ssh ${service_node} "bash -c 'cd ${service_dir}; nohup ./asapo-broker -config settings.json &> ${service_dir}/broker.log &'"
 sleep 0.3
-scp ../../../cmake-build-release/worker/tools/folder_to_db/folder2db ${worker_node}:${worker_dir}
-ssh ${worker_node} ${worker_dir}/folder2db -n ${nthreads} ${dir} ${run_name} ${service_node}
+scp ../../../cmake-build-release/consumer/tools/folder_to_db/folder2db ${consumer_node}:${consumer_dir}
+ssh ${consumer_node} ${consumer_dir}/folder2db -n ${nthreads} ${dir} ${run_name} ${service_node}
 
 sleep 3
 
-scp ../../../cmake-build-release/examples/worker/getnext_broker/getnext_broker ${worker_node}:${worker_dir}
-ssh ${worker_node} ${worker_dir}/getnext_broker ${service_node}:8400 ${run_name} ${nthreads} $token
+scp ../../../cmake-build-release/examples/consumer/getnext_broker/getnext_broker ${consumer_node}:${consumer_dir}
+ssh ${consumer_node} ${consumer_dir}/getnext_broker ${service_node}:8400 ${run_name} ${nthreads} $token
 
 
 
diff --git a/tests/manual/performance_broker_receiver/CMakeLists.txt b/tests/manual/performance_broker_receiver/CMakeLists.txt
index 40714bf99702539d450e5245c8d3bae8a9f6e2cd..aee1d949e531283ba228feb2206593f37738e04d 100644
--- a/tests/manual/performance_broker_receiver/CMakeLists.txt
+++ b/tests/manual/performance_broker_receiver/CMakeLists.txt
@@ -3,7 +3,7 @@ set(SOURCE_FILES getlast_broker.cpp)
 
 
 add_executable(${TARGET_NAME} ${SOURCE_FILES})
-target_link_libraries(${TARGET_NAME} asapo-worker)
+target_link_libraries(${TARGET_NAME} asapo-consumer)
 
 #use expression generator to get rid of VS adding Debug/Release folders
 set_target_properties(${TARGET_NAME} PROPERTIES RUNTIME_OUTPUT_DIRECTORY
diff --git a/tests/manual/performance_broker_receiver/getlast_broker.cpp b/tests/manual/performance_broker_receiver/getlast_broker.cpp
index 74ef2c9b1f2e1c8f41060902ef9b485cc4c078f8..8aaa9d9aa122ecf7faa9ba521ed63a1229328a27 100644
--- a/tests/manual/performance_broker_receiver/getlast_broker.cpp
+++ b/tests/manual/performance_broker_receiver/getlast_broker.cpp
@@ -8,7 +8,7 @@
 #include <numeric>
 #include <mutex>
 
-#include "asapo_worker.h"
+#include "asapo_consumer.h"
 
 using std::chrono::system_clock;
 using asapo::Error;
@@ -36,7 +36,7 @@ void WaitThreads(std::vector<std::thread>* threads) {
 int ProcessError(const Error& err) {
     if (err == nullptr) return 0;
     std::cout << err->Explain() << std::endl;
-    return err == asapo::IOErrorTemplates::kTimeout ? 0 : 1;
+    return err == asapo::ConsumerErrorTemplates::kEndOfStream ? 0 : 1;
 }
 
 std::vector<std::thread> StartThreads(const Args& params,
@@ -87,7 +87,7 @@ std::vector<std::thread> StartThreads(const Args& params,
             }
             if (err) {
                 (*errors)[i] += ProcessError(err);
-                if (err == asapo::IOErrorTemplates::kTimeout) {
+                if (err == asapo::ConsumerErrorTemplates::kEndOfStream) {
                     break;
                 }
             }
diff --git a/tests/manual/performance_full_chain_simple/test.sh b/tests/manual/performance_full_chain_simple/test.sh
index 52add6c8d0234fa2b7fda0b72c5c99fac3a99960..733b99f2c7eb8cad90f14484817fe5f243d9dffe 100755
--- a/tests/manual/performance_full_chain_simple/test.sh
+++ b/tests/manual/performance_full_chain_simple/test.sh
@@ -107,13 +107,13 @@ rm settings_tmp.json
 scp ../../../cmake-build-release/broker/asapo-broker ${broker_node}:${broker_dir}
 
 
-#worker_setup
-worker_node=max-display002
-worker_dir=~/fullchain_tests
+#consumer_setup
+consumer_node=max-display002
+consumer_dir=~/fullchain_tests
 nthreads=16
-scp ../../../cmake-build-release/examples/worker/getnext_broker/getnext_broker ${worker_node}:${worker_dir}
-scp ../../../cmake-build-release/asapo_tools/asapo ${worker_node}:${worker_dir}
-scp ../../../tests/automatic/settings/auth_secret.key ${worker_node}:${worker_dir}/auth_secret.key
+scp ../../../cmake-build-release/examples/consumer/getnext_broker/getnext_broker ${consumer_node}:${consumer_dir}
+scp ../../../cmake-build-release/asapo_tools/asapo ${consumer_node}:${consumer_dir}
+scp ../../../tests/automatic/settings/auth_secret.key ${consumer_node}:${consumer_dir}/auth_secret.key
 
 #monitoring_start
 #ssh ${monitor_node} docker run -d -p 8086 -p 8086 --name influxdb influxdb
@@ -144,8 +144,8 @@ ssh ${producer_node} "bash -c 'cd ${producer_dir}; nohup ./dummy-data-producer $
 sleep 1
 
 #prepare token
-ssh ${worker_node} "bash -c '${worker_dir}/asapo token -secret ${worker_dir}/auth_secret.key ${beamtime_id} >${worker_dir}/token'"
-#worker_start
-ssh ${worker_node} "bash -c '${worker_dir}/getnext_broker ${receiver_node}:8400 ${beamtime_id} ${nthreads} \`cat ${worker_dir}/token\`'"
+ssh ${consumer_node} "bash -c '${consumer_dir}/asapo token -secret ${consumer_dir}/auth_secret.key ${beamtime_id} >${consumer_dir}/token'"
+#consumer_start
+ssh ${consumer_node} "bash -c '${consumer_dir}/getnext_broker ${receiver_node}:8400 ${beamtime_id} ${nthreads} \`cat ${consumer_dir}/token\`'"
 
 
diff --git a/tests/manual/performance_producer_receiver/test.sh b/tests/manual/performance_producer_receiver/test.sh
index 251f9d18a14bc1e304f1052f045ccadd4904782a..aefb44b8f17776dccdbfe1acbfd8a63764620ffe 100755
--- a/tests/manual/performance_producer_receiver/test.sh
+++ b/tests/manual/performance_producer_receiver/test.sh
@@ -15,7 +15,7 @@ ssh ${service_node} docker rm -f -v mongo
 
 
 # starts receiver on $service_node
-# runs producer with various file sizes from $worker_node and measures performance
+# runs producer with various file sizes from $consumer_node and measures performance
 
 # a working directory
 service_node=max-wgs
@@ -29,10 +29,10 @@ beamline=test
 monitor_node=zitpcx27016
 monitor_port=8086
 
-worker_node=max-display
-#worker_node=max-wgs
+consumer_node=max-display
+#consumer_node=max-wgs
 
-worker_dir=~/producer_tests
+consumer_dir=~/producer_tests
 service_dir=/gpfs/petra3/scratch/yakubov/receiver_tests
 
 ssh ${monitor_node} influx -execute \"create database db_test\"
@@ -41,7 +41,7 @@ ssh ${monitor_node} influx -execute \"create database db_test\"
 
 ssh ${service_node} mkdir -p ${service_dir}
 ssh ${service_node} mkdir -p ${service_dir}/files/${beamtime_id}
-ssh ${worker_node} mkdir -p ${worker_dir}
+ssh ${consumer_node} mkdir -p ${consumer_dir}
 
 scp ../../../cmake-build-release/receiver/receiver ${service_node}:${service_dir}
 scp ../../../cmake-build-release/discovery/asapo-discovery ${service_node}:${service_dir}
@@ -49,7 +49,7 @@ scp ../../../cmake-build-release/discovery/asapo-discovery ${service_node}:${ser
 scp ../../../cmake-build-release/authorizer/asapo-authorizer ${service_node}:${service_dir}
 scp authorizer.json ${service_node}:${service_dir}/authorizer.json
 
-scp ../../../cmake-build-release/examples/producer/dummy-data-producer/dummy-data-producer ${worker_node}:${worker_dir}
+scp ../../../cmake-build-release/examples/producer/dummy-data-producer/dummy-data-producer ${consumer_node}:${consumer_dir}
 
 function do_work {
 cat receiver.json |
@@ -93,7 +93,7 @@ for size  in 100 1000 10000
 do
 ssh ${service_node} docker run -d -p 27017:27017 --name mongo mongo
 echo ===================================================================
-ssh ${worker_node} ${worker_dir}/dummy-data-producer ${service_ip}:8400 ${beamtime_id} ${size} 10000 8 0 100
+ssh ${consumer_node} ${consumer_dir}/dummy-data-producer ${service_ip}:8400 ${beamtime_id} ${size} 10000 8 0 100
 if [ "$1" == "true" ]
 then
     ssh ${service_node} rm -f ${service_dir}/files/${beamline}/${beamtime_id}/*
diff --git a/tests/manual/python_tests/ASAPO-csb.ipynb b/tests/manual/python_tests/ASAPO-csb.ipynb
index a34d13b9aead06b45535c38054e0c3914a2878d4..962c72374b7a247460b6b02084686d7498026729 100644
--- a/tests/manual/python_tests/ASAPO-csb.ipynb
+++ b/tests/manual/python_tests/ASAPO-csb.ipynb
@@ -15,7 +15,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "import asapo_worker\n",
+    "import asapo_consumer\n",
     "import h5py\n",
     "import numpy as np\n",
     "import matplotlib.pyplot as plt\n",
@@ -24,7 +24,7 @@
     "import time\n",
     "from IPython import display\n",
     "\n",
-    "broker, err = asapo_worker.create_server_broker(\"psana002:8400\", \"/tmp\", \"asapo_test2\",\"\",\"yzgAcLmijSLWIm8dBiGNCbc0i42u5HSm-zR6FRqo__Y=\", 1000000)\n"
+    "broker, err = asapo_consumer.create_server_broker(\"psana002:8400\", \"/tmp\", \"asapo_test2\",\"\",\"yzgAcLmijSLWIm8dBiGNCbc0i42u5HSm-zR6FRqo__Y=\", 1000000)\n"
    ]
   },
   {
diff --git a/tests/manual/python_tests/plot_images_online.py b/tests/manual/python_tests/plot_images_online.py
index 70fd845956ff7f40f51d0df637d199e5c584e481..829200981948f473083b41ce4bca327306cefa30 100644
--- a/tests/manual/python_tests/plot_images_online.py
+++ b/tests/manual/python_tests/plot_images_online.py
@@ -1,4 +1,4 @@
-import asapo_worker
+import asapo_consumer
 import h5py
 import numpy as np
 import matplotlib.pyplot as plt
@@ -9,7 +9,7 @@ import matplotlib.pyplot as plt
 #dset = f.create_dataset("mydataset", data = d1)
 #f.close()
 
-broker, err = asapo_worker.create_server_broker("psana002:8400", "/tmp", "asapo_test2","","yzgAcLmijSLWIm8dBiGNCbc0i42u5HSm-zR6FRqo__Y=", 1000000)
+broker, err = asapo_consumer.create_server_broker("psana002:8400", "/tmp", "asapo_test2","","yzgAcLmijSLWIm8dBiGNCbc0i42u5HSm-zR6FRqo__Y=", 1000000)
 
 last_id = 0
 while True:
diff --git a/tests/manual/python_tests/test_p.py b/tests/manual/python_tests/test_p.py
index 9000bcd7beebcd1f66d41d2a29f84edd39bb01cb..337cac212e868892737fcd60258076b9e890361b 100644
--- a/tests/manual/python_tests/test_p.py
+++ b/tests/manual/python_tests/test_p.py
@@ -1,6 +1,6 @@
 from __future__ import print_function
 
-import asapo_worker
+import asapo_consumer
 import sys
 import json
 import time
@@ -10,7 +10,7 @@ path = "/asapo_shared/asapo/data"
 beamtime = "asapo_test"
 token = "KmUDdacgBzaOD3NIJvN1NmKGqWKtx0DK-NyPjdpeWkc="
 
-broker, err = asapo_worker.create_server_broker(
+broker, err = asapo_consumer.create_server_broker(
     source, path, beamtime, token, 1000)
 
 group_id, err = broker.generate_group_id()
diff --git a/tests/manual/tests_via_nomad/asapo-test_dummy_producer.nomad.in b/tests/manual/tests_via_nomad/asapo-test_dummy_producer.nomad.in
index 89a8aad5d7c249fa11f7be7b1ced59851dde19f8..e5d995789aa800cb501f2814468c0ed5ff046a30 100644
--- a/tests/manual/tests_via_nomad/asapo-test_dummy_producer.nomad.in
+++ b/tests/manual/tests_via_nomad/asapo-test_dummy_producer.nomad.in
@@ -92,7 +92,7 @@ job "asapo-test" {
   }
   #linux
 
-  group "worker-linux1" {
+  group "consumer-linux1" {
 
     constraint {
       attribute = "${attr.kernel.name}"
@@ -107,12 +107,12 @@ job "asapo-test" {
 
     count = 1
 
-    task "worker-linux" {
+    task "consumer-linux" {
       driver = "raw_exec"
 
     template {
      data = <<EOH
-        WORKER_READ_META_ONLY = "{{ keyOrDefault "worker_read_meta_only" "1" }}"
+        CONSUMER_READ_META_ONLY = "{{ keyOrDefault "consumer_read_meta_only" "1" }}"
         EOH
      destination = "secrets/file.env"
      env         = true
@@ -127,7 +127,7 @@ job "asapo-test" {
           "16",
           "oTsKsj8i6WcW_gVzeIFvZCtSfMErjDELJEyAI23n7Ik=",
           "30000",
-          "${WORKER_READ_META_ONLY}"]
+          "${CONSUMER_READ_META_ONLY}"]
       }
 
       resources {
@@ -146,10 +146,10 @@ job "asapo-test" {
     }
 
   }
-  # worker-linux1
+  # consumer-linux1
 
 
-  group "worker-linux2" {
+  group "consumer-linux2" {
 
     constraint {
       attribute = "${attr.kernel.name}"
@@ -164,12 +164,12 @@ job "asapo-test" {
 
     count = 1
 
-    task "worker-linux" {
+    task "consumer-linux" {
       driver = "raw_exec"
 
     template {
      data = <<EOH
-        WORKER_READ_META_ONLY = "{{ keyOrDefault "worker_read_meta_only" "1" }}"
+        CONSUMER_READ_META_ONLY = "{{ keyOrDefault "consumer_read_meta_only" "1" }}"
         EOH
      destination = "secrets/file.env"
      env         = true
@@ -184,7 +184,7 @@ job "asapo-test" {
           "16",
           "yzgAcLmijSLWIm8dBiGNCbc0i42u5HSm-zR6FRqo__Y=",
           "30000",
-          "${WORKER_READ_META_ONLY}"]
+          "${CONSUMER_READ_META_ONLY}"]
       }
       resources {
         cpu = 5000
@@ -205,4 +205,4 @@ job "asapo-test" {
   }
 
 }
-# worker-linux2
+# consumer-linux2
diff --git a/tests/manual/tests_via_nomad/asapo-test_dummy_producer_only.nomad.in b/tests/manual/tests_via_nomad/asapo-test_dummy_producer_only.nomad.in
index 0effcca2a01a2244364f6eaf6b0550c5b43f5e0c..57c027964539cbb49a392d1f10056fb299d77b20 100644
--- a/tests/manual/tests_via_nomad/asapo-test_dummy_producer_only.nomad.in
+++ b/tests/manual/tests_via_nomad/asapo-test_dummy_producer_only.nomad.in
@@ -93,7 +93,7 @@ job "asapo-test" {
   }
   #linux
 
-  group "worker-linux1" {
+  group "consumer-linux1" {
 
     constraint {
       attribute = "${attr.kernel.name}"
@@ -109,12 +109,12 @@ job "asapo-test" {
     count = 0
 
 
-    task "worker-linux" {
+    task "consumer-linux" {
       driver = "raw_exec"
 
     template {
      data = <<EOH
-        WORKER_READ_META_ONLY = "{{ keyOrDefault "worker_read_meta_only" "1" }}"
+        CONSUMER_READ_META_ONLY = "{{ keyOrDefault "consumer_read_meta_only" "1" }}"
         EOH
      destination = "secrets/file.env"
      env         = true
@@ -129,7 +129,7 @@ job "asapo-test" {
           "16",
           "oTsKsj8i6WcW_gVzeIFvZCtSfMErjDELJEyAI23n7Ik=",
           "30000",
-          "${WORKER_READ_META_ONLY}"]
+          "${CONSUMER_READ_META_ONLY}"]
       }
 
       resources {
@@ -148,10 +148,10 @@ job "asapo-test" {
     }
 
   }
-  # worker-linux1
+  # consumer-linux1
 
 
-  group "worker-linux2" {
+  group "consumer-linux2" {
 
     constraint {
       attribute = "${attr.kernel.name}"
@@ -167,12 +167,12 @@ job "asapo-test" {
 
 	count = 0
 
-    task "worker-linux" {
+    task "consumer-linux" {
       driver = "raw_exec"
 
     template {
      data = <<EOH
-        WORKER_READ_META_ONLY = "{{ keyOrDefault "worker_read_meta_only" "1" }}"
+        CONSUMER_READ_META_ONLY = "{{ keyOrDefault "consumer_read_meta_only" "1" }}"
         EOH
      destination = "secrets/file.env"
      env         = true
@@ -187,7 +187,7 @@ job "asapo-test" {
           "16",
           "yzgAcLmijSLWIm8dBiGNCbc0i42u5HSm-zR6FRqo__Y=",
           "30000",
-          "${WORKER_READ_META_ONLY}"]
+          "${CONSUMER_READ_META_ONLY}"]
       }
       resources {
         cpu = 5000
@@ -208,4 +208,4 @@ job "asapo-test" {
   }
 
 }
-# worker-linux2
+# consumer-linux2
diff --git a/tests/manual/tests_via_nomad/asapo-test_filegen_worker.nomad.in b/tests/manual/tests_via_nomad/asapo-test_filegen_consumer.nomad.in
similarity index 89%
rename from tests/manual/tests_via_nomad/asapo-test_filegen_worker.nomad.in
rename to tests/manual/tests_via_nomad/asapo-test_filegen_consumer.nomad.in
index 3fdb7cec5b10e85d176378be474ca91ce07be889..6aab82d0c2380fd5034e9411e8dcecf294cd61e7 100644
--- a/tests/manual/tests_via_nomad/asapo-test_filegen_worker.nomad.in
+++ b/tests/manual/tests_via_nomad/asapo-test_filegen_consumer.nomad.in
@@ -74,7 +74,7 @@ job "asapo-test" {
   }
 
 
-  group "worker-linux1" {
+  group "consumer-linux1" {
 
     restart {
       attempts = 0
@@ -94,11 +94,11 @@ job "asapo-test" {
 
     count = 1
 
-    task "worker-linux" {
+    task "consumer-linux" {
       driver = "raw_exec"
     template {
      data = <<EOH
-        WORKER_READ_META_ONLY = "{{ keyOrDefault "worker_read_meta_only" "1" }}"
+        CONSUMER_READ_META_ONLY = "{{ keyOrDefault "consumer_read_meta_only" "1" }}"
         EOH
      destination = "secrets/file.env"
      env         = true
@@ -113,7 +113,7 @@ job "asapo-test" {
           "16",
           "oTsKsj8i6WcW_gVzeIFvZCtSfMErjDELJEyAI23n7Ik=",
           "30000",
-          "${WORKER_READ_META_ONLY}"]
+          "${CONSUMER_READ_META_ONLY}"]
       }
 
 #      resources {
@@ -132,10 +132,10 @@ job "asapo-test" {
     }
 
   }
-  # worker-linux1
+  # consumer-linux1
 
 
-  group "worker-linux2" {
+  group "consumer-linux2" {
 
     restart {
       attempts = 0
@@ -155,11 +155,11 @@ job "asapo-test" {
 
     count = 1
 
-    task "worker-linux" {
+    task "consumer-linux" {
       driver = "raw_exec"
     template {
      data = <<EOH
-        WORKER_READ_META_ONLY = "{{ keyOrDefault "worker_read_meta_only" "1" }}"
+        CONSUMER_READ_META_ONLY = "{{ keyOrDefault "consumer_read_meta_only" "1" }}"
         EOH
      destination = "secrets/file.env"
      env         = true
@@ -174,7 +174,7 @@ job "asapo-test" {
           "16",
           "yzgAcLmijSLWIm8dBiGNCbc0i42u5HSm-zR6FRqo__Y=",
           "30000",
-          "${WORKER_READ_META_ONLY}"]
+          "${CONSUMER_READ_META_ONLY}"]
       }
 #      resources {
 #        cpu = 5000
@@ -192,6 +192,6 @@ job "asapo-test" {
     }
 
   }
-  # worker-linux2
+  # consumer-linux2
 
 }
diff --git a/tests/manual/tests_via_nomad/asapo-test_filegen_worker_1M.nomad.in b/tests/manual/tests_via_nomad/asapo-test_filegen_consumer_1M.nomad.in
similarity index 89%
rename from tests/manual/tests_via_nomad/asapo-test_filegen_worker_1M.nomad.in
rename to tests/manual/tests_via_nomad/asapo-test_filegen_consumer_1M.nomad.in
index 1365fd87c9c47e3ce2910ff226fba5b99fc72624..82e24cdf6d2ef7d6fcd3ed36f714be10f17ee476 100644
--- a/tests/manual/tests_via_nomad/asapo-test_filegen_worker_1M.nomad.in
+++ b/tests/manual/tests_via_nomad/asapo-test_filegen_consumer_1M.nomad.in
@@ -74,7 +74,7 @@ job "asapo-test" {
   }
 
 
-  group "worker-linux1" {
+  group "consumer-linux1" {
 
     restart {
       attempts = 0
@@ -94,11 +94,11 @@ job "asapo-test" {
 
     count = 1
 
-    task "worker-linux" {
+    task "consumer-linux" {
       driver = "raw_exec"
     template {
      data = <<EOH
-        WORKER_READ_META_ONLY = "{{ keyOrDefault "worker_read_meta_only" "1" }}"
+        CONSUMER_READ_META_ONLY = "{{ keyOrDefault "consumer_read_meta_only" "1" }}"
         EOH
      destination = "secrets/file.env"
      env         = true
@@ -113,7 +113,7 @@ job "asapo-test" {
           "16",
           "oTsKsj8i6WcW_gVzeIFvZCtSfMErjDELJEyAI23n7Ik=",
           "30000",
-          "${WORKER_READ_META_ONLY}"]
+          "${CONSUMER_READ_META_ONLY}"]
       }
 
 #      resources {
@@ -132,10 +132,10 @@ job "asapo-test" {
     }
 
   }
-  # worker-linux1
+  # consumer-linux1
 
 
-  group "worker-linux2" {
+  group "consumer-linux2" {
 
     restart {
       attempts = 0
@@ -155,11 +155,11 @@ job "asapo-test" {
 
     count = 1
 
-    task "worker-linux" {
+    task "consumer-linux" {
       driver = "raw_exec"
     template {
      data = <<EOH
-        WORKER_READ_META_ONLY = "{{ keyOrDefault "worker_read_meta_only" "1" }}"
+        CONSUMER_READ_META_ONLY = "{{ keyOrDefault "consumer_read_meta_only" "1" }}"
         EOH
      destination = "secrets/file.env"
      env         = true
@@ -174,7 +174,7 @@ job "asapo-test" {
           "16",
           "yzgAcLmijSLWIm8dBiGNCbc0i42u5HSm-zR6FRqo__Y=",
           "30000",
-          "${WORKER_READ_META_ONLY}"]
+          "${CONSUMER_READ_META_ONLY}"]
       }
 #      resources {
 #        cpu = 5000
@@ -192,6 +192,6 @@ job "asapo-test" {
     }
 
   }
-  # worker-linux2
+  # consumer-linux2
 
 }
diff --git a/tests/manual/tests_via_nomad/asapo-test_filegen_worker_1M_batch.nomad.in b/tests/manual/tests_via_nomad/asapo-test_filegen_consumer_1M_batch.nomad.in
similarity index 89%
rename from tests/manual/tests_via_nomad/asapo-test_filegen_worker_1M_batch.nomad.in
rename to tests/manual/tests_via_nomad/asapo-test_filegen_consumer_1M_batch.nomad.in
index 2ce7d38242e70bd5ce9ba73f651ca04edbe339ce..4a92a5b1c5bd48d21a7dc9973c00346d92befb42 100644
--- a/tests/manual/tests_via_nomad/asapo-test_filegen_worker_1M_batch.nomad.in
+++ b/tests/manual/tests_via_nomad/asapo-test_filegen_consumer_1M_batch.nomad.in
@@ -74,7 +74,7 @@ job "asapo-test" {
   }
 
 
-  group "worker-linux1" {
+  group "consumer-linux1" {
 
     restart {
       attempts = 0
@@ -94,11 +94,11 @@ job "asapo-test" {
 
     count = 1
 
-    task "worker-linux" {
+    task "consumer-linux" {
       driver = "raw_exec"
     template {
      data = <<EOH
-        WORKER_READ_META_ONLY = "{{ keyOrDefault "worker_read_meta_only" "1" }}"
+        CONSUMER_READ_META_ONLY = "{{ keyOrDefault "consumer_read_meta_only" "1" }}"
         EOH
      destination = "secrets/file.env"
      env         = true
@@ -113,7 +113,7 @@ job "asapo-test" {
           "16",
           "oTsKsj8i6WcW_gVzeIFvZCtSfMErjDELJEyAI23n7Ik=",
           "30000",
-          "${WORKER_READ_META_ONLY}",
+          "${CONSUMER_READ_META_ONLY}",
           "1"]
       }
 
@@ -133,10 +133,10 @@ job "asapo-test" {
     }
 
   }
-  # worker-linux1
+  # consumer-linux1
 
 
-  group "worker-linux2" {
+  group "consumer-linux2" {
 
     restart {
       attempts = 0
@@ -156,11 +156,11 @@ job "asapo-test" {
 
     count = 1
 
-    task "worker-linux" {
+    task "consumer-linux" {
       driver = "raw_exec"
     template {
      data = <<EOH
-        WORKER_READ_META_ONLY = "{{ keyOrDefault "worker_read_meta_only" "1" }}"
+        CONSUMER_READ_META_ONLY = "{{ keyOrDefault "consumer_read_meta_only" "1" }}"
         EOH
      destination = "secrets/file.env"
      env         = true
@@ -175,7 +175,7 @@ job "asapo-test" {
           "16",
           "yzgAcLmijSLWIm8dBiGNCbc0i42u5HSm-zR6FRqo__Y=",
           "30000",
-          "${WORKER_READ_META_ONLY}",
+          "${CONSUMER_READ_META_ONLY}",
           "1"]
       }
 #      resources {
@@ -194,6 +194,6 @@ job "asapo-test" {
     }
 
   }
-  # worker-linux2
+  # consumer-linux2
 
 }
diff --git a/tests/manual/tests_via_nomad/asapo-test_filegen_worker_1M_multisource.nomad.in b/tests/manual/tests_via_nomad/asapo-test_filegen_consumer_1M_multisource.nomad.in
similarity index 92%
rename from tests/manual/tests_via_nomad/asapo-test_filegen_worker_1M_multisource.nomad.in
rename to tests/manual/tests_via_nomad/asapo-test_filegen_consumer_1M_multisource.nomad.in
index 26623e83bf7473b7e0292ef3b68b8c5d2f716e38..2707b362534e15f222008d107568ab711fc21304 100644
--- a/tests/manual/tests_via_nomad/asapo-test_filegen_worker_1M_multisource.nomad.in
+++ b/tests/manual/tests_via_nomad/asapo-test_filegen_consumer_1M_multisource.nomad.in
@@ -74,7 +74,7 @@ job "asapo-test" {
   }
 
 
-  group "worker" {
+  group "consumer" {
 
     restart {
       attempts = 0
@@ -94,11 +94,11 @@ job "asapo-test" {
 
     count = 1
 
-    task "worker-linux" {
+    task "consumer-linux" {
       driver = "raw_exec"
     template {
      data = <<EOH
-        WORKER_READ_META_ONLY = "{{ keyOrDefault "worker_read_meta_only" "1" }}"
+        CONSUMER_READ_META_ONLY = "{{ keyOrDefault "consumer_read_meta_only" "1" }}"
         EOH
      destination = "secrets/file.env"
      env         = true
@@ -113,7 +113,7 @@ job "asapo-test" {
           "16",
           "KmUDdacgBzaOD3NIJvN1NmKGqWKtx0DK-NyPjdpeWkc=",
           "30000",
-          "${WORKER_READ_META_ONLY}",
+          "${CONSUMER_READ_META_ONLY}",
           "1"]
       }
 
@@ -133,6 +133,6 @@ job "asapo-test" {
     }
 
   }
-  # worker
+  # consumer
 
 }
diff --git a/tests/manual/tests_via_nomad/asapo-test_filegen_worker_batch.nomad.in b/tests/manual/tests_via_nomad/asapo-test_filegen_consumer_batch.nomad.in
similarity index 89%
rename from tests/manual/tests_via_nomad/asapo-test_filegen_worker_batch.nomad.in
rename to tests/manual/tests_via_nomad/asapo-test_filegen_consumer_batch.nomad.in
index 1298b228b1cfe5713c7c68f73f782cb76ec277b0..4303b56d2637b5da4a3a5e61c3a49afc4c095596 100644
--- a/tests/manual/tests_via_nomad/asapo-test_filegen_worker_batch.nomad.in
+++ b/tests/manual/tests_via_nomad/asapo-test_filegen_consumer_batch.nomad.in
@@ -74,7 +74,7 @@ job "asapo-test" {
   }
 
 
-  group "worker-linux1" {
+  group "consumer-linux1" {
 
     restart {
       attempts = 0
@@ -94,11 +94,11 @@ job "asapo-test" {
 
     count = 1
 
-    task "worker-linux" {
+    task "consumer-linux" {
       driver = "raw_exec"
     template {
      data = <<EOH
-        WORKER_READ_META_ONLY = "{{ keyOrDefault "worker_read_meta_only" "1" }}"
+        CONSUMER_READ_META_ONLY = "{{ keyOrDefault "consumer_read_meta_only" "1" }}"
         EOH
      destination = "secrets/file.env"
      env         = true
@@ -113,7 +113,7 @@ job "asapo-test" {
           "16",
           "oTsKsj8i6WcW_gVzeIFvZCtSfMErjDELJEyAI23n7Ik=",
           "30000",
-          "${WORKER_READ_META_ONLY}",
+          "${CONSUMER_READ_META_ONLY}",
           "1"]
       }
 
@@ -133,10 +133,10 @@ job "asapo-test" {
     }
 
   }
-  # worker-linux1
+  # consumer-linux1
 
 
-  group "worker-linux2" {
+  group "consumer-linux2" {
 
     restart {
       attempts = 0
@@ -156,11 +156,11 @@ job "asapo-test" {
 
     count = 1
 
-    task "worker-linux" {
+    task "consumer-linux" {
       driver = "raw_exec"
     template {
      data = <<EOH
-        WORKER_READ_META_ONLY = "{{ keyOrDefault "worker_read_meta_only" "1" }}"
+        CONSUMER_READ_META_ONLY = "{{ keyOrDefault "consumer_read_meta_only" "1" }}"
         EOH
      destination = "secrets/file.env"
      env         = true
@@ -175,7 +175,7 @@ job "asapo-test" {
           "16",
           "yzgAcLmijSLWIm8dBiGNCbc0i42u5HSm-zR6FRqo__Y=",
           "30000",
-          "${WORKER_READ_META_ONLY}",
+          "${CONSUMER_READ_META_ONLY}",
           "1"]
       }
 #      resources {
@@ -194,6 +194,6 @@ job "asapo-test" {
     }
 
   }
-  # worker-linux2
+  # consumer-linux2
 
 }
diff --git a/tests/manual/tests_via_nomad/run_test_fullchain.sh b/tests/manual/tests_via_nomad/run_test_fullchain.sh
index 9f842d002cbd45639a67c4501126051741e0fb81..3c9149fe211805d4dd029d4921df4f20aaa4c936 100755
--- a/tests/manual/tests_via_nomad/run_test_fullchain.sh
+++ b/tests/manual/tests_via_nomad/run_test_fullchain.sh
@@ -8,5 +8,5 @@ sleep 1
 nomad stop asapo-test
 . ./clean_after_tests.sh
 
-nomad run asapo-test_filegen_worker.nomad
+nomad run asapo-test_filegen_consumer.nomad
 
diff --git a/tests/manual/tests_via_nomad/run_test_fullchain_batch.sh b/tests/manual/tests_via_nomad/run_test_fullchain_batch.sh
index 459aa9f665b846bd9b2d9aa079786081650bd552..af34de80967216d94af898bebbbfcd9f7c59f537 100755
--- a/tests/manual/tests_via_nomad/run_test_fullchain_batch.sh
+++ b/tests/manual/tests_via_nomad/run_test_fullchain_batch.sh
@@ -8,5 +8,5 @@ sleep 1
 nomad stop asapo-test
 . ./clean_after_tests.sh
 
-nomad run asapo-test_filegen_worker_batch.nomad
+nomad run asapo-test_filegen_consumer_batch.nomad
 
diff --git a/tests/manual/tests_via_nomad/run_test_fullchain_multisource.sh b/tests/manual/tests_via_nomad/run_test_fullchain_multisource.sh
index 8a5a8687cc32dba70cd68321b2371168b70bbfe4..406ac91cca6f32c8a2acf0ca2733690dec3cf7a4 100755
--- a/tests/manual/tests_via_nomad/run_test_fullchain_multisource.sh
+++ b/tests/manual/tests_via_nomad/run_test_fullchain_multisource.sh
@@ -8,5 +8,5 @@ sleep 1
 nomad stop asapo-test
 . ./clean_after_tests.sh
 
-nomad run asapo-test_filegen_worker_1M_multisource.nomad
+nomad run asapo-test_filegen_consumer_1M_multisource.nomad
 
diff --git a/worker/api/cpp/include/asapo_worker.h b/worker/api/cpp/include/asapo_worker.h
deleted file mode 100644
index 586ebc769b94af42882c80ceda5fa36882e020b9..0000000000000000000000000000000000000000
--- a/worker/api/cpp/include/asapo_worker.h
+++ /dev/null
@@ -1,8 +0,0 @@
-#ifndef ASAPO_ASAPO_WORKER_H
-#define ASAPO_ASAPO_WORKER_H
-
-#include "worker/data_broker.h"
-#include "worker/worker_error.h"
-#include "common/version.h"
-
-#endif //ASAPO_ASAPO_WORKER_H
diff --git a/worker/api/cpp/include/worker/worker_error.h b/worker/api/cpp/include/worker/worker_error.h
deleted file mode 100644
index 67839db624f0333ce4e271258d66198ac64d6f80..0000000000000000000000000000000000000000
--- a/worker/api/cpp/include/worker/worker_error.h
+++ /dev/null
@@ -1,85 +0,0 @@
-#ifndef ASAPO_WORKER_ERROR_H
-#define ASAPO_WORKER_ERROR_H
-
-#include "common/error.h"
-#include "common/io_error.h"
-
-namespace asapo {
-
-enum class WorkerErrorType {
-    kMemoryError,
-    kEmptyDatasource,
-    kSourceNotFound,
-    kSourceNotConnected,
-    kSourceAlreadyConnected,
-    kErrorReadingSource,
-    kNotFound,
-    kPermissionDenied,
-    kNoData,
-    kWrongInput,
-    kAuthorizationError,
-    kInternalError,
-    kUnknownIOError
-};
-
-
-using WorkerErrorTemplate = ServiceErrorTemplate<WorkerErrorType, ErrorType::kWorkerError>;
-
-namespace WorkerErrorTemplates {
-
-auto const kMemoryError = WorkerErrorTemplate{
-    "Memory Error", WorkerErrorType::kMemoryError
-};
-
-auto const kEmptyDatasource = WorkerErrorTemplate{
-    "Empty Data Source", WorkerErrorType::kEmptyDatasource
-};
-
-auto const kSourceNotFound = WorkerErrorTemplate{
-    "Source Not Found", WorkerErrorType::kSourceNotFound
-};
-
-auto const kSourceNotConnected = WorkerErrorTemplate{
-    "Source Not Connected", WorkerErrorType::kSourceNotConnected
-};
-
-auto const kSourceAlreadyConnected = WorkerErrorTemplate{
-    "Source Already Connected", WorkerErrorType::kSourceAlreadyConnected
-};
-
-auto const kErrorReadingSource = WorkerErrorTemplate{
-    "Error Reading Source", WorkerErrorType::kErrorReadingSource
-};
-
-auto const kNotFound = WorkerErrorTemplate{
-    "Uri not found", WorkerErrorType::kNotFound
-};
-
-auto const kPermissionDenied = WorkerErrorTemplate{
-    "Permission Denied", WorkerErrorType::kPermissionDenied
-};
-
-auto const kNoData = WorkerErrorTemplate{
-    "No Data", WorkerErrorType::kNoData
-};
-
-auto const kWrongInput = WorkerErrorTemplate{
-    "Wrong Input", WorkerErrorType::kWrongInput
-};
-
-auto const kAuthorizationError = WorkerErrorTemplate{
-    "Authorization Error", WorkerErrorType::kAuthorizationError
-};
-
-auto const kInternalError = WorkerErrorTemplate{
-    "Internal Error", WorkerErrorType::kInternalError
-};
-
-auto const kUnknownIOError = WorkerErrorTemplate{
-    "Unknown IO Error", WorkerErrorType::kUnknownIOError
-};
-
-}
-}
-
-#endif //ASAPO_WORKER_ERROR_H
diff --git a/worker/api/cpp/src/folder_data_broker.cpp b/worker/api/cpp/src/folder_data_broker.cpp
deleted file mode 100644
index cdc0fb1fa518392d5463e3f9632e6b5b85960089..0000000000000000000000000000000000000000
--- a/worker/api/cpp/src/folder_data_broker.cpp
+++ /dev/null
@@ -1,130 +0,0 @@
-#include "folder_data_broker.h"
-
-#include "io/io_factory.h"
-#include "preprocessor/definitions.h"
-#include "worker/worker_error.h"
-
-namespace asapo {
-
-FolderDataBroker::FolderDataBroker(const std::string& source_name) :
-    io__{GenerateDefaultIO()}, base_path_{source_name}, is_connected_{false},
-    current_file_{ -1} {
-}
-
-Error FolderDataBroker::Connect() {
-    std::lock_guard<std::mutex> lock{mutex_};
-
-    if (is_connected_) {
-        return WorkerErrorTemplates::kSourceAlreadyConnected.Generate();
-    }
-
-    Error error;
-    filelist_ = io__->FilesInFolder(base_path_, &error);
-
-    if (error == nullptr) {
-        is_connected_ = true;
-        return nullptr;
-    }
-
-    return error;
-}
-
-Error FolderDataBroker::CanGetData(FileInfo* info, FileData* data, uint64_t nfile) const noexcept {
-    if (!is_connected_) {
-        return WorkerErrorTemplates::kSourceNotConnected.Generate();
-    }
-
-    if (info == nullptr) {
-        return WorkerErrorTemplates::kWrongInput.Generate();
-    }
-
-    if (nfile >= (uint64_t) filelist_.size()) {
-        return asapo::ErrorTemplates::kEndOfFile.Generate("No Data");
-    }
-    return nullptr;
-}
-
-
-Error FolderDataBroker::RetrieveData(FileInfo* info, FileData* data) {
-    if (data == nullptr || info == nullptr ) {
-        return TextError("pointers are empty");
-    }
-
-    Error error;
-    *data = io__->GetDataFromFile(info->FullName(base_path_), &info->size, &error);
-    return error;
-}
-
-
-Error FolderDataBroker::GetFileByIndex(uint64_t nfile_to_get, FileInfo* info, FileData* data) {
-    auto err = CanGetData(info, data, nfile_to_get);
-    if (err != nullptr) {
-        return err;
-    }
-
-    *info = filelist_[(size_t) nfile_to_get];
-
-    if (data == nullptr) {
-        return nullptr;
-    }
-
-    return RetrieveData(info, data);
-}
-
-
-Error FolderDataBroker::GetNext(FileInfo* info, std::string group_id, FileData* data) {
-// could probably use atomic here, but just to make sure (tests showed no performance difference)
-    mutex_.lock();
-    uint64_t nfile_to_get = ++current_file_;
-    mutex_.unlock();
-
-    return GetFileByIndex(nfile_to_get, info, data);
-
-}
-Error FolderDataBroker::GetLast(FileInfo* info, std::string group_id,  FileData* data) {
-    uint64_t nfile_to_get = filelist_.size() - 1;
-    return GetFileByIndex(nfile_to_get, info, data);
-}
-
-std::string FolderDataBroker::GenerateNewGroupId(Error* err) {
-    *err = nullptr;
-    return "";
-}
-Error FolderDataBroker::ResetCounter(std::string group_id) {
-    std::lock_guard<std::mutex> lock{mutex_};
-    current_file_ = -1;
-    return nullptr;
-}
-uint64_t FolderDataBroker::GetNDataSets(Error* err) {
-    std::lock_guard<std::mutex> lock{mutex_};
-    return filelist_.size();
-}
-
-Error FolderDataBroker::GetById(uint64_t id, FileInfo* info, std::string group_id, FileData* data) {
-    return GetFileByIndex(id - 1 , info, data);
-}
-
-std::string FolderDataBroker::GetBeamtimeMeta(Error* err) {
-    return io__->ReadFileToString(base_path_ + kPathSeparator + "beamtime_global.meta", err);
-}
-
-FileInfos FolderDataBroker::QueryImages(std::string query, Error* err) {
-    *err = TextError("Not supported for folder data broker");
-    return FileInfos{};
-}
-
-DataSet FolderDataBroker::GetNextDataset(std::string group_id, Error* err) {
-    *err = TextError("Not supported for folder data broker");
-    return {0, FileInfos{}};
-}
-DataSet FolderDataBroker::GetLastDataset(std::string group_id, Error* err) {
-    *err = TextError("Not supported for folder data broker");
-    return {0, FileInfos{}};
-}
-DataSet FolderDataBroker::GetDatasetById(uint64_t id, std::string group_id, Error* err) {
-    *err = TextError("Not supported for folder data broker");
-    return {0, FileInfos{}};
-}
-
-
-}
diff --git a/worker/api/cpp/src/folder_data_broker.h b/worker/api/cpp/src/folder_data_broker.h
deleted file mode 100644
index 372ef2d514535e7bad5a0c82c348abdd403895dc..0000000000000000000000000000000000000000
--- a/worker/api/cpp/src/folder_data_broker.h
+++ /dev/null
@@ -1,45 +0,0 @@
-#ifndef ASAPO_FOLDERDATABROKER_H
-#define ASAPO_FOLDERDATABROKER_H
-
-#include "worker/data_broker.h"
-
-#include <string>
-#include <mutex>
-
-#include "io/io.h"
-
-namespace asapo {
-
-class FolderDataBroker final : public asapo::DataBroker {
-  public:
-    explicit FolderDataBroker(const std::string& source_name);
-    Error Connect() override;
-    Error ResetCounter(std::string group_id) override;
-    Error GetNext(FileInfo* info, std::string group_id, FileData* data) override;
-    Error GetLast(FileInfo* info, std::string group_id, FileData* data) override;
-    void SetTimeout(uint64_t timeout_ms) override {}; // to timeout in this case
-    std::string GenerateNewGroupId(Error* err)
-    override; // return "0" always and no error - no group ids for folder datra broker
-    std::string GetBeamtimeMeta(Error* err) override;
-    uint64_t GetNDataSets(Error* err) override;
-    Error GetById(uint64_t id, FileInfo* info, std::string group_id, FileData* data) override;
-    std::unique_ptr<asapo::IO> io__; // modified in testings to mock system calls,otherwise do not touch
-    FileInfos QueryImages(std::string query, Error* err) override;
-    DataSet GetNextDataset(std::string group_id, Error* err) override;
-    DataSet GetLastDataset(std::string group_id, Error* err) override;
-    DataSet GetDatasetById(uint64_t id, std::string group_id, Error* err) override;
-    Error RetrieveData(FileInfo* info, FileData* data) override;
-  private:
-    std::string base_path_;
-    bool is_connected_;
-    int current_file_;
-    FileInfos  filelist_;
-    Error CanGetData(FileInfo* info, FileData* data, uint64_t nfile) const noexcept;
-    Error GetFileByIndex(uint64_t nfile_to_get, FileInfo* info, FileData* data);
-    std::mutex mutex_;
-};
-
-}
-
-#endif //ASAPO_FOLDERDATABROKER_H
-
diff --git a/worker/api/cpp/unittests/test_folder_broker.cpp b/worker/api/cpp/unittests/test_folder_broker.cpp
deleted file mode 100644
index d72d46166bfd0b9e79c7328f9e49054c5898c927..0000000000000000000000000000000000000000
--- a/worker/api/cpp/unittests/test_folder_broker.cpp
+++ /dev/null
@@ -1,417 +0,0 @@
-#include <gmock/gmock.h>
-#include <unittests/MockIO.h>
-#include "gtest/gtest.h"
-
-#include "worker/data_broker.h"
-#include "worker/worker_error.h"
-#include "io/io.h"
-#include "../../../../common/cpp/src/system_io/system_io.h"
-#include "../src/folder_data_broker.h"
-#include "preprocessor/definitions.h"
-
-using asapo::DataBrokerFactory;
-using asapo::DataBroker;
-using asapo::FolderDataBroker;
-using asapo::IO;
-using asapo::FileInfos;
-using asapo::FileInfo;
-using asapo::FileData;
-using asapo::Error;
-using asapo::TextError;
-using asapo::SimpleError;
-
-using ::testing::AtLeast;
-using ::testing::Eq;
-using ::testing::Ne;
-using ::testing::Test;
-using ::testing::_;
-using ::testing::Mock;
-
-
-namespace {
-
-TEST(FolderDataBroker, SetCorrectIO) {
-    auto data_broker = new FolderDataBroker("test");
-    ASSERT_THAT(dynamic_cast<asapo::SystemIO*>(data_broker->io__.get()), Ne(nullptr));
-    delete data_broker;
-}
-
-class FakeIO: public asapo::MockIO {
-  public:
-
-    FileInfos FilesInFolder(const std::string& folder, Error* err) const override {
-        *err = nullptr;
-        FileInfos file_infos;
-        FileInfo fi;
-        fi.size = 100;
-        fi.name = "1";
-        file_infos.push_back(fi);
-        fi.name = "2";
-        file_infos.push_back(fi);
-        fi.name = "3";
-        file_infos.push_back(fi);
-        return file_infos;
-    }
-};
-
-class IOFolderNotFound: public FakeIO {
-  public:
-    FileInfos FilesInFolder(const std::string& folder, Error* err) const override {
-        *err = asapo::IOErrorTemplates::kFileNotFound.Generate();
-        return {};
-    }
-};
-
-class IOFolderUnknownError: public FakeIO {
-  public:
-    FileInfos FilesInFolder(const std::string& folder, Error* err) const override {
-        *err  = asapo::IOErrorTemplates::kUnknownIOError.Generate();
-        return {};
-    }
-};
-
-class IOEmptyFolder: public FakeIO {
-  public:
-    FileInfos FilesInFolder(const std::string& folder, Error* err) const override {
-        *err = nullptr;
-        return {};
-    }
-};
-
-class IOCannotOpenFile: public FakeIO {
-  public:
-    FileData GetDataFromFile(const std::string& fname, uint64_t* fsize, Error* err) const noexcept override {
-        *err = asapo::IOErrorTemplates::kPermissionDenied.Generate();
-        return {};
-    };
-};
-
-
-
-class FolderDataBrokerTests : public Test {
-  public:
-    std::unique_ptr<FolderDataBroker> data_broker;
-    void SetUp() override {
-        data_broker = std::unique_ptr<FolderDataBroker> {new FolderDataBroker("/path/to/file")};
-        data_broker->io__ = std::unique_ptr<IO> {new FakeIO()};
-    }
-    void TearDown() override {
-    }
-};
-
-TEST_F(FolderDataBrokerTests, CanConnect) {
-    auto return_code = data_broker->Connect();
-
-    ASSERT_THAT(return_code, Eq(nullptr));
-}
-
-TEST_F(FolderDataBrokerTests, CannotConnectTwice) {
-    data_broker->Connect();
-
-    auto err = data_broker->Connect();
-
-    ASSERT_THAT(err, Eq(asapo::WorkerErrorTemplates::kSourceAlreadyConnected));
-}
-
-
-TEST_F(FolderDataBrokerTests, CannotConnectWhenNoFolder) {
-    data_broker->io__ = std::unique_ptr<IO> {new IOFolderNotFound()};
-
-    auto err = data_broker->Connect();
-
-    ASSERT_THAT(err, Eq(asapo::IOErrorTemplates::kFileNotFound));
-}
-
-TEST_F(FolderDataBrokerTests, ConnectReturnsUnknownIOError) {
-    data_broker->io__ = std::unique_ptr<IO> {new IOFolderUnknownError()};
-
-    auto err = data_broker->Connect();
-
-    ASSERT_THAT(err, Eq(asapo::IOErrorTemplates::kUnknownIOError));
-}
-
-TEST_F(FolderDataBrokerTests, GetNextWithoutConnectReturnsError) {
-    auto err = data_broker->GetNext(nullptr, "", nullptr);
-
-    ASSERT_THAT(err, Eq(asapo::WorkerErrorTemplates::kSourceNotConnected));
-}
-
-TEST_F(FolderDataBrokerTests, GetNextWithNullPointersReturnsError) {
-    data_broker->Connect();
-
-    auto err = data_broker->GetNext(nullptr, "", nullptr);
-
-    ASSERT_THAT(err, Eq(asapo::WorkerErrorTemplates::kWrongInput));
-}
-
-TEST_F(FolderDataBrokerTests, GetNextReturnsFileInfo) {
-    data_broker->Connect();
-    FileInfo fi;
-
-    auto err = data_broker->GetNext(&fi, "", nullptr);
-
-    ASSERT_THAT(err, Eq(nullptr));
-    ASSERT_THAT(fi.name, Eq("1"));
-    ASSERT_THAT(fi.size, Eq(100));
-
-}
-
-TEST_F(FolderDataBrokerTests, GetNDataSets) {
-    data_broker->Connect();
-    Error err;
-    auto n = data_broker->GetNDataSets(&err);
-    ASSERT_THAT(err, Eq(nullptr));
-    ASSERT_THAT(n, Eq(3));
-}
-
-
-TEST_F(FolderDataBrokerTests, GetLastReturnsFileInfo) {
-    data_broker->Connect();
-    FileInfo fi;
-
-    auto err = data_broker->GetLast(&fi, "", nullptr);
-
-    ASSERT_THAT(err, Eq(nullptr));
-    ASSERT_THAT(fi.name, Eq("3"));
-    ASSERT_THAT(fi.size, Eq(100));
-
-}
-
-TEST_F(FolderDataBrokerTests, GetLastSecondTimeReturnsSameFileInfo) {
-    data_broker->Connect();
-    FileInfo fi;
-
-    auto err = data_broker->GetLast(&fi, "", nullptr);
-    ASSERT_THAT(err, Eq(nullptr));
-    err = data_broker->GetLast(&fi, "", nullptr);
-
-    ASSERT_THAT(err, Eq(nullptr));
-    ASSERT_THAT(fi.name, Eq("3"));
-    ASSERT_THAT(fi.size, Eq(100));
-
-}
-
-
-
-TEST_F(FolderDataBrokerTests, SecondNextReturnsAnotherFileInfo) {
-    data_broker->Connect();
-    FileInfo fi;
-    data_broker->GetNext(&fi, "", nullptr);
-
-    auto err = data_broker->GetNext(&fi, "", nullptr);
-
-    ASSERT_THAT(err, Eq(nullptr));
-    ASSERT_THAT(fi.name, Eq("2"));
-}
-
-TEST_F(FolderDataBrokerTests, SecondNextReturnsSameFileInfoIfReset) {
-    data_broker->Connect();
-    FileInfo fi;
-    data_broker->GetNext(&fi, "", nullptr);
-
-    auto err = data_broker->ResetCounter("");
-    ASSERT_THAT(err, Eq(nullptr));
-
-    err = data_broker->GetNext(&fi, "", nullptr);
-
-    ASSERT_THAT(err, Eq(nullptr));
-    ASSERT_THAT(fi.name, Eq("1"));
-}
-
-TEST_F(FolderDataBrokerTests, GetNextFromEmptyFolderReturnsError) {
-    data_broker->io__ = std::unique_ptr<IO> {new IOEmptyFolder()};
-    data_broker->Connect();
-    FileInfo fi;
-
-    auto err = data_broker->GetNext(&fi, "", nullptr);
-    ASSERT_THAT(err, Eq(asapo::ErrorTemplates::kEndOfFile));
-}
-
-TEST_F(FolderDataBrokerTests, GetNextReturnsErrorWhenFilePermissionsDenied) {
-    data_broker->io__ = std::unique_ptr<IO> {new IOCannotOpenFile()};
-    data_broker->Connect();
-    FileInfo fi;
-    FileData data;
-
-    auto err = data_broker->GetNext(&fi, "", &data);
-    ASSERT_THAT(err, Eq(asapo::IOErrorTemplates::kPermissionDenied));
-}
-
-
-class OpenFileMock : public FakeIO {
-};
-
-class GetDataFromFileTests : public Test {
-  public:
-    std::unique_ptr<FolderDataBroker> data_broker;
-    OpenFileMock mock;
-    FileInfo fi;
-    FileData data;
-    std::string expected_base_path = "/path/to/file";
-    void SetUp() override {
-        data_broker = std::unique_ptr<FolderDataBroker> {new FolderDataBroker(expected_base_path)};
-        data_broker->io__ = std::unique_ptr<IO> {&mock};
-        data_broker->Connect();
-    }
-    void TearDown() override {
-        data_broker->io__.release();
-    }
-};
-
-TEST_F(GetDataFromFileTests, GetNextCallsGetDataFileWithFileName) {
-    EXPECT_CALL(mock, GetDataFromFile_t(std::string("/path/to/file") + asapo::kPathSeparator + "1", _, _)).
-    WillOnce(DoAll(testing::SetArgPointee<2>(static_cast<SimpleError*>(nullptr)), testing::Return(nullptr)));
-
-    data_broker->GetNext(&fi, "", &data);
-}
-
-
-TEST_F(GetDataFromFileTests, GetNextReturnsDataAndInfo) {
-    EXPECT_CALL(mock, GetDataFromFile_t(_, _, _)).
-    WillOnce(DoAll(testing::SetArgPointee<2>(nullptr), testing::Return(new uint8_t[1] {'1'})));
-
-    data_broker->GetNext(&fi, "", &data);
-
-    ASSERT_THAT(data[0], Eq('1'));
-    ASSERT_THAT(fi.name, Eq("1"));
-
-}
-
-
-TEST_F(GetDataFromFileTests, RetrieveDataCallsReadsFile) {
-    data_broker->Connect();
-    FileInfo fi;
-    fi.name = "test";
-
-
-    EXPECT_CALL(mock, GetDataFromFile_t(expected_base_path + asapo::kPathSeparator + "test", _, _)).
-    WillOnce(DoAll(testing::SetArgPointee<2>(nullptr), testing::Return(new uint8_t[1] {'1'})));
-
-    auto err = data_broker->RetrieveData(&fi, &data);
-
-    ASSERT_THAT(data[0], Eq('1'));
-    ASSERT_THAT(err, Eq(nullptr));
-}
-
-TEST_F(GetDataFromFileTests, RetrieveDataReturnsErrorWithEmptyPointer) {
-    data_broker->Connect();
-
-    auto err = data_broker->RetrieveData(&fi, nullptr);
-
-    ASSERT_THAT(err, Ne(nullptr));
-}
-
-
-
-TEST_F(GetDataFromFileTests, GetNextReturnsErrorWhenCannotReadData) {
-    EXPECT_CALL(mock, GetDataFromFile_t(_, _, _)).
-    WillOnce(DoAll(testing::SetArgPointee<2>(asapo::IOErrorTemplates::kReadError.Generate().release()),
-                   testing::Return(nullptr)));
-
-    auto err = data_broker->GetNext(&fi, "", &data);
-
-    ASSERT_THAT(err->Explain(), Eq(asapo::IOErrorTemplates::kReadError.Generate()->Explain()));
-}
-
-TEST_F(GetDataFromFileTests, GetNextReturnsErrorWhenCannotAllocateData) {
-    EXPECT_CALL(mock, GetDataFromFile_t(_, _, _)).
-    WillOnce(DoAll(testing::SetArgPointee<2>(asapo::ErrorTemplates::kMemoryAllocationError.Generate().release()),
-                   testing::Return(nullptr)));
-
-    auto err = data_broker->GetNext(&fi, "", &data);
-
-    ASSERT_THAT(err->Explain(), Eq(asapo::ErrorTemplates::kMemoryAllocationError.Generate()->Explain()));
-}
-
-
-TEST_F(FolderDataBrokerTests, GetByIdReturnsFileInfo) {
-    data_broker->Connect();
-    FileInfo fi;
-
-    auto err = data_broker->GetById(1, &fi, "", nullptr);
-
-    ASSERT_THAT(err, Eq(nullptr));
-    ASSERT_THAT(fi.name, Eq("1"));
-    ASSERT_THAT(fi.size, Eq(100));
-
-}
-
-TEST_F(FolderDataBrokerTests, GetByIdReturnsError) {
-    data_broker->Connect();
-    FileInfo fi;
-
-    auto err1 = data_broker->GetById(0, &fi, "", nullptr);
-    auto err2 = data_broker->GetById(10, &fi, "", nullptr);
-
-    ASSERT_THAT(err1, Ne(nullptr));
-    ASSERT_THAT(err2, Ne(nullptr));
-}
-
-TEST_F(GetDataFromFileTests, GetMetaDataReturnsError) {
-    EXPECT_CALL(mock, ReadFileToString_t(_, _)).
-    WillOnce(DoAll(testing::SetArgPointee<1>(asapo::IOErrorTemplates::kReadError.Generate().release()),
-                   testing::Return("")));
-
-    Error err;
-    auto meta = data_broker->GetBeamtimeMeta(&err);
-    ASSERT_THAT(err, Eq(asapo::IOErrorTemplates::kReadError));
-}
-
-TEST_F(GetDataFromFileTests, GetMetaDataReturnsOK) {
-    EXPECT_CALL(mock, ReadFileToString_t(expected_base_path + asapo::kPathSeparator + "beamtime_global.meta", _)).
-    WillOnce(DoAll(testing::SetArgPointee<1>(nullptr),
-                   testing::Return("OK")));
-
-    Error err;
-    auto meta = data_broker->GetBeamtimeMeta(&err);
-    ASSERT_THAT(meta, Eq("OK"));
-    ASSERT_THAT(err, Eq(nullptr));
-}
-
-TEST(FolderDataBroker, QueryImages) {
-    auto data_broker = std::unique_ptr<FolderDataBroker> {new FolderDataBroker("test")};
-
-    Error err;
-    auto infos = data_broker->QueryImages("bla", &err);
-
-    ASSERT_THAT(err, Ne(nullptr));
-    ASSERT_THAT(infos.size(), Eq(0));
-}
-
-
-TEST(FolderDataBroker, NextDataset) {
-    auto data_broker = std::unique_ptr<FolderDataBroker> {new FolderDataBroker("test")};
-
-    Error err;
-    auto dataset = data_broker->GetNextDataset("bla", &err);
-
-    ASSERT_THAT(err, Ne(nullptr));
-    ASSERT_THAT(dataset.content.size(), Eq(0));
-    ASSERT_THAT(dataset.id, Eq(0));
-}
-
-TEST(FolderDataBroker, LastDataset) {
-    auto data_broker = std::unique_ptr<FolderDataBroker> {new FolderDataBroker("test")};
-
-    Error err;
-    auto dataset = data_broker->GetLastDataset("bla", &err);
-
-    ASSERT_THAT(err, Ne(nullptr));
-    ASSERT_THAT(dataset.content.size(), Eq(0));
-    ASSERT_THAT(dataset.id, Eq(0));
-}
-
-
-TEST(FolderDataBroker, DatasetById) {
-    auto data_broker = std::unique_ptr<FolderDataBroker> {new FolderDataBroker("test")};
-
-    Error err;
-    auto dataset = data_broker->GetDatasetById(0, "bla", &err);
-
-    ASSERT_THAT(err, Ne(nullptr));
-    ASSERT_THAT(dataset.content.size(), Eq(0));
-    ASSERT_THAT(dataset.id, Eq(0));
-}
-
-
-}