diff --git a/.gitignore b/.gitignore
index 5f8c3feae5ccb8f8500cf0d54ded6da0079cfe02..d7c8d680d0598ba1ac993843cf1d2ee7f45fad02 100644
--- a/.gitignore
+++ b/.gitignore
@@ -129,7 +129,7 @@ asapo_tools/pkg
 
 #version files
 
-common/cpp/include/asapo/common/version.h
+common/cpp/include/asapo/common/internal/version.h
 common/go/src/asapo_common/version/version_lib.go
 
 
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1a5ec00419269b0849b8be9e0285775a73271113..27c7a0824aafd5d923b714998da6b8de03a3146b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,17 @@
+## 21.03.0
+
+ IMPROVEMENTS
+* Producer API - queue limits in Python, for C++ return original data in error custom data      
+* Consumer API - add GetCurrentDatasetCount/get_current_dataset_count function with option to include or exclude incomplete datasets
+* Consumer API - GetStreamList/get_stream_list - can filter finished/unfinished streams now
+* Producer/Consumer API - StreamInfo structure/Python dictionary include more information (is stream finished or not, ...) 
+* Switch to JWT tokens (token has more symbols, expiration time, can be revoked and there are two type of tokens - with read/write access rights)
+* Improved versioning. Producer/Consumer API - introduce GetVersionInfo/get_version_info, compatibility check between clients and server
+
+BREAKING CHANGES
+* Consumer API (C++ only)- GetStreamList has now extra argument StreamFilter
+* Consumer/Producer libraries need to be updated due to protocol changes
+
 ## 20.12.0
 
 FEATURES
diff --git a/CMakeLists.txt b/CMakeLists.txt
index bdba0ff646a6281f95f0ce701e2744db132003a3..b63879369a8f34781829739606389732150ce49e 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,5 +1,17 @@
 cmake_minimum_required(VERSION 3.7)
 project(ASAPO)
+
+# Protocol version changes if one of the microservice APIs changes
+set (ASAPO_CONSUMER_PROTOCOL "v0.1")
+set (ASAPO_PRODUCER_PROTOCOL "v0.1")
+
+set (ASAPO_DISCOVERY_API_VER "v0.1")
+set (ASAPO_AUTHORIZER_API_VER "v0.1")
+set (ASAPO_BROKER_API_VER "v0.1")
+set (ASAPO_FILE_TRANSFER_SERVICE_API_VER "v0.1")
+set (ASAPO_RECEIVER_API_VER "v0.1")
+set (ASAPO_RDS_API_VER "v0.1")
+
 set(CMAKE_CXX_STANDARD 11)
 IF(WIN32)
     set(CMAKE_CXX_FLAGS_DEBUG "/MTd")
diff --git a/CMakeModules/coverage_go.sh b/CMakeModules/coverage_go.sh
index 13ddb7a413da1f36d24fd24021a75760a25bc082..5b01cf48bb047093f0605fb0b115751c1aa38be4 100755
--- a/CMakeModules/coverage_go.sh
+++ b/CMakeModules/coverage_go.sh
@@ -4,8 +4,6 @@ SOURCE_DIR=$1
 OUT_DIR=$2
 ASAPO_MINIMUM_COVERAGE=$3
 
-export GOPATH=$GOPATH:$4
-
 echo $OUT_DIR
 
 touch $OUT_DIR/coverage.out
diff --git a/CMakeModules/prepare_asapo.cmake b/CMakeModules/prepare_asapo.cmake
index 0c5baa12c4bf1815588ba447d22fe4fe78a51d2a..7ef66d96fc259d1a812691f43e245a7d2205164f 100644
--- a/CMakeModules/prepare_asapo.cmake
+++ b/CMakeModules/prepare_asapo.cmake
@@ -50,6 +50,8 @@ function(prepare_asapo)
     configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/broker_settings.json.tpl broker.json.tpl COPYONLY)
     configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/file_transfer_settings.json.tpl file_transfer.json.tpl COPYONLY)
     configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/auth_secret.key auth_secret.key COPYONLY)
+    configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/admin_token.key admin_token.key COPYONLY)
+    configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/auth_secret_admin.key auth_secret_admin.key COPYONLY)
     configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/nginx.conf.tpl nginx.conf.tpl COPYONLY)
     configure_file(${CMAKE_SOURCE_DIR}/config/nomad/nginx.nmd.in nginx.nmd @ONLY)
 
diff --git a/CMakeModules/prepare_version.cmake b/CMakeModules/prepare_version.cmake
index 02bd80de29d1097719e79e97c87df80b850e7a76..7ef56e9486d3c798234c767aedcca48654cf5821 100644
--- a/CMakeModules/prepare_version.cmake
+++ b/CMakeModules/prepare_version.cmake
@@ -1,4 +1,4 @@
 string(TIMESTAMP TIMESTAMP "%H:%M:%S %d.%m.%Y UTC" UTC)
 
-configure_file(${PROJECT_SOURCE_DIR}/common/cpp/include/asapo/common/version.h.in ${PROJECT_SOURCE_DIR}/common/cpp/include/asapo/common/version.h @ONLY)
+configure_file(${PROJECT_SOURCE_DIR}/common/cpp/include/asapo/common/internal/version.h.in ${PROJECT_SOURCE_DIR}/common/cpp/include/asapo/common/internal/version.h @ONLY)
 configure_file(${PROJECT_SOURCE_DIR}/common/go/src/asapo_common/version/version_lib.go.in ${PROJECT_SOURCE_DIR}/common/go/src/asapo_common/version/version_lib.go @ONLY)
diff --git a/CMakeModules/prepare_version_tag.cmake b/CMakeModules/prepare_version_tag.cmake
index 308beae3a9cf818823b21b68b568e6dbdf1e0085..35273cb2576e98f211cd4fb36b662070dc417e92 100644
--- a/CMakeModules/prepare_version_tag.cmake
+++ b/CMakeModules/prepare_version_tag.cmake
@@ -14,7 +14,7 @@ execute_process(COMMAND git rev-parse --abbrev-ref HEAD
 string(STRIP ${BRANCH} BRANCH)
 cleanup(BRANCH)
 
-execute_process(COMMAND git rev-parse --short HEAD
+execute_process(COMMAND git rev-parse --short=10 HEAD
         OUTPUT_VARIABLE ASAPO_VERSION_COMMIT
         WORKING_DIRECTORY ${PROJECT_SOURCE_DIR})
 string(STRIP ${ASAPO_VERSION_COMMIT} ASAPO_VERSION_COMMIT)
diff --git a/CMakeModules/testing_cpp.cmake b/CMakeModules/testing_cpp.cmake
index af30b9f802b0adf31f95a012855ea6586c711f7e..cd62e14b3178ba319c894c06ca8e124db9ddc066 100644
--- a/CMakeModules/testing_cpp.cmake
+++ b/CMakeModules/testing_cpp.cmake
@@ -2,6 +2,18 @@ if (BUILD_TESTS OR BUILD_INTEGRATION_TESTS OR BUILD_EXAMPLES)
     enable_testing()
 endif ()
 
+set (TOKENS "ASAPO_TEST_RW_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiJjMTkyMXJqaXB0MzVja3MzYTEwZyIsInN1YiI6ImJ0X2FzYXBvX3Rlc3QiLCJFeHRyYUNsYWltcyI6eyJBY2Nlc3NUeXBlcyI6WyJyZWFkIiwid3JpdGUiXX19.3PFdG0f48yKrOyJwPErYcewpcbZgnd8rBmBphw_kdJ0")
+set (TOKENS "${TOKENS};ASAPO_CREATE_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiJjMTkyYzMzaXB0Mzdkb3IzYmZjZyIsInN1YiI6ImFkbWluIiwiRXh0cmFDbGFpbXMiOnsiQWNjZXNzVHlwZXMiOlsiY3JlYXRlIl19fQ.AI41cZ7dZL0g-rrdKIQgd7ijjzuyH1Fm0xojCXwLNBo")
+set (TOKENS "${TOKENS};C20180508_000_COM20181_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiJjMTkyaDRiaXB0Mzd1cGo1aDdlMCIsInN1YiI6ImJ0X2MyMDE4MDUwOC0wMDAtQ09NMjAxODEiLCJFeHRyYUNsYWltcyI6eyJBY2Nlc3NUeXBlcyI6WyJyZWFkIiwid3JpdGUiXX19.yONpjW2ybZMc9E9Eu4Hmn1roVR-mxf2OQQyXfnel5C8")
+set (TOKENS "${TOKENS};BT11000015_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiJjMTkyajZqaXB0MzA3aHU1amwxZyIsInN1YiI6ImJ0XzExMDAwMDE1IiwiRXh0cmFDbGFpbXMiOnsiQWNjZXNzVHlwZXMiOlsicmVhZCJdfX0.kVs669HAS4sj9VAZk8pWTLrYNQp46mOnH4id4-_qd9g")
+set (TOKENS "${TOKENS};BT11000016_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiJjMTkyajQzaXB0MzA3OWxwc3Z2ZyIsInN1YiI6ImJ0XzExMDAwMDE2IiwiRXh0cmFDbGFpbXMiOnsiQWNjZXNzVHlwZXMiOlsicmVhZCJdfX0.mpTVGtcdR0l4NaeHFTf16iWrfMYaLzh2pAjN5muil6Q")
+set (TOKENS "${TOKENS};BLP07_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiJjMTkyaXBqaXB0MzAzajNsZ3NnZyIsInN1YiI6ImJsX3AwNyIsIkV4dHJhQ2xhaW1zIjp7IkFjY2Vzc1R5cGVzIjpbInJlYWQiXX19.L0kNSCj32WHMEfzV9t0c2tKabK_klQFPZgLu66voDFc")
+set (TOKENS "${TOKENS};BLP07_W_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiJjMTkya3BiaXB0MzBkMjJmMTBmMCIsInN1YiI6ImJsX3AwNyIsIkV4dHJhQ2xhaW1zIjp7IkFjY2Vzc1R5cGVzIjpbIndyaXRlIl19fQ.BEHzU8gjHWSS-E5VbSwXzOBmeqScIceVD2XACGKZ46E")
+set (TOKENS "${TOKENS};BT_DATA_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiJjMTkybXEzaXB0MzBnbGp0YzlzMCIsInN1YiI6ImJ0X2RhdGEiLCJFeHRyYUNsYWltcyI6eyJBY2Nlc3NUeXBlcyI6WyJyZWFkIl19fQ.A5lLIJl-F6BGdWHdD9o0YOs5E9UPPFTylIdJocB10HI")
+set (TOKENS "${TOKENS};BT_TEST_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiJjMTkybnViaXB0MzBsMjlpcXNxMCIsInN1YiI6ImJ0X3Rlc3QiLCJFeHRyYUNsYWltcyI6eyJBY2Nlc3NUeXBlcyI6WyJyZWFkIl19fQ.8dh4KIusIVk75MGiWjoj23_cesLLWSMDjU8vb0RHVtU")
+set (TOKENS "${TOKENS};BT_AAA_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiJjMTkycDFiaXB0MzBub3AwcTNlZyIsInN1YiI6ImJ0X2FhYSIsIkV4dHJhQ2xhaW1zIjp7IkFjY2Vzc1R5cGVzIjpbInJlYWQiXX19.dt3ifrG3zqQP4uM2kaoe7ydDjUdFeasOB07fVRfFApE")
+set (TOKENS "${TOKENS};BT_TEST_RUN_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiJjMTk0NjYzaXB0Mzdma2w0YmVrMCIsInN1YiI6ImJ0X3Rlc3RfcnVuIiwiRXh0cmFDbGFpbXMiOnsiQWNjZXNzVHlwZXMiOlsicmVhZCJdfX0.QJjoGOlzMvOUk7dK2bbDgSEM5-1mO6wmpmESYL6McdU")
+
 if (BUILD_TESTS)
     set(ASAPO_MINIMUM_COVERAGE 70)
     find_package(Threads)
@@ -203,6 +215,7 @@ function(add_script_test testname arguments)
                     separate_arguments(memargs)
                     add_test(NAME memtest-${testname} COMMAND bash ${CMAKE_CURRENT_SOURCE_DIR}/check_linux.sh
                             ${memargs})
+                    set_tests_properties(memtest-${testname} PROPERTIES ENVIRONMENT "${TOKENS}")
                     set_tests_properties(memtest-${testname} PROPERTIES
                             LABELS "memcheck_${label};all"
                             DEPENDS test-${testname}
@@ -211,6 +224,7 @@ function(add_script_test testname arguments)
                 endif ()
             endif ()
         ENDIF ()
+        set_tests_properties(test-${testname} PROPERTIES ENVIRONMENT "${TOKENS}")
         set_tests_properties(test-${testname} PROPERTIES
                 LABELS "example;all"
                 )
diff --git a/CMakeModules/testing_go.cmake b/CMakeModules/testing_go.cmake
index dbba4bfe2bde4687bc416bf0da08314b87e613d3..f130ab796ec0d94aeebffc94252ba628ea55b4aa 100644
--- a/CMakeModules/testing_go.cmake
+++ b/CMakeModules/testing_go.cmake
@@ -13,11 +13,6 @@ function(gotest target source_dir test_source_files)
     if (BUILD_TESTS)
                 add_test(NAME test-${target} COMMAND go test ${test_source_files}
                 WORKING_DIRECTORY ${source_dir})
-        set_property(
-                TEST
-                test-${target}
-                PROPERTY
-                ENVIRONMENT "GOPATH=${gopath}")
         message(STATUS "Added test 'test-${target}'")
         if (CMAKE_COMPILER_IS_GNUCXX)
         add_test(NAME coveragetest-${target}
@@ -29,16 +24,11 @@ function(gotest target source_dir test_source_files)
     endif ()
 endfunction()
 
-function(go_integration_test target test_source_files label)
+function(go_integration_test target source_dir test_source_files label)
     if (BUILD_TESTS)
         add_test(NAME test-${target} COMMAND go test ${test_source_files} -run ${label}
                 -tags integration_tests
-                WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
-        set_property(
-                TEST
-                test-${target}
-                PROPERTY
-                ENVIRONMENT "GOPATH=${gopath}")
+                WORKING_DIRECTORY ${source_dir})
         message(STATUS "Added test 'test-${target}'")
     endif ()
 endfunction()
diff --git a/PROTOCOL-VERSIONS.md b/PROTOCOL-VERSIONS.md
new file mode 100644
index 0000000000000000000000000000000000000000..e47b371f29a04c26cd9b121159f1453bf4a99a17
--- /dev/null
+++ b/PROTOCOL-VERSIONS.md
@@ -0,0 +1,10 @@
+### Producer Protocol
+| Release      | Supported by client | Supported by server  | Status           |
+| ------------ | ------------------- | -------------------- | ---------------- |
+| v0.1         | 21.03.0 - 21.03.0   | 21.03.0  - 21.03.0   | Current version  |
+
+
+### Consumer Protocol
+| Release      | Supported by client | Supported by server  | Status           |
+| ------------ | ------------------- | -------------------- | ---------------- |
+| v0.1         | 21.03.0 - 21.03.0   | 21.03.0  - 21.03.0   | Current version  |
diff --git a/VERSIONS.md b/VERSIONS.md
new file mode 100644
index 0000000000000000000000000000000000000000..67aeaf79892c01d4e1ab4a5b850826d4a5837900
--- /dev/null
+++ b/VERSIONS.md
@@ -0,0 +1,11 @@
+### Producer API
+
+| Release      | API changed | Breaking changes | Protocol | Supported by server up to | Status |
+| ------------ | ----------- |----------------- | -------- | ------------------------- | ------- |
+| 21.03.0      | Yes         | No               | v0.1     | 21.03.0                   | Current version  |
+
+### Consumer API
+
+| Release      | API changed | Breaking changes | Protocol | Supported by server up to | Status |
+| ------------ | ----------- |----------------- | -------- | ------------------------- | ------- |
+| 21.03.0      | Yes         | Yes              | v0.1     | 21.03.0                   | Current version  |
diff --git a/asapo_tools/CMakeLists.txt b/asapo_tools/CMakeLists.txt
index 48cf94c7b756447465c55884be6c8ebd2bbe8f5e..4ac248cce955ad6ced89a510d9962111ef15ce37 100644
--- a/asapo_tools/CMakeLists.txt
+++ b/asapo_tools/CMakeLists.txt
@@ -1,20 +1,8 @@
 set (TARGET_NAME asapo)
 
-if (NOT "$ENV{GOPATH}" STREQUAL "")
-	set(GOPATH $ENV{GOPATH})
-endif()
-
-if (NOT GOPATH)
-    message (FATAL_ERROR "GOPATH not set")
-endif()
-
-message(STATUS "global gopath ${GOPATH}")
-
 IF(WIN32)
-    set (gopath "${GOPATH}\;${CMAKE_CURRENT_SOURCE_DIR}\;${CMAKE_SOURCE_DIR}/common/go")
     set (exe_name "${TARGET_NAME}.exe")
 ELSE()
-    set (gopath ${GOPATH}:${CMAKE_CURRENT_SOURCE_DIR}:${CMAKE_SOURCE_DIR}/common/go)
     set (exe_name "${TARGET_NAME}")
 #    set (GO_OPTS "GOOS=linux;CGO_ENABLED=0")
 ENDIF()
@@ -22,13 +10,13 @@ ENDIF()
 include(testing_go)
 
 add_custom_target(asapo ALL
-    COMMAND  ${CMAKE_COMMAND} -E env GOPATH=${gopath}
-        ${GO_OPTS} go build -o ${exe_name} asapo_tools/main
-    VERBATIM)
+    COMMAND  go build ${GO_OPTS} -o ${CMAKE_CURRENT_BINARY_DIR}/${exe_name} main/asapo.go
+        WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/src/asapo_tools
+        VERBATIM)
 define_property(TARGET PROPERTY EXENAME
         BRIEF_DOCS <executable name>
         FULL_DOCS <full-doc>)
 
 set_target_properties(${TARGET_NAME} PROPERTIES EXENAME ${CMAKE_CURRENT_BINARY_DIR}/${exe_name})
 
-gotest(${TARGET_NAME}  "${CMAKE_CURRENT_SOURCE_DIR}" "./...")
+gotest(${TARGET_NAME}  "${CMAKE_CURRENT_SOURCE_DIR}/src/asapo_tools" "./...")
diff --git a/asapo_tools/src/asapo_tools/cli/command.go b/asapo_tools/src/asapo_tools/cli/command.go
index b61171a1a94dc23e97eebd89f41d7d8bd635198b..24f3bb619fa185f32ef84bb5dc5ee1aa954bf6d4 100644
--- a/asapo_tools/src/asapo_tools/cli/command.go
+++ b/asapo_tools/src/asapo_tools/cli/command.go
@@ -1,7 +1,6 @@
 package cli
 
 import (
-	"errors"
 	"flag"
 	"fmt"
 )
@@ -21,10 +20,6 @@ func (cmd *command) description(d string) bool {
 	return false
 }
 
-func (cmd *command) errBadOptions(err string) error {
-	return errors.New("asapo " + cmd.name + ": " + err + "\nType 'asapo " + cmd.name + " --help'")
-}
-
 // createDefaultFlagset creates new flagset and adds default help behaviour.
 func (cmd *command) createDefaultFlagset(description, args string) *flag.FlagSet {
 
diff --git a/asapo_tools/src/asapo_tools/cli/command_test.go b/asapo_tools/src/asapo_tools/cli/command_test.go
index c4e89f2239f69728ba190d4db6d7aa59011550b0..a02899e1725f039ee004b3e5aaa215dd6977fc2b 100644
--- a/asapo_tools/src/asapo_tools/cli/command_test.go
+++ b/asapo_tools/src/asapo_tools/cli/command_test.go
@@ -10,7 +10,7 @@ var CommandTests = []struct {
 	cmd    command
 	answer string
 }{
-	{command{"token", []string{"-secret", "secret_file", "beamtime"}}, "secret"},
+	{command{"token", []string{"-secret", "secret_file","-types","read","-endpoint","bla", "beamtime"}}, "secret"},
 	{command{"dummy", []string{"description"}}, "wrong"},
 }
 
@@ -22,7 +22,6 @@ func TestCommand(t *testing.T) {
 		err := DoCommand(test.cmd.name, test.cmd.args)
 		assert.Contains(t, err.Error(), test.answer, "")
 		assert.NotNil(t, err, "Should be error")
-
 	}
 
 }
diff --git a/asapo_tools/src/asapo_tools/cli/token.go b/asapo_tools/src/asapo_tools/cli/token.go
index cc68a114f10690966116d9ce74c7b73386bd9e69..be2aca99b47a46191ce1b71804f6ef20c57b1715 100644
--- a/asapo_tools/src/asapo_tools/cli/token.go
+++ b/asapo_tools/src/asapo_tools/cli/token.go
@@ -1,29 +1,40 @@
 package cli
 
 import (
+	"asapo_common/structs"
+	"asapo_common/utils"
+	"asapo_tools/rest_client"
+	"bytes"
+	"encoding/json"
 	"errors"
-	"os"
 	"fmt"
-	"asapo_common/utils"
+	"io"
+	"net/http"
+	"os"
+	"strings"
 )
 
 type tokenFlags struct {
-	Name       string
-	SecretFile string
+	Name         string
+	Endpoint     string
+	AccessTypes  []string
+	SecretFile   string
+	DaysValid int
+	TokenDetails bool
 }
 
-func generateToken(id string,secret string) string {
-	hmac := utils.NewHMACAuth(secret)
-	token,err := hmac.GenerateToken(&id)
+func generateToken(flags tokenFlags, secret string) string {
+	//	hmac := utils.NewHMACAuth(secret)
+	//	token,err := hmac.GenerateToken(&id)
 
-	if (err!=nil) {
-		fmt.Println(err.Error())
-	}
-	return token
+	//	if (err!=nil) {
+	//		fmt.Println(err.Error())
+	//	}
+	//	return token
+	return ""
 }
 
-
-// GenerateToken generates token for consumers
+// CommandToken receives token from authorization server
 func (cmd *command) CommandToken() error {
 
 	message_string := "Generate token"
@@ -38,24 +49,70 @@ func (cmd *command) CommandToken() error {
 	}
 
 	secret, err := utils.ReadFirstStringFromFile(flags.SecretFile)
-	if err !=nil  {
+	if err != nil {
 		return err
 	}
 
-	fmt.Fprintf(outBuf, "%s\n", generateToken(flags.Name,secret))
+	request := structs.IssueTokenRequest{
+		Subject:    map[string]string{"beamtimeId": flags.Name},
+		DaysValid:  flags.DaysValid,
+		AccessTypes: flags.AccessTypes,
+	}
+	json_data, _ := json.Marshal(request)
+	path := flags.Endpoint + "/admin/issue"
 
-	return nil
-}
+	req, err := http.NewRequest("POST", path, bytes.NewBuffer(json_data))
+	if err != nil {
+		return err
+	}
+	req.Header.Add("Content-Type", "application/json")
+	req.Header.Add("Authorization", "Bearer "+secret)
 
+	resp, err := rest_client.Client.Do(req)
+	if err != nil {
+		return err
+	}
+	defer resp.Body.Close()
+
+	body, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return err
+	}
+
+	if resp.StatusCode != http.StatusOK {
+		return errors.New("returned " + resp.Status + ": " + string(body))
+	}
+
+	if flags.TokenDetails {
+		fmt.Fprintf(outBuf, "%s\n", string(body))
+		return nil
+	}
+
+	var token structs.IssueTokenResponse
+
+	err = json.Unmarshal(body, &token)
+	if err == nil {
+		fmt.Fprintf(outBuf, "%s\n", token.Token)
+	}
+	return err
+}
 
 func (cmd *command) parseTokenFlags(message_string string) (tokenFlags, error) {
 
 	var flags tokenFlags
 	flagset := cmd.createDefaultFlagset(message_string, "<token_body>")
 	flagset.StringVar(&flags.SecretFile, "secret", "", "path to file with secret")
+	var at string
+	flagset.StringVar(&at, "types", "", "access typea")
+	flagset.StringVar(&flags.Endpoint, "endpoint", "", "asapo endpoint")
+	flagset.BoolVar(&flags.TokenDetails, "token-details", false, "output token details")
+	flagset.IntVar(&flags.DaysValid, "duration-days", 180, "token duration in days")
 
 	flagset.Parse(cmd.args)
 
+	flags.AccessTypes = strings.Split(at,",")
+
+
 	if printHelp(flagset) {
 		os.Exit(0)
 	}
@@ -63,13 +120,22 @@ func (cmd *command) parseTokenFlags(message_string string) (tokenFlags, error) {
 	flags.Name = flagset.Arg(0)
 
 	if flags.Name == "" {
-		return flags, errors.New("beamtime id or beamline missed ")
+		return flags, errors.New("payload missed ")
 	}
 
 	if flags.SecretFile == "" {
 		return flags, errors.New("secret file missed ")
 	}
 
+	if flags.Endpoint == "" {
+		return flags, errors.New("endpoint missed ")
+	}
+
+	for _,at:=range flags.AccessTypes {
+		if at!="read" && at!="write" {
+			return flags,errors.New("incorrect access type")
+		}
+	}
 
 	return flags, nil
 
diff --git a/asapo_tools/src/asapo_tools/cli/token_test.go b/asapo_tools/src/asapo_tools/cli/token_test.go
index 4d443f3c493f86aceb6156e5638aeb2f802a71bb..c43ad293be48b77667b583c55b73fbb2fae2abbf 100644
--- a/asapo_tools/src/asapo_tools/cli/token_test.go
+++ b/asapo_tools/src/asapo_tools/cli/token_test.go
@@ -1,35 +1,69 @@
 package cli
 
 import (
+	"asapo_tools/mocks"
+	"asapo_tools/rest_client"
+	"encoding/json"
+	"net/http"
 	"testing"
 
-	"github.com/stretchr/testify/assert"
 	"bytes"
+	"github.com/stretchr/testify/assert"
 	"io/ioutil"
 	"os"
 )
 
 var tokenTests = []struct {
 	cmd      command
-	answer string
+	withDetails bool
+	ok bool
 	msg  string
 }{
-	{command{args: []string{"beamtime_id"}},  "secret", "no secret parameter"},
-	{command{args: []string{"-secret","secret.tmp"}},  "beamtime id", "no file"},
-	{command{args: []string{"-secret","not_existing_file","beamtime_id"}},  "not_existing_file", "no file"},
-	{command{args: []string{"-secret","secret.tmp","beamtime_id"}},  "eodk3s5ZXwACLGyVA63MZYcOTWuWE4bceI9Vxl9zejI=", "no file"},
+	{command{args: []string{"beamtime_id"}},  false,false, "no secret parameter"},
+	{command{args: []string{"-secret","secret.tmp"}},  false,false, "no file"},
+	{command{args: []string{"-secret","not_existing_file","payload"}}, false, false, "no file"},
+	{command{args: []string{"-secret","secret.tmp","beamtime_id"}},false,  false, "type is missing"},
+	{command{args: []string{"-secret","secret.tmp","-types","read","beamtime_id"}}, false, false, "endpoint is missing"},
+	{command{args: []string{"-secret","secret.tmp","-types","read","-endpoint","endpoint","-token-details","beamtime_id"}},true,  true, "ok"},
+	{command{args: []string{"-secret","secret.tmp","-types","read","-endpoint","endpoint","beamtime_id"}},  false,true, "without details"},
 }
 
 func TestParseTokenFlags(t *testing.T) {
 
 	ioutil.WriteFile("secret.tmp", []byte("secret"), 0644)
-	outBuf = new(bytes.Buffer)
+
+	rest_client.Client = &mocks.MockClient{}
+
+
+	mocks.DoFunc = func(req *http.Request) (*http.Response, error) {
+		json := `{"Token":"blabla","Uri":"`+req.URL.Path+`"}`
+		r := ioutil.NopCloser(bytes.NewReader([]byte(json)))
+
+		return &http.Response{
+			StatusCode: 200,
+			Body:       r,
+		}, nil
+	}
+
 	for _, test := range tokenTests {
+		outBuf = new(bytes.Buffer)
 		err := test.cmd.CommandToken()
-		if err == nil {
-			assert.Contains(t, outBuf.(*bytes.Buffer).String(), test.answer, test.msg)
+		if test.ok {
+			assert.Nil(t, err, test.msg)
+			resp := struct {
+				Token string
+				Uri string
+			}{}
+			if test.withDetails {
+				err := json.Unmarshal(outBuf.(*bytes.Buffer).Bytes(),&resp)
+				assert.Nil(t, err, test.msg)
+				assert.Equal(t,  "blabla", resp.Token,test.msg)
+				assert.Equal(t, "endpoint/admin/issue",resp.Uri, test.msg)
+			} else {
+				assert.Equal(t,  "blabla\n", outBuf.(*bytes.Buffer).String(),test.msg)
+			}
 		} else {
-			assert.Contains(t, err.Error(), test.answer, test.msg)
+			assert.NotNil(t, err, test.msg)
 		}
 
 	}
diff --git a/asapo_tools/src/asapo_tools/go.mod b/asapo_tools/src/asapo_tools/go.mod
new file mode 100644
index 0000000000000000000000000000000000000000..85df870aee982144559bdda3bbaac0daa4b034b3
--- /dev/null
+++ b/asapo_tools/src/asapo_tools/go.mod
@@ -0,0 +1,12 @@
+module asapo_tools
+
+go 1.16
+
+replace asapo_common v0.0.0 => ../../../common/go/src/asapo_common
+
+require (
+	asapo_common v0.0.0
+	github.com/kr/pretty v0.2.0 // indirect
+	github.com/stretchr/testify v1.7.0
+	gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect
+)
diff --git a/asapo_tools/src/asapo_tools/go.sum b/asapo_tools/src/asapo_tools/go.sum
new file mode 100644
index 0000000000000000000000000000000000000000..3e9dd5cbe821c458765373bc58e66d087906c706
--- /dev/null
+++ b/asapo_tools/src/asapo_tools/go.sum
@@ -0,0 +1,26 @@
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
+github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
+github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
+github.com/kr/pretty v0.2.0 h1:s5hAObm+yFO5uHYt5dYjxi2rXrsnmRpJx4OYvIWUaQs=
+github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
+github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/magefile/mage v1.10.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/sirupsen/logrus v1.8.0/go.mod h1:4GuYW9TZmE769R5STWrRakJc4UqQ3+QQ95fyz7ENv1A=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
+gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/asapo_tools/src/asapo_tools/main/asapo.go b/asapo_tools/src/asapo_tools/main/asapo.go
index 66d5829ec67bd6b966a938b7940b3119505e9539..256dedcac06e54ee1289b56b66b631ab4fd16197 100644
--- a/asapo_tools/src/asapo_tools/main/asapo.go
+++ b/asapo_tools/src/asapo_tools/main/asapo.go
@@ -1,8 +1,10 @@
 package main
 
 import (
+	"asapo_tools/rest_client"
 	"flag"
 	"fmt"
+	"net/http"
 	"os"
 	"asapo_common/version"
 	"asapo_tools/cli"
@@ -20,6 +22,8 @@ func main() {
 
 	flag.Parse()
 
+	rest_client.Client = &http.Client{}
+
 	if *flHelp || flag.NArg() == 0 {
 		flag.Usage()
 		cli.PrintAllCommands()
diff --git a/asapo_tools/src/asapo_tools/mocks/mock_client.go b/asapo_tools/src/asapo_tools/mocks/mock_client.go
new file mode 100644
index 0000000000000000000000000000000000000000..0cf47f5ffa267a21ae472c614d4c8b01f15c81d7
--- /dev/null
+++ b/asapo_tools/src/asapo_tools/mocks/mock_client.go
@@ -0,0 +1,13 @@
+package mocks
+
+import "net/http"
+
+type MockClient struct {
+	DoFunc func(req *http.Request) (*http.Response, error)
+}
+
+func (m *MockClient) Do(req *http.Request) (*http.Response, error) {
+	return DoFunc(req)
+}
+
+var DoFunc func(req *http.Request) (*http.Response, error)
diff --git a/asapo_tools/src/asapo_tools/rest_client/rest_client.go b/asapo_tools/src/asapo_tools/rest_client/rest_client.go
new file mode 100644
index 0000000000000000000000000000000000000000..b303b59380056723d9a3d404d73c392edd8767dd
--- /dev/null
+++ b/asapo_tools/src/asapo_tools/rest_client/rest_client.go
@@ -0,0 +1,9 @@
+package rest_client
+
+import "net/http"
+
+type HTTPClient interface {
+	Do(req *http.Request) (*http.Response, error)
+}
+
+var Client HTTPClient
diff --git a/authorizer/CMakeLists.txt b/authorizer/CMakeLists.txt
index a48ac016d99a5f032de55f5f5a271bafe7aee03f..cc8384833523a807b0acb8c5b904c81003df3ffe 100644
--- a/authorizer/CMakeLists.txt
+++ b/authorizer/CMakeLists.txt
@@ -1,20 +1,8 @@
 set (TARGET_NAME asapo-authorizer)
 
-if (NOT "$ENV{GOPATH}" STREQUAL "")
-	set(GOPATH $ENV{GOPATH})
-endif()
-
-if (NOT GOPATH)
-    message (FATAL_ERROR "GOPATH not set")
-endif()
-
-message(STATUS "global gopath ${GOPATH}")
-
 IF(WIN32)
-    set (gopath "${GOPATH}\;${CMAKE_CURRENT_SOURCE_DIR}\;${CMAKE_SOURCE_DIR}/common/go")
     set (exe_name "${TARGET_NAME}.exe")
 ELSE()
-    set (gopath ${GOPATH}:${CMAKE_CURRENT_SOURCE_DIR}:${CMAKE_SOURCE_DIR}/common/go)
     set (exe_name "${TARGET_NAME}")
 ENDIF()
 
@@ -23,8 +11,8 @@ include(testing_go)
 configure_file(docker/Dockerfile . COPYONLY)
 
 add_custom_target(asapo-authorizer ALL
-    COMMAND  ${CMAKE_COMMAND} -E env GOPATH=${gopath}
-    go build ${GO_OPTS} -o ${exe_name} asapo_authorizer/main
+    COMMAND go build ${GO_OPTS} -o ${CMAKE_CURRENT_BINARY_DIR}/${exe_name} main/authorizer.go
+    WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/src/asapo_authorizer
     VERBATIM)
 define_property(TARGET PROPERTY EXENAME
         BRIEF_DOCS <executable name>
@@ -32,6 +20,4 @@ define_property(TARGET PROPERTY EXENAME
 
 set_target_properties(asapo-authorizer PROPERTIES EXENAME ${CMAKE_CURRENT_BINARY_DIR}/${exe_name})
 
-gotest(${TARGET_NAME}  "${CMAKE_CURRENT_SOURCE_DIR}" "./...")
-#go_integration_test(${TARGET_NAME}-connectdb "./..." "MongoDBConnect")
-#go_integration_test(${TARGET_NAME}-nextrecord "./..." "MongoDBNext")
+gotest(${TARGET_NAME}  "${CMAKE_CURRENT_SOURCE_DIR}/src/asapo_authorizer" "./...")
diff --git a/authorizer/src/asapo_authorizer/authorization/authorization.go b/authorizer/src/asapo_authorizer/authorization/authorization.go
new file mode 100644
index 0000000000000000000000000000000000000000..1a0e85b25a6041b26e69a87705f36732f96a5838
--- /dev/null
+++ b/authorizer/src/asapo_authorizer/authorization/authorization.go
@@ -0,0 +1,79 @@
+package authorization
+
+import (
+	"asapo_common/structs"
+	"asapo_common/utils"
+	"encoding/json"
+	"github.com/rs/xid"
+	"time"
+)
+
+type Auth struct {
+	authUser  utils.Auth
+	authAdmin utils.Auth
+	authJWT   utils.Auth
+}
+
+func NewAuth(authUser,authAdmin,authJWT utils.Auth) *Auth {
+	return &Auth{authUser,authAdmin,authJWT}
+}
+
+func (auth *Auth) AdminAuth() utils.Auth {
+	return auth.authAdmin
+}
+
+func (auth *Auth) UserAuth() utils.Auth {
+	return auth.authUser
+}
+
+func (auth *Auth) JWTAuth() utils.Auth {
+	return auth.authJWT
+}
+
+func subjectFromRequest(request structs.IssueTokenRequest) string {
+	for key,value := range request.Subject {
+		switch key {
+		case "beamline":
+			return utils.SubjectFromBeamline(value)
+		case "beamtimeId":
+			return utils.SubjectFromBeamtime(value)
+		default:
+			return value
+		}
+	}
+	return ""
+}
+
+func (auth *Auth) PrepareAccessToken(request structs.IssueTokenRequest, userToken bool) (string, error) {
+	var claims utils.CustomClaims
+	var extraClaim structs.AccessTokenExtraClaim
+
+	claims.Subject = subjectFromRequest(request)
+
+	extraClaim.AccessTypes = request.AccessTypes
+	claims.ExtraClaims = &extraClaim
+	claims.SetExpiration(time.Duration(request.DaysValid*24) * time.Hour)
+	uid := xid.New()
+	claims.Id = uid.String()
+
+	if userToken {
+		return auth.UserAuth().GenerateToken(&claims)
+	} else {
+		return auth.AdminAuth().GenerateToken(&claims)
+	}
+}
+
+func UserTokenResponce(request structs.IssueTokenRequest, token string) []byte {
+	expires := ""
+	if request.DaysValid>0 {
+		expires = time.Now().Add(time.Duration(request.DaysValid*24) * time.Hour).UTC().Format(time.RFC3339)
+	}
+	answer := structs.IssueTokenResponse{
+		Token:       token,
+		AccessTypes: request.AccessTypes,
+		Sub:         subjectFromRequest(request),
+		Expires:     expires,
+	}
+	res, _ := json.Marshal(answer)
+	return res
+}
diff --git a/authorizer/src/asapo_authorizer/cli/cli.go b/authorizer/src/asapo_authorizer/cli/cli.go
new file mode 100644
index 0000000000000000000000000000000000000000..0851d568c782b2d97f80894b3afd280f0c5a1b64
--- /dev/null
+++ b/authorizer/src/asapo_authorizer/cli/cli.go
@@ -0,0 +1,60 @@
+// Package cli contains asapo commands that can be executed from the command line.
+// Every CommandXxxx function that is a member of a cmd struct processes asapo xxxx command
+package cli
+
+import (
+	"errors"
+	"flag"
+	"fmt"
+	"io"
+	"os"
+	"reflect"
+	"strings"
+)
+
+var flHelp bool
+
+var outBuf io.Writer = os.Stdout
+
+func printHelp(f *flag.FlagSet) bool {
+	if flHelp {
+		f.Usage()
+		return true
+	} else {
+		return false
+	}
+}
+
+// DoCommand takes a command name as a parameter and executes the cmd method corresponding to that name
+func DoCommand(name string, args []string) error {
+	commandName := "Command" + strings.ToUpper(name[:1]) + strings.ToLower(name[1:])
+	cmd := new(command)
+	commandName = strings.ReplaceAll(commandName,"-","_")
+	methodVal := reflect.ValueOf(cmd).MethodByName(commandName)
+	if !methodVal.IsValid() {
+		return errors.New("wrong "+ProgramName+" command: " + name + "\nType '"+os.Args[0]+" -help'")
+	}
+	cmd.name = strings.ReplaceAll(name,"-","_")
+	cmd.args = args
+
+	method := methodVal.Interface().(func() error)
+
+	return method()
+}
+
+// PrintAllCommands prints all available commands (found within methods of cmd)
+func PrintAllCommands() {
+	fmt.Fprintln(outBuf, "\nCommands:")
+	cmd := new(command)
+	CmdType := reflect.TypeOf(cmd)
+	for i := 0; i < CmdType.NumMethod(); i++ {
+		methodVal := CmdType.Method(i)
+		if strings.HasPrefix(methodVal.Name, "Command") {
+			method := methodVal.Func.Interface().(func(*command) error)
+			cmd.name = strings.ToLower(methodVal.Name)[7:]
+			cmd.name = strings.ReplaceAll(cmd.name,"_","-")
+			cmd.args = []string{"description"}
+			method(cmd)
+		}
+	}
+}
diff --git a/authorizer/src/asapo_authorizer/cli/command.go b/authorizer/src/asapo_authorizer/cli/command.go
new file mode 100644
index 0000000000000000000000000000000000000000..6f8fc4f556ff1e420de679d953eab89a3c194654
--- /dev/null
+++ b/authorizer/src/asapo_authorizer/cli/command.go
@@ -0,0 +1,43 @@
+package cli
+
+import (
+	"errors"
+	"flag"
+	"fmt"
+	"os"
+)
+
+var ProgramName string
+
+// A command consists of a command name and arguments, passed to this command (all after program name ...)
+type command struct {
+	name string
+	args []string
+}
+
+// description prints description line and returns true if first command argument is "description".
+func (cmd *command) description(d string) bool {
+	if len(cmd.args) == 1 && cmd.args[0] == "description" {
+		fmt.Fprintf(outBuf, "   %-10s %s\n", cmd.name, d)
+		return true
+	}
+	return false
+}
+
+func (cmd *command) errBadOptions(err string) error {
+	return errors.New(ProgramName + " " + cmd.name + ": " + err + "\nType '" +os.Args[0] +" " + cmd.name + " -help'")
+}
+
+// createDefaultFlagset creates new flagset and adds default help behaviour.
+func (cmd *command) createDefaultFlagset(description, args string) *flag.FlagSet {
+
+	flags := flag.NewFlagSet(cmd.name, flag.ContinueOnError)
+	flags.BoolVar(&flHelp, "help", false, "Print usage")
+	flags.Usage = func() {
+		fmt.Fprintf(outBuf, "Usage:\t\n"+ProgramName+" %s "+args, cmd.name)
+		fmt.Fprintf(outBuf, "\n\n%s\n", description)
+		flags.PrintDefaults()
+	}
+
+	return flags
+}
diff --git a/authorizer/src/asapo_authorizer/cli/command_test.go b/authorizer/src/asapo_authorizer/cli/command_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..dd457455f141710adbc4ffed143ad0bdc8d045a6
--- /dev/null
+++ b/authorizer/src/asapo_authorizer/cli/command_test.go
@@ -0,0 +1,41 @@
+package cli
+
+import (
+	"asapo_authorizer/authorization"
+	"asapo_authorizer/server"
+	"asapo_common/utils"
+	"bytes"
+	"testing"
+	"github.com/stretchr/testify/assert"
+)
+
+var CommandTests = []struct {
+	cmd    command
+	ok bool
+	msg string
+}{
+	{command{"create-token", []string{"-type", "user-token", "-beamtime","123","-access-types","read","-duration-days","1"}}, true,"ok"},
+	{command{"dummy", []string{"description"}}, false,"wrong command"},
+}
+
+func TestCommand(t *testing.T) {
+	outBuf = new(bytes.Buffer)
+	server.Auth = authorization.NewAuth(utils.NewJWTAuth("secret"),utils.NewJWTAuth("secret_admin"),utils.NewJWTAuth("secret"))
+
+	for _, test := range CommandTests {
+		outBuf.(*bytes.Buffer).Reset()
+		err := DoCommand(test.cmd.name, test.cmd.args)
+		if !test.ok {
+			assert.NotNil(t, err, "Should be error",test.msg)
+		} else {
+			assert.Nil(t, err, "Should be ok",test.msg)
+		}
+	}
+
+}
+
+func TestPrintAllCommands(t *testing.T) {
+	outBuf = new(bytes.Buffer)
+	PrintAllCommands()
+	assert.Contains(t, outBuf.(*bytes.Buffer).String(), "token", "all commands must have token")
+}
diff --git a/authorizer/src/asapo_authorizer/cli/create_token.go b/authorizer/src/asapo_authorizer/cli/create_token.go
new file mode 100644
index 0000000000000000000000000000000000000000..ba06241279ffc28371ed2021fd982ba11cfdd9c1
--- /dev/null
+++ b/authorizer/src/asapo_authorizer/cli/create_token.go
@@ -0,0 +1,133 @@
+package cli
+
+import (
+	"asapo_authorizer/authorization"
+	"asapo_authorizer/server"
+	"asapo_common/structs"
+	"errors"
+	"fmt"
+	"os"
+	"strings"
+)
+
+type tokenFlags struct {
+	Type       string
+	AccessType string
+	Beamtime   string
+	Beamline   string
+	DaysValid  int
+}
+
+func userTokenRequest(flags tokenFlags) (request structs.IssueTokenRequest, err error) {
+	if (flags.Beamline=="" && flags.Beamtime=="") || (flags.Beamline!="" && flags.Beamtime!="") {
+		return request,errors.New("beamtime or beamline must be set")
+	}
+
+	request.Subject = make(map[string]string,1)
+	if (flags.Beamline!="") {
+		request.Subject["beamline"]=flags.Beamline
+	} else {
+		request.Subject["beamtimeId"]=flags.Beamtime
+	}
+
+	request.AccessTypes = strings.Split(flags.AccessType,",")
+	for _,at:=range request.AccessTypes {
+		if at!="read" && at!="write" {
+			return request,errors.New("access type must be read or write")
+		}
+	}
+
+	request.DaysValid = flags.DaysValid
+
+	return
+}
+
+
+func adminTokenRequest(flags tokenFlags) (request structs.IssueTokenRequest, err error) {
+	if flags.Beamline+flags.Beamtime!="" {
+		return request,errors.New("beamtime and beamline must not be set for admin token")
+	}
+
+	request.AccessTypes = strings.Split(flags.AccessType,",")
+	for _,at:=range request.AccessTypes {
+		if at!="create" && at!="revoke" && at!="list" {
+			return request,errors.New("access type must be create, revoke or list")
+		}
+	}
+
+	request.Subject = make(map[string]string,1)
+	request.Subject["user"]="admin"
+	request.DaysValid = flags.DaysValid
+
+	return
+}
+
+func (cmd *command) CommandCreate_token() (err error) {
+	message_string := "Generate token"
+	if cmd.description(message_string) {
+		return nil
+	}
+
+	flags, err := cmd.parseTokenFlags(message_string)
+	if err != nil {
+		return err
+	}
+
+	request, userToken, err := getTokenRequest(flags)
+	if err != nil {
+		return err
+	}
+
+	token, err := server.Auth.PrepareAccessToken(request,userToken)
+	if err != nil {
+		return err
+	}
+
+	answer := authorization.UserTokenResponce(request, token)
+	fmt.Fprintf(outBuf, "%s\n", string(answer))
+	return nil
+}
+
+func getTokenRequest(flags tokenFlags) (request structs.IssueTokenRequest, userToken bool, err error) {
+	switch flags.Type {
+	case "user-token":
+		request, err = userTokenRequest(flags)
+		userToken = true
+	case "admin-token":
+		request, err = adminTokenRequest(flags)
+		userToken = false
+	default:
+		return structs.IssueTokenRequest{}, false, errors.New("wrong token type")
+	}
+	if err != nil {
+		return structs.IssueTokenRequest{},false,  err
+	}
+	return request, userToken, err
+}
+
+
+func (cmd *command) parseTokenFlags(message_string string) (tokenFlags, error) {
+
+	var flags tokenFlags
+	flagset := cmd.createDefaultFlagset(message_string, "")
+	flagset.StringVar(&flags.Type, "type", "", "token type")
+	flagset.StringVar(&flags.Beamtime, "beamtime", "", "beamtime for user token")
+	flagset.StringVar(&flags.Beamline, "beamline", "", "beamline for user token")
+	flagset.StringVar(&flags.AccessType, "access-types", "", "read/write for user token")
+	flagset.IntVar(&flags.DaysValid, "duration-days", 0, "token duration (in days)")
+
+
+	flagset.Parse(cmd.args)
+
+	if printHelp(flagset) {
+		os.Exit(0)
+	}
+
+	if flags.Type == "" {
+		return flags, errors.New("token type missed")
+	}
+
+
+	return flags, nil
+
+}
diff --git a/authorizer/src/asapo_authorizer/cli/create_token_test.go b/authorizer/src/asapo_authorizer/cli/create_token_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..ca16f199ed5c20d2a0f0946c76bd00c06d168778
--- /dev/null
+++ b/authorizer/src/asapo_authorizer/cli/create_token_test.go
@@ -0,0 +1,67 @@
+package cli
+
+import (
+	"asapo_authorizer/authorization"
+	"asapo_authorizer/server"
+	"asapo_common/structs"
+	"asapo_common/utils"
+	"encoding/json"
+	"testing"
+
+	"bytes"
+	"github.com/stretchr/testify/assert"
+)
+
+var tokenTests = []struct {
+	cmd             command
+	key string
+	ok              bool
+	tokenAccessTypes []string
+	tokenSubject    string
+	tokenExpires bool
+	msg             string
+}{
+// good
+	{command{args: []string{"-type", "user-token", "-beamtime","123","-access-types","read","-duration-days","10"}},
+		"secret_user",true, []string{"read"}, "bt_123", true,"user token beamtime ok"},
+	{command{args: []string{"-type", "user-token", "-beamline","123","-access-types","read","-duration-days","10"}},
+		"secret_user",		true, []string{"read"}, "bl_123", true,"user token beamline ok"},
+	{command{args: []string{"-type", "admin-token","-access-types","create"}},
+		"secret_admin",true, []string{"create"}, "admin", false,"admin token ok"},
+// bad
+	{command{args: []string{"-type", "user-token", "-beamtime","123","-access-types","create","-duration-days","10"}},
+		"secret_user",false, nil, "", true,"user token wrong type"},
+	{command{args: []string{"-type", "user-token", "-access-types","create","-duration-days","10"}},
+		"secret_user",false, nil, "", true,"user token no beamtime or beamline"},
+	{command{args: []string{"-type", "user-token",  "-beamtime","123","-beamline","1234", "-access-types","create","-duration-days","10"}},
+		"secret_user",false, nil, "", true,"user token both beamtime and beamline"},
+	{command{args: []string{"-type", "admin-token","-access-types","bla"}},
+		"secret_admin",false, nil ,"", false,"admin token wrong type"},
+}
+
+func TestGenerateToken(t *testing.T) {
+	server.Auth = authorization.NewAuth(utils.NewJWTAuth("secret_user"),utils.NewJWTAuth("secret_admin"),utils.NewJWTAuth("secret"))
+	for _, test := range tokenTests {
+		outBuf = new(bytes.Buffer)
+		err := test.cmd.CommandCreate_token()
+		if !test.ok {
+			assert.NotNil(t, err, test.msg)
+			continue
+		}
+		assert.Nil(t, err, test.msg)
+		var token structs.IssueTokenResponse
+		json.Unmarshal(outBuf.(*bytes.Buffer).Bytes(), &token)
+
+		claims,_ := utils.CheckJWTToken(token.Token,test.key)
+		cclaims,_:= claims.(*utils.CustomClaims)
+		var extra_claim structs.AccessTokenExtraClaim
+		utils.MapToStruct(cclaims.ExtraClaims.(map[string]interface{}), &extra_claim)
+		assert.Equal(t, test.tokenSubject, cclaims.Subject, test.msg)
+		assert.Equal(t, test.tokenAccessTypes, extra_claim.AccessTypes, test.msg)
+		if test.tokenExpires {
+			assert.Equal(t, true, len(token.Expires)>0, test.msg)
+		} else {
+			assert.Empty(t, token.Expires, test.msg)
+		}
+	}
+}
diff --git a/authorizer/src/asapo_authorizer/cli/daemon.go b/authorizer/src/asapo_authorizer/cli/daemon.go
new file mode 100644
index 0000000000000000000000000000000000000000..46bf9a3e7bc4bdd1cb121bd21c819f21a9749921
--- /dev/null
+++ b/authorizer/src/asapo_authorizer/cli/daemon.go
@@ -0,0 +1,18 @@
+package cli
+
+import (
+	"asapo_authorizer/server"
+)
+
+
+func (cmd *command) CommandDaemon() error {
+
+	message_string := "Start daemon (default)"
+	if cmd.description(message_string) {
+		return nil
+	}
+
+	server.Start()
+
+	return nil
+}
diff --git a/authorizer/src/asapo_authorizer/common/structs.go b/authorizer/src/asapo_authorizer/common/structs.go
new file mode 100644
index 0000000000000000000000000000000000000000..805d0c79aadd885ddb6ed76b1e678c10a63bbe45
--- /dev/null
+++ b/authorizer/src/asapo_authorizer/common/structs.go
@@ -0,0 +1 @@
+package common
diff --git a/authorizer/src/asapo_authorizer/go.mod b/authorizer/src/asapo_authorizer/go.mod
new file mode 100644
index 0000000000000000000000000000000000000000..d509410645104648f2c4c28e5d7ebaafa45ee48c
--- /dev/null
+++ b/authorizer/src/asapo_authorizer/go.mod
@@ -0,0 +1,16 @@
+module asapo_authorizer
+
+go 1.16
+
+replace asapo_common v0.0.0 => ../../../common/go/src/asapo_common
+
+require (
+	asapo_common v0.0.0
+	github.com/dgrijalva/jwt-go v3.2.0+incompatible // indirect
+	github.com/go-ldap/ldap v3.0.3+incompatible
+	github.com/gorilla/mux v1.8.0 // indirect
+	github.com/rs/xid v1.2.1
+	github.com/sirupsen/logrus v1.8.0 // indirect
+	github.com/stretchr/testify v1.7.0
+	gopkg.in/asn1-ber.v1 v1.0.0-20181015200546-f715ec2f112d // indirect
+)
diff --git a/authorizer/src/asapo_authorizer/go.sum b/authorizer/src/asapo_authorizer/go.sum
new file mode 100644
index 0000000000000000000000000000000000000000..4e1c32df6dd6e14736ec4b0bf11f74f6a56b7ca0
--- /dev/null
+++ b/authorizer/src/asapo_authorizer/go.sum
@@ -0,0 +1,30 @@
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
+github.com/go-ldap/ldap v3.0.3+incompatible h1:HTeSZO8hWMS1Rgb2Ziku6b8a7qRIZZMHjsvuZyatzwk=
+github.com/go-ldap/ldap v3.0.3+incompatible/go.mod h1:qfd9rJvER9Q0/D/Sqn1DfHRoBp40uXYvFoEVrNEPqRc=
+github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
+github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
+github.com/magefile/mage v1.10.0 h1:3HiXzCUY12kh9bIuyXShaVe529fJfyqoVM42o/uom2g=
+github.com/magefile/mage v1.10.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/rs/xid v1.2.1 h1:mhH9Nq+C1fY2l1XIpgxIiUOfNpRBYH1kKcr+qfKgjRc=
+github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
+github.com/sirupsen/logrus v1.8.0 h1:nfhvjKcUMhBMVqbKHJlk5RPrrfYr/NMo3692g0dwfWU=
+github.com/sirupsen/logrus v1.8.0/go.mod h1:4GuYW9TZmE769R5STWrRakJc4UqQ3+QQ95fyz7ENv1A=
+github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+gopkg.in/asn1-ber.v1 v1.0.0-20181015200546-f715ec2f112d h1:TxyelI5cVkbREznMhfzycHdkp5cLA7DpE+GKjSslYhM=
+gopkg.in/asn1-ber.v1 v1.0.0-20181015200546-f715ec2f112d/go.mod h1:cuepJuh7vyXfUyUwEgHQXw849cJrilpS5NeIjOWESAw=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/authorizer/src/asapo_authorizer/main/authorizer.go b/authorizer/src/asapo_authorizer/main/authorizer.go
index 9f4329909333d6dac5c88966ccbc398497856941..57261418da9c80661c09aa8a5d57db7c2ea693e1 100644
--- a/authorizer/src/asapo_authorizer/main/authorizer.go
+++ b/authorizer/src/asapo_authorizer/main/authorizer.go
@@ -3,19 +3,22 @@
 package main
 
 import (
-	log "asapo_common/logger"
+	"asapo_authorizer/cli"
 	"asapo_authorizer/server"
+	log "asapo_common/logger"
 	"asapo_common/version"
 	"flag"
 	"os"
 )
 
-func PrintUsage() {
-	log.Fatal("Usage: " + os.Args[0] + " -config <config file>")
-}
+var (
+	flHelp = flag.Bool("help", false, "Print usage")
+)
 
 func main() {
-	var fname = flag.String("config", "", "config file path")
+	cli.ProgramName = "asapo-authorizer"
+
+	var fname = flag.String("config", "", "config file path (mandatory)")
 
 	if ret := version.ShowVersion(os.Stdout, "ASAPO Authorizer"); ret {
 		return
@@ -24,10 +27,16 @@ func main() {
 	log.SetSoucre("authorizer")
 
 	flag.Parse()
-	if *fname == "" {
-		PrintUsage()
+	if *flHelp {
+		flag.Usage()
+		cli.PrintAllCommands()
+		return
 	}
 
+	if *fname=="" {
+		log.Fatal("config file path is missed")
+
+	}
 	logLevel, err := server.ReadConfig(*fname)
 	if err != nil {
 		log.Fatal(err.Error())
@@ -35,5 +44,12 @@ func main() {
 
 	log.SetLevel(logLevel)
 
-	server.Start()
+	if len(flag.Args()) == 0 {
+		server.Start()
+	}
+
+	if err := cli.DoCommand(flag.Arg(0), flag.Args()[1:]); err != nil {
+		log.Fatal(err.Error())
+	}
+
 }
diff --git a/authorizer/src/asapo_authorizer/server/authorize.go b/authorizer/src/asapo_authorizer/server/authorize.go
index edcf0b703767f142d746a56235155bef6b942e91..a70f03439d255d3151440803b48886f5e9a319a8 100644
--- a/authorizer/src/asapo_authorizer/server/authorize.go
+++ b/authorizer/src/asapo_authorizer/server/authorize.go
@@ -3,6 +3,7 @@ package server
 import (
 	"asapo_authorizer/common"
 	log "asapo_common/logger"
+	"asapo_common/structs"
 	"asapo_common/utils"
 	"errors"
 	"net/http"
@@ -126,6 +127,7 @@ func alwaysAllowed(creds SourceCredentials) (beamtimeMeta, bool) {
 		if pair.BeamtimeId == creds.BeamtimeId {
 			pair.DataSource = creds.DataSource
 			pair.Type = creds.Type
+			pair.AccessTypes = []string{"read","write"}
 			return pair, true
 		}
 	}
@@ -152,26 +154,31 @@ func needHostAuthorization(creds SourceCredentials) bool {
 	return creds.Type == "raw" || len(creds.Token) == 0
 }
 
-func authorizeByToken(creds SourceCredentials) error {
-	var token_expect string
-	if (creds.BeamtimeId != "auto") {
-		token_expect, _ = authHMAC.GenerateToken(&creds.BeamtimeId)
-	} else {
-		key := "bl_" + creds.Beamline
-		token_expect, _ = authHMAC.GenerateToken(&key)
+func checkToken(token string, subject_expect string) (accessTypes []string, err error) {
+	var extra_claim structs.AccessTokenExtraClaim
+	subject,err := Auth.UserAuth().CheckAndGetContent(token,&extra_claim)
+	if err!=nil {
+		return nil,err
 	}
 
-	var err_string string
-	if creds.Token != token_expect {
-		if creds.BeamtimeId != "auto" {
-			err_string = "wrong token for beamtime " + creds.BeamtimeId
-		} else {
-			err_string = "wrong token for beamline " + creds.Beamline
-		}
-		log.Error(err_string)
-		return errors.New(err_string)
+	if extra_claim.AccessTypes==nil || len(extra_claim.AccessTypes)==0 {
+		return nil,errors.New("missing access types")
 	}
-	return nil
+
+	if subject!=subject_expect {
+		return nil,errors.New("wrong token for "+subject_expect)
+	}
+	return extra_claim.AccessTypes,err
+}
+
+func authorizeByToken(creds SourceCredentials) (accessTypes []string, err error) {
+	subject_expect:=""
+	if (creds.BeamtimeId != "auto") {
+		subject_expect = utils.SubjectFromBeamtime(creds.BeamtimeId)
+	} else {
+		subject_expect = utils.SubjectFromBeamline(creds.Beamline)
+	}
+	return checkToken(creds.Token,subject_expect)
 }
 
 func findMeta(creds SourceCredentials) (beamtimeMeta, error) {
@@ -204,31 +211,30 @@ func findMeta(creds SourceCredentials) (beamtimeMeta, error) {
 	return meta, nil
 }
 
-func authorizeMeta(meta beamtimeMeta, request authorizationRequest, creds SourceCredentials) error {
-
+func authorizeMeta(meta beamtimeMeta, request authorizationRequest, creds SourceCredentials) (accessTypes []string, err error) {
+	accessTypes = nil
 	if creds.Type=="raw" && meta.OnlinePath=="" {
 		err_string := "beamtime "+meta.BeamtimeId+" is not online"
 		log.Error(err_string)
-		return errors.New(err_string)
+		return nil,errors.New(err_string)
 	}
 
 	if creds.Beamline != "auto" && meta.Beamline != creds.Beamline {
 		err_string := "given beamline (" + creds.Beamline + ") does not match the found one (" + meta.Beamline + ")"
 		log.Debug(err_string)
-		return errors.New(err_string)
+		return nil,errors.New(err_string)
 	}
 
 	if needHostAuthorization(creds) {
 		if err := authorizeByHost(request.OriginHost, meta.Beamline); err != nil {
-			return err
+			return nil,err
 		}
+		accessTypes = []string{"read","write"}
 	} else {
-		if err := authorizeByToken(creds); err != nil {
-			return err
-		}
+		accessTypes,err = authorizeByToken(creds)
 	}
 
-	return nil
+	return accessTypes,err
 }
 
 func authorize(request authorizationRequest, creds SourceCredentials) (beamtimeMeta, error) {
@@ -241,11 +247,14 @@ func authorize(request authorizationRequest, creds SourceCredentials) (beamtimeM
 		return beamtimeMeta{}, err
 	}
 
-	if err := authorizeMeta(meta, request, creds); err != nil {
+	var accessTypes []string
+	if accessTypes, err = authorizeMeta(meta, request, creds); err != nil {
 		return beamtimeMeta{}, err
 	}
 
-	log.Debug("authorized beamtime " + meta.BeamtimeId + " for " + request.OriginHost + " in " + meta.Beamline+", type "+meta.Type)
+	meta.AccessTypes = accessTypes
+	log.Debug("authorized beamtime " + meta.BeamtimeId + " for " + request.OriginHost + " in " +
+		meta.Beamline+", type "+meta.Type)
 	return meta, nil
 }
 
diff --git a/authorizer/src/asapo_authorizer/server/authorize_test.go b/authorizer/src/asapo_authorizer/server/authorize_test.go
index 4085b1b5b473ebbae4a06998e73e509ddee12c9b..e4448cc69d89d815d326289bb1e85e2e36bd5b1d 100644
--- a/authorizer/src/asapo_authorizer/server/authorize_test.go
+++ b/authorizer/src/asapo_authorizer/server/authorize_test.go
@@ -1,8 +1,10 @@
 package server
 
 import (
+	"asapo_authorizer/authorization"
 	"asapo_authorizer/common"
 	"asapo_authorizer/ldap_client"
+	"asapo_common/structs"
 	"asapo_common/utils"
 	"github.com/stretchr/testify/assert"
 	"io/ioutil"
@@ -14,9 +16,27 @@ import (
 	"testing"
 )
 
-func prepareToken(beamtime_or_beamline string) string{
-	authHMAC = utils.NewHMACAuth("secret")
-	token, _ := authHMAC.GenerateToken(&beamtime_or_beamline)
+
+func prepareUserToken(payload string, accessTypes []string) string{
+	auth := authorization.NewAuth(nil,utils.NewJWTAuth("secret_user"),nil)
+	var claims utils.CustomClaims
+	var extraClaim structs.AccessTokenExtraClaim
+	claims.Subject = payload
+	extraClaim.AccessTypes = accessTypes
+	claims.ExtraClaims = &extraClaim
+	token, _ := auth.AdminAuth().GenerateToken(&claims)
+	return token
+}
+
+func prepareAdminToken(payload string) string{
+	auth:= authorization.NewAuth(nil,utils.NewJWTAuth("secret_admin"),nil)
+
+	var claims utils.CustomClaims
+	var extraClaim structs.AccessTokenExtraClaim
+	claims.Subject = payload
+	extraClaim.AccessTypes = []string{"create"}
+	claims.ExtraClaims = &extraClaim
+	token, _ := auth.AdminAuth().GenerateToken(&claims)
 	return token
 }
 
@@ -44,9 +64,12 @@ func makeRequest(request interface{}) string {
 	return string(buf)
 }
 
-func doPostRequest(path string,buf string) *httptest.ResponseRecorder {
+func doPostRequest(path string,buf string,authHeader string) *httptest.ResponseRecorder {
 	mux := utils.NewRouter(listRoutes)
 	req, _ := http.NewRequest("POST", path, strings.NewReader(buf))
+	if authHeader!="" {
+		req.Header.Add("Authorization",authHeader)
+	}
 	w := httptest.NewRecorder()
 	mux.ServeHTTP(w, req)
 	return w
@@ -86,9 +109,9 @@ func TestSplitCreds(t *testing.T) {
 }
 
 func TestAuthorizeDefaultOK(t *testing.T) {
-	allowBeamlines([]beamtimeMeta{{"asapo_test","beamline","","2019","tf",""}})
+	allowBeamlines([]beamtimeMeta{{"asapo_test","beamline","","2019","tf","",nil}})
 	request :=  makeRequest(authorizationRequest{"processed%asapo_test%%%","host"})
-	w := doPostRequest("/authorize",request)
+	w := doPostRequest("/authorize",request,"")
 
 	body, _ := ioutil.ReadAll(w.Body)
 
@@ -157,38 +180,42 @@ var authTests = [] struct {
 	message string
 	answer string
 }{
-	{"processed","test","auto","dataSource", prepareToken("test"),"127.0.0.2",http.StatusOK,"user source with correct token",
-		`{"beamtimeId":"test","beamline":"bl1","dataSource":"dataSource","core-path":"./tf/gpfs/bl1/2019/data/test","beamline-path":"","source-type":"processed"}`},
-	{"processed","test_online","auto","dataSource", prepareToken("test_online"),"127.0.0.1",http.StatusOK,"with online path, processed type",
-		`{"beamtimeId":"test_online","beamline":"bl1","dataSource":"dataSource","core-path":"./tf/gpfs/bl1/2019/data/test_online","beamline-path":"","source-type":"processed"}`},
-	{"processed","test1","auto","dataSource", prepareToken("test1"),"127.0.0.1",http.StatusUnauthorized,"correct token, beamtime not found",
+	{"processed","test","auto","dataSource", prepareUserToken("bt_test",nil),"127.0.0.2",http.StatusUnauthorized,"missing access types",
+		""},
+	{"processed","test","auto","dataSource", prepareUserToken("bt_test",[]string{}),"127.0.0.2",http.StatusUnauthorized,"empty access types",
+		""},
+	{"processed","test","auto","dataSource", prepareUserToken("bt_test",[]string{"write"}),"127.0.0.2",http.StatusOK,"user source with correct token",
+		`{"beamtimeId":"test","beamline":"bl1","dataSource":"dataSource","core-path":"./tf/gpfs/bl1/2019/data/test","beamline-path":"","source-type":"processed","access-types":["write"]}`},
+	{"processed","test_online","auto","dataSource", prepareUserToken("bt_test_online",[]string{"read"}),"127.0.0.1",http.StatusOK,"with online path, processed type",
+		`{"beamtimeId":"test_online","beamline":"bl1","dataSource":"dataSource","core-path":"./tf/gpfs/bl1/2019/data/test_online","beamline-path":"","source-type":"processed","access-types":["read"]}`},
+	{"processed","test1","auto","dataSource", prepareUserToken("bt_test1",[]string{"read"}),"127.0.0.1",http.StatusUnauthorized,"correct token, beamtime not found",
 		""},
-	{"processed","test","auto","dataSource", prepareToken("wrong"),"127.0.0.1",http.StatusUnauthorized,"user source with wrong token",
+	{"processed","test","auto","dataSource", prepareUserToken("wrong",[]string{"read"}),"127.0.0.1",http.StatusUnauthorized,"user source with wrong token",
 		""},
-	{"processed","test","bl1","dataSource", prepareToken("test"),"127.0.0.1",http.StatusOK,"correct beamline given",
-		`{"beamtimeId":"test","beamline":"bl1","dataSource":"dataSource","core-path":"./tf/gpfs/bl1/2019/data/test","beamline-path":"","source-type":"processed"}`},
-		{"processed","test","bl2","dataSource", prepareToken("test"),"127.0.0.1",http.StatusUnauthorized,"incorrect beamline given",
+	{"processed","test","bl1","dataSource", prepareUserToken("bt_test",[]string{"read"}),"127.0.0.1",http.StatusOK,"correct beamline given",
+		`{"beamtimeId":"test","beamline":"bl1","dataSource":"dataSource","core-path":"./tf/gpfs/bl1/2019/data/test","beamline-path":"","source-type":"processed","access-types":["read"]}`},
+		{"processed","test","bl2","dataSource", prepareUserToken("bt_test",[]string{"read"}),"127.0.0.1",http.StatusUnauthorized,"incorrect beamline given",
 		""},
-	{"processed","auto","p07", "dataSource",prepareToken("bl_p07"),"127.0.0.1",http.StatusOK,"beamtime found",
-		`{"beamtimeId":"11111111","beamline":"p07","dataSource":"dataSource","core-path":"asap3/petra3/gpfs/p07/2020/data/11111111","beamline-path":"","source-type":"processed"}`},
-	{"processed","auto","p07", "dataSource",prepareToken("bl_p06"),"127.0.0.1",http.StatusUnauthorized,"wrong token",
+	{"processed","auto","p07", "dataSource", prepareUserToken("bl_p07",[]string{"read"}),"127.0.0.1",http.StatusOK,"beamtime found",
+		`{"beamtimeId":"11111111","beamline":"p07","dataSource":"dataSource","core-path":"asap3/petra3/gpfs/p07/2020/data/11111111","beamline-path":"","source-type":"processed","access-types":["read"]}`},
+	{"processed","auto","p07", "dataSource", prepareUserToken("bl_p06",[]string{"read"}),"127.0.0.1",http.StatusUnauthorized,"wrong token",
 		""},
-	{"processed","auto","p08", "dataSource",prepareToken("bl_p08"),"127.0.0.1",http.StatusUnauthorized,"beamtime not found",
+	{"processed","auto","p08", "dataSource", prepareUserToken("bl_p08",[]string{"read"}),"127.0.0.1",http.StatusUnauthorized,"beamtime not found",
 		""},
-	{"raw","test_online","auto","dataSource", prepareToken("test_online"),"127.0.0.1",http.StatusOK,"raw type",
-		`{"beamtimeId":"test_online","beamline":"bl1","dataSource":"dataSource","core-path":"./tf/gpfs/bl1/2019/data/test_online","beamline-path":"./bl1/current","source-type":"raw"}`},
 	{"raw","test_online","auto","dataSource", "","127.0.0.1",http.StatusOK,"raw type",
-		`{"beamtimeId":"test_online","beamline":"bl1","dataSource":"dataSource","core-path":"./tf/gpfs/bl1/2019/data/test_online","beamline-path":"./bl1/current","source-type":"raw"}`},
+		`{"beamtimeId":"test_online","beamline":"bl1","dataSource":"dataSource","core-path":"./tf/gpfs/bl1/2019/data/test_online","beamline-path":"./bl1/current","source-type":"raw","access-types":["read","write"]}`},
+	{"raw","test_online","auto","dataSource", "","127.0.0.1",http.StatusOK,"raw type",
+		`{"beamtimeId":"test_online","beamline":"bl1","dataSource":"dataSource","core-path":"./tf/gpfs/bl1/2019/data/test_online","beamline-path":"./bl1/current","source-type":"raw","access-types":["read","write"]}`},
  	{"raw","auto","p07","dataSource", "","127.0.0.1",http.StatusOK,"raw type, auto beamtime",
-		`{"beamtimeId":"11111111","beamline":"p07","dataSource":"dataSource","core-path":"asap3/petra3/gpfs/p07/2020/data/11111111","beamline-path":"./p07/current","source-type":"raw"}`},
+		`{"beamtimeId":"11111111","beamline":"p07","dataSource":"dataSource","core-path":"asap3/petra3/gpfs/p07/2020/data/11111111","beamline-path":"./p07/current","source-type":"raw","access-types":["read","write"]}`},
 	{"raw","auto","p07","noldap", "","127.0.0.1",http.StatusNotFound,"no conection to ldap",
 		""},
 	{"raw","test_online","auto","dataSource", "","127.0.0.2",http.StatusUnauthorized,"raw type, wrong origin host",
 		""},
-	{"raw","test","auto","dataSource", prepareToken("test"),"127.0.0.1",http.StatusUnauthorized,"raw when not online",
+	{"raw","test","auto","dataSource", prepareUserToken("bt_test",[]string{"read"}),"127.0.0.1",http.StatusUnauthorized,"raw when not online",
 		""},
 	{"processed","test","auto","dataSource", "","127.0.0.1:1001",http.StatusOK,"processed without token",
-		`{"beamtimeId":"test","beamline":"bl1","dataSource":"dataSource","core-path":"./tf/gpfs/bl1/2019/data/test","beamline-path":"","source-type":"processed"}`},
+		`{"beamtimeId":"test","beamline":"bl1","dataSource":"dataSource","core-path":"./tf/gpfs/bl1/2019/data/test","beamline-path":"","source-type":"processed","access-types":["read","write"]}`},
 	{"processed","test","auto","dataSource", "","127.0.0.2",http.StatusUnauthorized,"processed without token, wrong host",
 		""},
 }
@@ -196,7 +223,7 @@ var authTests = [] struct {
 func TestAuthorize(t *testing.T) {
 	ldapClient = mockClient
 	allowBeamlines([]beamtimeMeta{})
-
+	Auth = authorization.NewAuth(utils.NewJWTAuth("secret_user"),utils.NewJWTAuth("secret_admin"),utils.NewJWTAuth("secret"))
 	expected_uri := "expected_uri"
 	expected_base := "expected_base"
 	allowed_ips := []string{"127.0.0.1"}
@@ -231,7 +258,7 @@ func TestAuthorize(t *testing.T) {
 		}
 
 		request :=  makeRequest(authorizationRequest{test.source_type+"%"+test.beamtime_id+"%"+test.beamline+"%"+test.dataSource+"%"+test.token,test.originHost})
-		w := doPostRequest("/authorize",request)
+		w := doPostRequest("/authorize",request,"")
 
 		body, _ := ioutil.ReadAll(w.Body)
 		if test.status==http.StatusOK {
@@ -248,24 +275,24 @@ func TestAuthorize(t *testing.T) {
 
 func TestNotAuthorized(t *testing.T) {
 	request :=  makeRequest(authorizationRequest{"raw%any_id%%%","host"})
-	w := doPostRequest("/authorize",request)
+	w := doPostRequest("/authorize",request,"")
 	assert.Equal(t, http.StatusUnauthorized, w.Code, "")
 }
 
 
 func TestAuthorizeWrongRequest(t *testing.T) {
-	w := doPostRequest("/authorize","babla")
+	w := doPostRequest("/authorize","babla","")
 	assert.Equal(t, http.StatusBadRequest, w.Code, "")
 }
 
 
 func TestAuthorizeWrongPath(t *testing.T) {
-	w := doPostRequest("/authorized","")
+	w := doPostRequest("/authorized","","")
 	assert.Equal(t, http.StatusNotFound, w.Code, "")
 }
 
 func TestDoNotAuthorizeIfNotInAllowed(t *testing.T) {
-	allowBeamlines([]beamtimeMeta{{"test","beamline","","2019","tf",""}})
+	allowBeamlines([]beamtimeMeta{{"test","beamline","","2019","tf","",nil}})
 
 	request :=  authorizationRequest{"asapo_test%%","host"}
 	creds,_ := getSourceCredentials(request)
diff --git a/authorizer/src/asapo_authorizer/server/folder_token.go b/authorizer/src/asapo_authorizer/server/folder_token.go
index bb69d5b9b34a56169a596c8c2cbec13db7fc6fd7..6b80193478187250f1d14df00f754bcd6bde5181 100644
--- a/authorizer/src/asapo_authorizer/server/folder_token.go
+++ b/authorizer/src/asapo_authorizer/server/folder_token.go
@@ -1,7 +1,9 @@
 package server
 
 import (
+	"asapo_common/structs"
 	"asapo_common/utils"
+	"asapo_common/version"
 	"net/http"
 	"time"
 	log "asapo_common/logger"
@@ -10,71 +12,63 @@ import (
 )
 
 type folderTokenRequest struct {
-	Folder string
+	Folder     string
 	BeamtimeId string
 	Token      string
 }
 
 type folderToken struct {
-	Token      string
+	Token string
 }
 
 /*func routeFolderToken(w http.ResponseWriter, r *http.Request) {
 	utils.ProcessJWTAuth(processFolderTokenRequest,settings.secret)(w,r)
 }*/
 
-func prepareJWTToken(request folderTokenRequest) (string,error) {
+func prepareJWTToken(request folderTokenRequest) (string, error) {
 	var claims utils.CustomClaims
-	var extraClaim utils.FolderTokenTokenExtraClaim
+	var extraClaim structs.FolderTokenTokenExtraClaim
 
 	extraClaim.RootFolder = request.Folder
 	claims.ExtraClaims = &extraClaim
-	claims.Duration = time.Duration(settings.TokenDurationMin) * time.Minute
-
-	return authJWT.GenerateToken(&claims)
+	claims.SetExpiration(time.Duration(settings.FolderTokenDurationMin) * time.Minute)
+	return Auth.JWTAuth().GenerateToken(&claims)
 
 }
 
-func folderTokenResponce(token string) []byte{
+func folderTokenResponce(token string) []byte {
 	return []byte(token)
 }
 
 func checkBeamtimeToken(request folderTokenRequest) error {
-	token_expect, _ := authHMAC.GenerateToken(&request.BeamtimeId)
-	var err_string string
-	if request.Token != token_expect {
-		err_string = "wrong token for beamtime " + request.BeamtimeId
-		log.Error(err_string)
-		return errors.New(err_string)
-	}
-	return nil
+	_, err := checkToken(request.Token, utils.SubjectFromBeamtime(request.BeamtimeId))
+	return err
 }
 
-
-func extractFolderTokenrequest(r *http.Request) (folderTokenRequest,error) {
+func extractFolderTokenrequest(r *http.Request) (folderTokenRequest, error) {
 	var request folderTokenRequest
-	err := utils.ExtractRequest(r,&request)
+	err := utils.ExtractRequest(r, &request)
 	if err != nil {
-		return folderTokenRequest{},err
+		return folderTokenRequest{}, err
 	}
 
-	if len(request.Folder)==0 ||len(request.BeamtimeId)==0 || len(request.Token) == 0 {
-		return folderTokenRequest{},errors.New("some request fields are empty")
+	if len(request.Folder) == 0 || len(request.BeamtimeId) == 0 || len(request.Token) == 0 {
+		return folderTokenRequest{}, errors.New("some request fields are empty")
 	}
-	return request,nil
+	return request, nil
 
 }
 
 func checkBeamtimeFolder(request folderTokenRequest) error {
-	beamtimeMeta, err := findMeta(SourceCredentials{request.BeamtimeId,"auto","","",""})
+	beamtimeMeta, err := findMeta(SourceCredentials{request.BeamtimeId, "auto", "", "", ""})
 	if err != nil {
-		log.Error("cannot get beamtime meta"+err.Error())
+		log.Error("cannot get beamtime meta" + err.Error())
 		return err
 	}
 
 	folder := filepath.Clean(request.Folder)
-	if (folder != filepath.Clean(beamtimeMeta.OnlinePath) && folder != filepath.Clean(beamtimeMeta.OfflinePath)) {
-		err_string := folder + " does not match beamtime folders "+beamtimeMeta.OnlinePath+" or " +beamtimeMeta.OfflinePath
+	if folder != filepath.Clean(beamtimeMeta.OnlinePath) && folder != filepath.Clean(beamtimeMeta.OfflinePath) {
+		err_string := folder + " does not match beamtime folders " + beamtimeMeta.OnlinePath + " or " + beamtimeMeta.OfflinePath
 		log.Error(err_string)
 		return errors.New(err_string)
 	}
@@ -82,28 +76,37 @@ func checkBeamtimeFolder(request folderTokenRequest) error {
 	return nil
 }
 
+func checkAuthorizerApiVersion(w http.ResponseWriter, r *http.Request) bool {
+	_, ok := utils.PrecheckApiVersion(w, r, version.GetAuthorizerApiVersion())
+	return ok
+}
+
 func routeFolderToken(w http.ResponseWriter, r *http.Request) {
+	if ok := checkAuthorizerApiVersion(w, r); !ok {
+		return
+	}
+
 	request, err := extractFolderTokenrequest(r)
 	if err != nil {
-		utils.WriteServerError(w,err,http.StatusBadRequest)
+		utils.WriteServerError(w, err, http.StatusBadRequest)
 		return
 	}
 
 	err = checkBeamtimeToken(request)
 	if err != nil {
-		utils.WriteServerError(w,err,http.StatusUnauthorized)
+		utils.WriteServerError(w, err, http.StatusUnauthorized)
 		return
 	}
 
 	err = checkBeamtimeFolder(request)
 	if err != nil {
-		utils.WriteServerError(w,err,http.StatusUnauthorized)
+		utils.WriteServerError(w, err, http.StatusUnauthorized)
 		return
 	}
 
 	token, err := prepareJWTToken(request)
 	if err != nil {
-		utils.WriteServerError(w,err,http.StatusInternalServerError)
+		utils.WriteServerError(w, err, http.StatusInternalServerError)
 		return
 	}
 
diff --git a/authorizer/src/asapo_authorizer/server/folder_token_test.go b/authorizer/src/asapo_authorizer/server/folder_token_test.go
index dbd48e2c42ad871d4157cf9b97e9af8677de9d99..ee9ab2f5088474e753e811c6440196bfa6f2d0ba 100644
--- a/authorizer/src/asapo_authorizer/server/folder_token_test.go
+++ b/authorizer/src/asapo_authorizer/server/folder_token_test.go
@@ -1,6 +1,8 @@
 package server
 
 import (
+	"asapo_authorizer/authorization"
+	"asapo_common/structs"
 	"asapo_common/utils"
 	"github.com/stretchr/testify/assert"
 	"io/ioutil"
@@ -18,19 +20,21 @@ var  fodlerTokenTests = [] struct {
 	status int
 	message string
 }{
-	{"test", "tf/gpfs/bl1/2019/data/test",prepareToken("test"),http.StatusOK,"beamtime found"},
-	{"test_online", "bl1/current",prepareToken("test_online"),http.StatusOK,"online beamtime found"},
-	{"test", "bl1/current",prepareToken("test"),http.StatusUnauthorized,"no online beamtime found"},
-	{"test_online", "bl2/current",prepareToken("test_online"),http.StatusUnauthorized,"wrong online folder"},
-	{"test", "tf/gpfs/bl1/2019/data/test1",prepareToken("test"),http.StatusUnauthorized,"wrong folder"},
-	{"test", "tf/gpfs/bl1/2019/data/test",prepareToken("test1"),http.StatusUnauthorized,"wrong token"},
-	{"11111111", "tf/gpfs/bl1/2019/data/test",prepareToken("11111111"),http.StatusBadRequest,"bad request"},
+	{"test", "tf/gpfs/bl1/2019/data/test", prepareUserToken("bt_test",[]string{"read"}),http.StatusOK,"beamtime found"},
+	{"test_online", "bl1/current", prepareUserToken("bt_test_online",[]string{"read"}),http.StatusOK,"online beamtime found"},
+	{"test", "bl1/current", prepareUserToken("bt_test",[]string{"read"}),http.StatusUnauthorized,"no online beamtime found"},
+	{"test_online", "bl2/current", prepareUserToken("bt_test_online",[]string{"read"}),http.StatusUnauthorized,"wrong online folder"},
+	{"test", "tf/gpfs/bl1/2019/data/test1", prepareUserToken("bt_test",[]string{"read"}),http.StatusUnauthorized,"wrong folder"},
+	{"test", "tf/gpfs/bl1/2019/data/test", prepareUserToken("bt_test1",[]string{"read"}),http.StatusUnauthorized,"wrong token"},
+	{"11111111", "tf/gpfs/bl1/2019/data/test", prepareUserToken("bt_11111111",[]string{"read"}),http.StatusBadRequest,"bad request"},
 }
 
 func TestFolderToken(t *testing.T) {
 	allowBeamlines([]beamtimeMeta{})
 	settings.RootBeamtimesFolder ="."
 	settings.CurrentBeamlinesFolder="."
+	Auth = authorization.NewAuth(utils.NewJWTAuth("secret_user"),utils.NewJWTAuth("secret_admin"),utils.NewJWTAuth("secret_folder"))
+
 	os.MkdirAll(filepath.Clean("tf/gpfs/bl1/2019/data/test"), os.ModePerm)
 	os.MkdirAll(filepath.Clean("tf/gpfs/bl1/2019/data/test_online"), os.ModePerm)
 
@@ -41,17 +45,16 @@ func TestFolderToken(t *testing.T) {
 	defer 	os.RemoveAll("bl1")
 
 	for _, test := range fodlerTokenTests {
-		authJWT = utils.NewJWTAuth("secret")
 		abs_path:=settings.RootBeamtimesFolder + string(filepath.Separator)+test.root_folder
 		request :=  makeRequest(folderTokenRequest{abs_path,test.beamtime_id,test.token})
 		if test.status == http.StatusBadRequest {
 			request =makeRequest(authorizationRequest{})
 		}
-		w := doPostRequest("/folder",request)
+		w := doPostRequest("/v0.1/folder",request,"")
 		if w.Code == http.StatusOK {
 			body, _ := ioutil.ReadAll(w.Body)
-			claims,_ := utils.CheckJWTToken(string(body),"secret")
-			var extra_claim utils.FolderTokenTokenExtraClaim
+			claims,_ := utils.CheckJWTToken(string(body),"secret_folder")
+			var extra_claim structs.FolderTokenTokenExtraClaim
 			utils.MapToStruct(claims.(*utils.CustomClaims).ExtraClaims.(map[string]interface{}), &extra_claim)
 			assert.Equal(t, abs_path, extra_claim.RootFolder, test.message)
 		} else {
@@ -63,3 +66,11 @@ func TestFolderToken(t *testing.T) {
 	}
 }
 
+func TestFolderTokenWrongProtocol(t *testing.T) {
+		request :=  makeRequest(folderTokenRequest{"abs_path","beamtime_id","token"})
+		w := doPostRequest("/v0.2/folder",request,"")
+		assert.Equal(t, http.StatusUnsupportedMediaType, w.Code, "wrong protocol")
+}
+
+
+
diff --git a/authorizer/src/asapo_authorizer/server/introspect.go b/authorizer/src/asapo_authorizer/server/introspect.go
new file mode 100644
index 0000000000000000000000000000000000000000..1cc6bd37add60a6f8604ef0c47bc91b7eecb5345
--- /dev/null
+++ b/authorizer/src/asapo_authorizer/server/introspect.go
@@ -0,0 +1,48 @@
+package server
+
+import (
+	log "asapo_common/logger"
+	"asapo_common/structs"
+	"asapo_common/utils"
+	"encoding/json"
+	"net/http"
+)
+
+func extractToken(r *http.Request) (string, error) {
+	var request structs.IntrospectTokenRequest
+	err := utils.ExtractRequest(r, &request)
+	if err != nil {
+		return "", err
+	}
+	return request.Token, nil
+}
+
+func verifyUserToken(token string) (response structs.IntrospectTokenResponse, err error) {
+	var extra_claim structs.AccessTokenExtraClaim
+	response.Sub,err = Auth.UserAuth().CheckAndGetContent(token,&extra_claim)
+	if err!=nil {
+		return
+	}
+	response.AccessTypes = extra_claim.AccessTypes
+	return
+}
+
+func routeIntrospect(w http.ResponseWriter, r *http.Request) {
+	token, err := extractToken(r)
+	if err != nil {
+		utils.WriteServerError(w, err, http.StatusBadRequest)
+		return
+	}
+
+	response,err := verifyUserToken(token)
+	if err != nil {
+		utils.WriteServerError(w, err, http.StatusUnauthorized)
+		return
+	}
+
+	log.Debug("verified user token for "+response.Sub)
+
+	answer,_ := json.Marshal(&response)
+	w.WriteHeader(http.StatusOK)
+	w.Write(answer)
+}
diff --git a/authorizer/src/asapo_authorizer/server/introspect_test.go b/authorizer/src/asapo_authorizer/server/introspect_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..a9827bea06e6eba2c8d842def06037e86a9d896d
--- /dev/null
+++ b/authorizer/src/asapo_authorizer/server/introspect_test.go
@@ -0,0 +1,51 @@
+package server
+
+import (
+	"asapo_authorizer/authorization"
+	"asapo_common/structs"
+	"asapo_common/utils"
+	"encoding/json"
+	"fmt"
+	"github.com/stretchr/testify/assert"
+	"io/ioutil"
+	"net/http"
+	"testing"
+)
+
+var  IntrospectTests = [] struct {
+	tokenSubject string
+	roles []string
+	status int
+	message string
+}{
+	{"bt_test",[]string{"read"},http.StatusOK,"valid token"},
+	{"",nil,http.StatusUnauthorized,"invalid token"},
+
+}
+
+func TestIntrospect(t *testing.T) {
+	authJWT := utils.NewJWTAuth("secret")
+	authAdmin := utils.NewJWTAuth("secret_admin")
+	authUser := utils.NewJWTAuth("secret_user")
+	Auth = authorization.NewAuth(authUser,authAdmin,authJWT)
+	for _, test := range IntrospectTests {
+		token := prepareUserToken(test.tokenSubject,test.roles)
+		if test.status==http.StatusUnauthorized {
+			token = "blabla"
+		}
+		request :=  makeRequest(structs.IntrospectTokenRequest{token})
+		w := doPostRequest("/introspect",request,"")
+		assert.Equal(t, test.status , w.Code, test.message)
+		if test.status == http.StatusOK {
+			body, _ := ioutil.ReadAll(w.Body)
+			var token structs.IntrospectTokenResponse
+			json.Unmarshal(body,&token)
+			assert.Equal(t, token.Sub , test.tokenSubject, test.message)
+			assert.Equal(t, token.AccessTypes, test.roles, test.message)
+		} else {
+			body, _ := ioutil.ReadAll(w.Body)
+			fmt.Println(string(body))
+		}
+	}
+}
+
diff --git a/authorizer/src/asapo_authorizer/server/issue_token.go b/authorizer/src/asapo_authorizer/server/issue_token.go
new file mode 100644
index 0000000000000000000000000000000000000000..7a332ad935d23c502514e98b440d9239afb4e560
--- /dev/null
+++ b/authorizer/src/asapo_authorizer/server/issue_token.go
@@ -0,0 +1,76 @@
+package server
+
+import (
+	"asapo_authorizer/authorization"
+	log "asapo_common/logger"
+	"asapo_common/structs"
+	"asapo_common/utils"
+	"errors"
+	"net/http"
+)
+
+func extractUserTokenrequest(r *http.Request) (request structs.IssueTokenRequest, err error) {
+	err = utils.ExtractRequest(r, &request)
+	if err != nil {
+		return request, err
+	}
+
+	if request.Subject["beamtimeId"] == "" && request.Subject["beamline"] == "" {
+		return request, errors.New("missing beamtime/beamline")
+	}
+
+	if request.Subject["beamtimeId"] != "" && request.Subject["beamline"] != "" {
+		return request, errors.New("set only one of beamtime/beamline")
+	}
+
+	if request.DaysValid <= 0 {
+		return request, errors.New("set token valid period")
+	}
+
+	for _, ar := range request.AccessTypes {
+		if ar != "read" && ar != "write" {
+			return request, errors.New("wrong requested access rights: "+ar)
+		}
+	}
+
+	return request, nil
+}
+
+func routeAuthorisedTokenIssue(w http.ResponseWriter, r *http.Request) {
+	Auth.AdminAuth().ProcessAuth(checkAccessToken, "admin")(w, r)
+}
+func checkAccessToken(w http.ResponseWriter, r *http.Request) {
+	var extraClaim structs.AccessTokenExtraClaim
+	var claims *utils.CustomClaims
+	if err := utils.JobClaimFromContext(r, &claims, &extraClaim); err != nil {
+		w.WriteHeader(http.StatusInternalServerError)
+		w.Write([]byte(err.Error()))
+	}
+	if claims.Subject != "admin" || !utils.StringInSlice("create",extraClaim.AccessTypes) {
+		err_txt := "wrong token claims"
+		w.WriteHeader(http.StatusUnauthorized)
+		w.Write([]byte(err_txt))
+	}
+
+	issueUserToken(w, r)
+}
+
+func issueUserToken(w http.ResponseWriter, r *http.Request) {
+	request, err := extractUserTokenrequest(r)
+	if err != nil {
+		utils.WriteServerError(w, err, http.StatusBadRequest)
+		return
+	}
+
+	token, err := Auth.PrepareAccessToken(request, true)
+	if err != nil {
+		utils.WriteServerError(w, err, http.StatusInternalServerError)
+		return
+	}
+
+	log.Debug("generated user token ")
+
+	answer := authorization.UserTokenResponce(request, token)
+	w.WriteHeader(http.StatusOK)
+	w.Write(answer)
+}
diff --git a/authorizer/src/asapo_authorizer/server/issue_token_test.go b/authorizer/src/asapo_authorizer/server/issue_token_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..5ed8c9b6a865449ecbda9f61f1c31183f3eb8d93
--- /dev/null
+++ b/authorizer/src/asapo_authorizer/server/issue_token_test.go
@@ -0,0 +1,65 @@
+package server
+
+import (
+	"asapo_authorizer/authorization"
+	"asapo_common/structs"
+	"asapo_common/utils"
+	"encoding/json"
+	"fmt"
+	"github.com/stretchr/testify/assert"
+	"io/ioutil"
+	"net/http"
+	"testing"
+	"time"
+)
+
+var  IssueTokenTests = [] struct {
+	requestSubject map[string]string
+	tokenSubject   string
+	roles          []string
+	validDays      int
+	adminToken     string
+	resToken       string
+	status         int
+	message        string
+}{
+	{map[string]string{"beamtimeId":"test"},"bt_test",[]string{"read"},180,prepareAdminToken("admin"),"aaa",http.StatusOK,"read for beamtime"},
+	{map[string]string{"beamtimeId":"test"},"bt_test",[]string{"read"},90,prepareAdminToken("admin"),"aaa",http.StatusOK,"write for beamtime"},
+	{map[string]string{"beamline":"test"},"bl_test",[]string{"read"},180,prepareAdminToken("admin"),"aaa",http.StatusOK,"read for beamline"},
+	{map[string]string{"blabla":"test"},"",[]string{"read"},180,prepareAdminToken("admin"),"",http.StatusBadRequest,"beamline or beamtime not given"},
+	{map[string]string{"beamtimeId":"test"},"",[]string{"bla"},180,prepareAdminToken("admin"),"",http.StatusBadRequest,"wrong role"},
+	{map[string]string{"beamtimeId":"test"},"",[]string{"read"},180,prepareAdminToken("bla"),"",http.StatusUnauthorized,"wrong admin token"},
+	{map[string]string{"beamtimeId":"test"},"bt_test",[]string{"read"},0,prepareAdminToken("admin"),"aaa",http.StatusBadRequest,"0 valid days"},
+
+}
+
+func TestIssueToken(t *testing.T) {
+	authJWT := utils.NewJWTAuth("secret")
+	authAdmin := utils.NewJWTAuth("secret_admin")
+	authUser := utils.NewJWTAuth("secret_user")
+	Auth = authorization.NewAuth(authUser,authAdmin,authJWT)
+	for _, test := range IssueTokenTests {
+		request :=  makeRequest(structs.IssueTokenRequest{test.requestSubject,test.validDays,test.roles})
+		w := doPostRequest("/admin/issue",request,authAdmin.Name()+" "+test.adminToken)
+		if w.Code == http.StatusOK {
+			body, _ := ioutil.ReadAll(w.Body)
+			var token structs.IssueTokenResponse
+			json.Unmarshal(body,&token)
+			claims,_ := utils.CheckJWTToken(token.Token,"secret_user")
+			cclaims,_:= claims.(*utils.CustomClaims)
+			var extra_claim structs.AccessTokenExtraClaim
+			utils.MapToStruct(claims.(*utils.CustomClaims).ExtraClaims.(map[string]interface{}), &extra_claim)
+			assert.Equal(t, cclaims.Subject , test.tokenSubject, test.message)
+			assert.True(t, cclaims.ExpiresAt-time.Now().Unix()>int64(test.validDays)*24*60*60-10, test.message)
+			assert.True(t, cclaims.ExpiresAt-time.Now().Unix()<int64(test.validDays)*24*60*60+10, test.message)
+			assert.Equal(t, extra_claim.AccessTypes, test.roles, test.message)
+			assert.NotEmpty(t, cclaims.Id , test.message)
+		} else {
+			body, _ := ioutil.ReadAll(w.Body)
+			fmt.Println(string(body))
+		}
+
+		assert.Equal(t, test.status, w.Code, test.message)
+	}
+}
+
diff --git a/authorizer/src/asapo_authorizer/server/listroutes.go b/authorizer/src/asapo_authorizer/server/listroutes.go
index 9f8e562b5ddad13f54363f25599f16420440ca80..09b695091ffc4ecf686715a5b7a4f10434280cbb 100644
--- a/authorizer/src/asapo_authorizer/server/listroutes.go
+++ b/authorizer/src/asapo_authorizer/server/listroutes.go
@@ -11,6 +11,12 @@ var listRoutes = utils.Routes{
 		"/authorize",
 		routeAuthorize,
 	},
+	utils.Route{
+		"Authorize",
+		"POST",
+		"/introspect",
+		routeIntrospect,
+	},
 	utils.Route{
 		"HealthCheck",
 		"Get",
@@ -20,7 +26,13 @@ var listRoutes = utils.Routes{
 	utils.Route{
 		"Folder Token",
 		"POST",
-		"/folder",
+		"/{apiver}/folder",
 		routeFolderToken,
 	},
+	utils.Route{
+		"User Token",
+		"POST",
+		"/admin/issue",
+		routeAuthorisedTokenIssue,
+	},
 }
diff --git a/authorizer/src/asapo_authorizer/server/server.go b/authorizer/src/asapo_authorizer/server/server.go
index 7dc7aca8c467718c6324eb55e131b2da8c8defb7..8e26efcf473ccf444b9477bcaf4fb75d086a3147 100644
--- a/authorizer/src/asapo_authorizer/server/server.go
+++ b/authorizer/src/asapo_authorizer/server/server.go
@@ -1,8 +1,8 @@
 package server
 
 import (
+	"asapo_authorizer/authorization"
 	"asapo_authorizer/ldap_client"
-	"asapo_common/utils"
 )
 
 type  beamtimeMeta struct {
@@ -12,17 +12,19 @@ type  beamtimeMeta struct {
 	OfflinePath string `json:"core-path"`
 	OnlinePath string `json:"beamline-path"`
 	Type string `json:"source-type"`
+	AccessTypes []string `json:"access-types"`
 }
 
 type serverSettings struct {
-	Port                    int
-	LogLevel                string
-	RootBeamtimesFolder     string
+	Port                   int
+	LogLevel               string
+	RootBeamtimesFolder    string
 	CurrentBeamlinesFolder string
-	AlwaysAllowedBeamtimes  []beamtimeMeta
-	SecretFile              string
-	TokenDurationMin    	int
-	Ldap struct {
+	AlwaysAllowedBeamtimes []beamtimeMeta
+	UserSecretFile         string
+	AdminSecretFile        string
+	FolderTokenDurationMin int
+	Ldap                   struct {
 		Uri string
 		BaseDn string
 		FilterTemplate string
@@ -31,6 +33,5 @@ type serverSettings struct {
 
 var settings serverSettings
 var ldapClient ldap_client.LdapClient
-var authHMAC utils.Auth
-var authJWT utils.Auth
+var Auth *authorization.Auth
 
diff --git a/authorizer/src/asapo_authorizer/server/server_nottested.go b/authorizer/src/asapo_authorizer/server/server_nottested.go
index b141dcd960de570e86e3c57e2f062bb4983c17b1..2f428370f6955303d582f7bd22a551cb4dace88c 100644
--- a/authorizer/src/asapo_authorizer/server/server_nottested.go
+++ b/authorizer/src/asapo_authorizer/server/server_nottested.go
@@ -3,6 +3,7 @@
 package server
 
 import (
+	"asapo_authorizer/authorization"
 	"asapo_authorizer/ldap_client"
 	log "asapo_common/logger"
 	"asapo_common/utils"
@@ -14,21 +15,24 @@ import (
 
 func Start() {
 	mux := utils.NewRouter(listRoutes)
-	ldapClient = new (ldap_client.OpenLdapClient)
+	ldapClient = new(ldap_client.OpenLdapClient)
 	log.Info("Starting ASAPO Authorizer, version " + version.GetVersion())
 	log.Info("Listening on port: " + strconv.Itoa(settings.Port))
 	log.Fatal(http.ListenAndServe(":"+strconv.Itoa(settings.Port), http.HandlerFunc(mux.ServeHTTP)))
 }
 
-func createAuth() (utils.Auth,utils.Auth, error) {
-	secret, err := utils.ReadFirstStringFromFile(settings.SecretFile)
+func createAuth() (*authorization.Auth,error) {
+	secret, err := utils.ReadFirstStringFromFile(settings.UserSecretFile)
 	if err != nil {
-		return nil,nil, err
+		return nil, err
 	}
-	return utils.NewHMACAuth(secret),utils.NewJWTAuth(secret), nil
+	adminSecret, err := utils.ReadFirstStringFromFile(settings.AdminSecretFile)
+	if err != nil {
+		return nil, err
+	}
+	return authorization.NewAuth(utils.NewJWTAuth(secret), utils.NewJWTAuth(adminSecret), utils.NewJWTAuth(secret)),nil
 }
 
-
 func ReadConfig(fname string) (log.Level, error) {
 	if err := utils.ReadJsonFromFile(fname, &settings); err != nil {
 		return log.FatalLevel, err
@@ -38,12 +42,16 @@ func ReadConfig(fname string) (log.Level, error) {
 		return log.FatalLevel, errors.New("Server port not set")
 	}
 
-	if settings.SecretFile == "" {
-		return log.FatalLevel, errors.New("Secret file not set")
+	if settings.UserSecretFile == "" {
+		return log.FatalLevel, errors.New("User secret file not set")
+	}
+
+	if settings.AdminSecretFile == "" {
+		return log.FatalLevel, errors.New("Admin secret file not set")
 	}
 
 	var err error
-	authHMAC,authJWT, err = createAuth()
+	Auth, err = createAuth()
 	if err != nil {
 		return log.FatalLevel, err
 	}
diff --git a/broker/CMakeLists.txt b/broker/CMakeLists.txt
index d21235b49dc5cf7b4ba53364ed734deabe75e8b8..8d4e3ca3fe26d274ed69971a75274a6e7c9fdabf 100644
--- a/broker/CMakeLists.txt
+++ b/broker/CMakeLists.txt
@@ -1,20 +1,8 @@
 set (TARGET_NAME asapo-broker)
 
-if (NOT "$ENV{GOPATH}" STREQUAL "")
-	set(GOPATH $ENV{GOPATH})
-endif()
-
-if (NOT GOPATH)
-    message (FATAL_ERROR "GOPATH not set")
-endif()
-
-message(STATUS "global gopath ${GOPATH}")
-
 IF(WIN32)
-    set (gopath "${GOPATH}\;${CMAKE_CURRENT_SOURCE_DIR}\;${CMAKE_SOURCE_DIR}/common/go")
     set (exe_name "${TARGET_NAME}.exe")
 ELSE()
-    set (gopath ${GOPATH}:${CMAKE_CURRENT_SOURCE_DIR}:${CMAKE_SOURCE_DIR}/common/go)
     set (exe_name "${TARGET_NAME}")
 ENDIF()
 
@@ -23,8 +11,8 @@ include(testing_go)
 configure_file(docker/Dockerfile . COPYONLY)
 
 add_custom_target(asapo-broker ALL
-    COMMAND  ${CMAKE_COMMAND} -E env GOPATH=${gopath}
-    go build ${GO_OPTS} -o ${exe_name} asapo_broker/main
+    COMMAND go build ${GO_OPTS} -o ${CMAKE_CURRENT_BINARY_DIR}/${exe_name} main/broker.go
+    WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/src/asapo_broker
     VERBATIM)
 define_property(TARGET PROPERTY EXENAME
         BRIEF_DOCS <executable name>
@@ -32,6 +20,8 @@ define_property(TARGET PROPERTY EXENAME
 
 set_target_properties(asapo-broker PROPERTIES EXENAME ${CMAKE_CURRENT_BINARY_DIR}/${exe_name})
 
-gotest(${TARGET_NAME} "${CMAKE_CURRENT_SOURCE_DIR}" "./...")
-go_integration_test(${TARGET_NAME}-connectdb "./..." "MongoDBConnect")
-go_integration_test(${TARGET_NAME}-nextrecord "./..." "MongoDBNext")
+gotest(${TARGET_NAME} "${CMAKE_CURRENT_SOURCE_DIR}/src/asapo_broker" "./...")
+
+
+go_integration_test(${TARGET_NAME}-connectdb "${CMAKE_CURRENT_SOURCE_DIR}/src/asapo_broker" "./..." "MongoDBConnect")
+go_integration_test(${TARGET_NAME}-nextrecord "${CMAKE_CURRENT_SOURCE_DIR}/src/asapo_broker" "./..." "MongoDBNext")
diff --git a/broker/src/asapo_broker/database/mongodb.go b/broker/src/asapo_broker/database/mongodb.go
index 6683be24c80d02489b474c8af254d8493cfc38a8..5e63509e5a687a2bc95bd77df8ae6a6e13bba017 100644
--- a/broker/src/asapo_broker/database/mongodb.go
+++ b/broker/src/asapo_broker/database/mongodb.go
@@ -8,7 +8,6 @@ import (
 	"context"
 	"encoding/json"
 	"errors"
-	"fmt"
 	"go.mongodb.org/mongo-driver/bson"
 	"go.mongodb.org/mongo-driver/bson/primitive"
 	"go.mongodb.org/mongo-driver/mongo"
@@ -25,27 +24,29 @@ type ID struct {
 	ID int `bson:"_id"`
 }
 
-type ServiceRecord struct {
-	ID   int                    `json:"_id"`
-	Name string                 `json:"name"`
-	Meta map[string]interface{} `json:"meta"`
+type MessageRecord struct {
+	ID             int                    `json:"_id"`
+	Timestamp      int                    `bson:"timestamp" json:"timestamp"`
+	Name           string                 `json:"name"`
+	Meta           map[string]interface{} `json:"meta"`
+	NextStream     string
+	FinishedStream bool
 }
 
 type InProcessingRecord struct {
-	ID       int `bson:"_id" json:"_id"`
-	MaxResendAttempts int `bson:"maxResendAttempts" json:"maxResendAttempts"`
-	ResendAttempts int `bson:"resendAttempts" json:"resendAttempts"`
-	DelayMs  int64 `bson:"delayMs" json:"delayMs"`
+	ID                int   `bson:"_id" json:"_id"`
+	MaxResendAttempts int   `bson:"maxResendAttempts" json:"maxResendAttempts"`
+	ResendAttempts    int   `bson:"resendAttempts" json:"resendAttempts"`
+	DelayMs           int64 `bson:"delayMs" json:"delayMs"`
 }
 
 type NegAckParamsRecord struct {
-	ID       int `bson:"_id" json:"_id"`
-	MaxResendAttempts int `bson:"maxResendAttempts" json:"maxResendAttempts"`
-	ResendAttempts int `bson:"resendAttempts" json:"resendAttempts"`
-	DelayMs  int64 `bson:"delayMs" json:"delayMs"`
+	ID                int   `bson:"_id" json:"_id"`
+	MaxResendAttempts int   `bson:"maxResendAttempts" json:"maxResendAttempts"`
+	ResendAttempts    int   `bson:"resendAttempts" json:"resendAttempts"`
+	DelayMs           int64 `bson:"delayMs" json:"delayMs"`
 }
 
-
 type Nacks struct {
 	Unacknowledged []int `json:"unacknowledged"`
 }
@@ -71,10 +72,12 @@ const already_connected_msg = "already connected"
 
 const finish_stream_keyword = "asapo_finish_stream"
 const no_next_stream_keyword = "asapo_no_next"
-
-var dbSessionLock sync.Mutex
+const stream_filter_all = "all"
+const stream_filter_finished = "finished"
+const stream_filter_unfinished = "unfinished"
 
 
+var dbSessionLock sync.Mutex
 
 type SizeRecord struct {
 	Size int `bson:"size" json:"size"`
@@ -161,18 +164,24 @@ func (db *Mongodb) insertMeta(dbname string, s interface{}) error {
 	return err
 }
 
-func (db *Mongodb) getMaxIndex(request Request, returnIncompete bool) (max_id int, err error) {
-	c := db.client.Database(request.DbName).Collection(data_collection_name_prefix + request.DbCollectionName)
+func maxIndexQuery(request Request, returnIncompete bool) bson.M {
 	var q bson.M
 	if request.DatasetOp && !returnIncompete {
-		if request.MinDatasetSize>0 {
-			q = bson.M{"size": bson.M{"$gte": request.MinDatasetSize}}
+		if request.MinDatasetSize > 0 {
+			q = bson.M{"$expr": bson.M{"$gte": []interface{}{bson.M{"$size": "$messages"}, request.MinDatasetSize}}}
 		} else {
 			q = bson.M{"$expr": bson.M{"$eq": []interface{}{"$size", bson.M{"$size": "$messages"}}}}
 		}
+		q = bson.M{"$or": []interface{}{bson.M{"name": finish_stream_keyword}, q}}
 	} else {
 		q = nil
 	}
+	return q
+}
+
+func (db *Mongodb) getMaxIndex(request Request, returnIncompete bool) (max_id int, err error) {
+	c := db.client.Database(request.DbName).Collection(data_collection_name_prefix + request.DbCollectionName)
+	q := maxIndexQuery(request, returnIncompete)
 
 	opts := options.FindOne().SetSort(bson.M{"_id": -1}).SetReturnKey(true)
 	var result ID
@@ -184,7 +193,6 @@ func (db *Mongodb) getMaxIndex(request Request, returnIncompete bool) (max_id in
 
 	return result.ID, err
 }
-
 func duplicateError(err error) bool {
 	command_error, ok := err.(mongo.CommandError)
 	if !ok {
@@ -219,7 +227,7 @@ func (db *Mongodb) incrementField(request Request, max_ind int, res interface{})
 		if err == mongo.ErrNoDocuments || duplicateError(err) {
 			// try again without upsert - if the first error was due to missing pointer
 			opts = options.FindOneAndUpdate().SetUpsert(false).SetReturnDocument(options.After)
-			if err2 := c.FindOneAndUpdate(context.TODO(), q, update, opts).Decode(res);err2==nil {
+			if err2 := c.FindOneAndUpdate(context.TODO(), q, update, opts).Decode(res); err2 == nil {
 				return nil
 			}
 			return &DBError{utils.StatusNoData, encodeAnswer(max_ind, max_ind, "")}
@@ -232,64 +240,77 @@ func (db *Mongodb) incrementField(request Request, max_ind int, res interface{})
 
 func encodeAnswer(id, id_max int, next_stream string) string {
 	var r = struct {
-		Op             string `json:"op"`
-		Id             int    `json:"id"`
-		Id_max         int    `json:"id_max"`
+		Op          string `json:"op"`
+		Id          int    `json:"id"`
+		Id_max      int    `json:"id_max"`
 		Next_stream string `json:"next_stream"`
 	}{"get_record_by_id", id, id_max, next_stream}
 	answer, _ := json.Marshal(&r)
 	return string(answer)
 }
 
-func (db *Mongodb) getRecordByIDRow(request Request, id, id_max int) ([]byte, error) {
-	var res map[string]interface{}
-	q := bson.M{"_id": id}
+func recordContainsPartialData(request Request, rec map[string]interface{}) bool {
+	if !request.DatasetOp {
+		return false
+	}
 
+	name, ok_name := rec["name"].(string)
+	if ok_name && name == finish_stream_keyword {
+		return false
+	}
+	imgs, ok1 := rec["messages"].(primitive.A)
+	expectedSize, ok2 := utils.InterfaceToInt64(rec["size"])
+	if !ok1 || !ok2 {
+		return false
+	}
+	nMessages := len(imgs)
+	if (request.MinDatasetSize == 0 && int64(nMessages) != expectedSize) || (request.MinDatasetSize > 0 && nMessages < request.MinDatasetSize) {
+		return true
+	}
+	return false
+}
+
+func (db *Mongodb) getRecordFromDb(request Request, id, id_max int) (res map[string]interface{}, err error) {
+	q := bson.M{"_id": id}
 	c := db.client.Database(request.DbName).Collection(data_collection_name_prefix + request.DbCollectionName)
-	err := c.FindOne(context.TODO(), q, options.FindOne()).Decode(&res)
+	err = c.FindOne(context.TODO(), q, options.FindOne()).Decode(&res)
 	if err != nil {
 		answer := encodeAnswer(id, id_max, "")
 		log_str := "error getting record id " + strconv.Itoa(id) + " for " + request.DbName + " : " + err.Error()
-		fmt.Println(err)
 		logger.Debug(log_str)
-		return nil, &DBError{utils.StatusNoData, answer}
+		return res, &DBError{utils.StatusNoData, answer}
 	}
+	return res, err
+}
 
-	partialData := false
-	if request.DatasetOp {
-		imgs,ok1 :=res["messages"].(primitive.A)
-		expectedSize,ok2 := utils.InterfaceToInt64(res["size"])
-		if !ok1 || !ok2 {
-			return nil, &DBError{utils.StatusTransactionInterrupted, "getRecordByIDRow: cannot parse database response" }
-		}
-		nMessages := len(imgs)
-		if (request.MinDatasetSize==0 && int64(nMessages)!=expectedSize) || (request.MinDatasetSize==0 && nMessages<request.MinDatasetSize) {
-			partialData = true
-		}
+func (db *Mongodb) getRecordByIDRaw(request Request, id, id_max int) ([]byte, error) {
+	res, err := db.getRecordFromDb(request, id, id_max)
+	if err != nil {
+		return nil, err
 	}
 
-	if partialData {
-		log_str := "got record id " + strconv.Itoa(id) + " for " + request.DbName
-		logger.Debug(log_str)
-	} else {
-		log_str := "got record id " + strconv.Itoa(id) + " for " + request.DbName
-		logger.Debug(log_str)
+	if err := checkStreamFinished(request, id, id_max, res); err != nil {
+		return nil, err
 	}
 
-	answer,err := utils.MapToJson(&res)
-	if err!=nil {
-		return nil,err
+	log_str := "got record id " + strconv.Itoa(id) + " for " + request.DbName
+	logger.Debug(log_str)
+
+	record, err := utils.MapToJson(&res)
+	if err != nil {
+		return nil, err
 	}
-	if partialData {
-		return nil,&DBError{utils.StatusPartialData, string(answer)}
+	if recordContainsPartialData(request, res) {
+		return nil, &DBError{utils.StatusPartialData, string(record)}
+	} else {
+		return record, nil
 	}
-	return answer,nil
 }
 
-func (db *Mongodb) getEarliestRecord(dbname string, collection_name string) (map[string]interface{}, error) {
+func (db *Mongodb) getRawRecordWithSort(dbname string, collection_name string, sortField string, sortOrder int) (map[string]interface{}, error) {
 	var res map[string]interface{}
 	c := db.client.Database(dbname).Collection(data_collection_name_prefix + collection_name)
-	opts := options.FindOne().SetSort(bson.M{"timestamp": 1})
+	opts := options.FindOne().SetSort(bson.M{sortField: sortOrder})
 	var q bson.M = nil
 	err := c.FindOne(context.TODO(), q, opts).Decode(&res)
 
@@ -297,9 +318,17 @@ func (db *Mongodb) getEarliestRecord(dbname string, collection_name string) (map
 		if err == mongo.ErrNoDocuments {
 			return map[string]interface{}{}, nil
 		}
-		return nil,err
+		return nil, err
 	}
-	return res,nil
+	return res, nil
+}
+
+func (db *Mongodb) getLastRawRecord(dbname string, collection_name string) (map[string]interface{}, error) {
+	return db.getRawRecordWithSort(dbname, collection_name, "_id", -1)
+}
+
+func (db *Mongodb) getEarliestRawRecord(dbname string, collection_name string) (map[string]interface{}, error) {
+	return db.getRawRecordWithSort(dbname, collection_name, "timestamp", 1)
 }
 
 func (db *Mongodb) getRecordByID(request Request) ([]byte, error) {
@@ -308,18 +337,17 @@ func (db *Mongodb) getRecordByID(request Request) ([]byte, error) {
 		return nil, &DBError{utils.StatusWrongInput, err.Error()}
 	}
 
-	max_ind, err := db.getMaxIndex(request,true)
+	max_ind, err := db.getMaxIndex(request, true)
 	if err != nil {
 		return nil, err
 	}
 
-	return db.getRecordByIDRow(request, id, max_ind)
-
+	return db.getRecordByIDRaw(request, id, max_ind)
 }
 
 func (db *Mongodb) negAckRecord(request Request) ([]byte, error) {
 	input := struct {
-		Id int
+		Id     int
 		Params struct {
 			DelayMs int
 		}
@@ -330,14 +358,13 @@ func (db *Mongodb) negAckRecord(request Request) ([]byte, error) {
 		return nil, &DBError{utils.StatusWrongInput, err.Error()}
 	}
 
-	err =  db.InsertRecordToInprocess(request.DbName,inprocess_collection_name_prefix+request.GroupId,input.Id,input.Params.DelayMs, 1)
+	err = db.InsertRecordToInprocess(request.DbName, inprocess_collection_name_prefix+request.GroupId, input.Id, input.Params.DelayMs, 1)
 	return []byte(""), err
 }
 
-
 func (db *Mongodb) ackRecord(request Request) ([]byte, error) {
 	var record ID
-	err := json.Unmarshal([]byte(request.ExtraParam),&record)
+	err := json.Unmarshal([]byte(request.ExtraParam), &record)
 	if err != nil {
 		return nil, &DBError{utils.StatusWrongInput, err.Error()}
 	}
@@ -368,7 +395,7 @@ func (db *Mongodb) checkDatabaseOperationPrerequisites(request Request) error {
 }
 
 func (db *Mongodb) getCurrentPointer(request Request) (LocationPointer, int, error) {
-	max_ind, err := db.getMaxIndex(request,true)
+	max_ind, err := db.getMaxIndex(request, true)
 	if err != nil {
 		return LocationPointer{}, 0, err
 	}
@@ -386,18 +413,18 @@ func (db *Mongodb) getCurrentPointer(request Request) (LocationPointer, int, err
 	return curPointer, max_ind, nil
 }
 
-func (db *Mongodb) getUnProcessedId(dbname string, collection_name string, delayMs int,nResendAttempts int) (int, error) {
+func (db *Mongodb) getUnProcessedId(dbname string, collection_name string, delayMs int, nResendAttempts int) (int, error) {
 	var res InProcessingRecord
 	opts := options.FindOneAndUpdate().SetUpsert(false).SetReturnDocument(options.After)
 	tNow := time.Now().UnixNano()
- 	var update bson.M
-	if nResendAttempts==0 {
-		update = bson.M{"$set": bson.M{"delayMs": tNow + int64(delayMs*1e6) ,"maxResendAttempts":math.MaxInt32}, "$inc": bson.M{"resendAttempts": 1}}
+	var update bson.M
+	if nResendAttempts == 0 {
+		update = bson.M{"$set": bson.M{"delayMs": tNow + int64(delayMs*1e6), "maxResendAttempts": math.MaxInt32}, "$inc": bson.M{"resendAttempts": 1}}
 	} else {
-		update = bson.M{"$set": bson.M{"delayMs": tNow + int64(delayMs*1e6) ,"maxResendAttempts":nResendAttempts}, "$inc": bson.M{"resendAttempts": 1}}
+		update = bson.M{"$set": bson.M{"delayMs": tNow + int64(delayMs*1e6), "maxResendAttempts": nResendAttempts}, "$inc": bson.M{"resendAttempts": 1}}
 	}
 
-	q := bson.M{"delayMs": bson.M{"$lte": tNow},"$expr": bson.M{"$lt": []string{"$resendAttempts","$maxResendAttempts"}}}
+	q := bson.M{"delayMs": bson.M{"$lte": tNow}, "$expr": bson.M{"$lt": []string{"$resendAttempts", "$maxResendAttempts"}}}
 	c := db.client.Database(dbname).Collection(collection_name)
 	err := c.FindOneAndUpdate(context.TODO(), q, update, opts).Decode(&res)
 	if err != nil {
@@ -412,9 +439,9 @@ func (db *Mongodb) getUnProcessedId(dbname string, collection_name string, delay
 	return res.ID, nil
 }
 
-func (db *Mongodb) InsertRecordToInprocess(db_name string, collection_name string,id int,delayMs int, nResendAttempts int) error {
+func (db *Mongodb) InsertRecordToInprocess(db_name string, collection_name string, id int, delayMs int, nResendAttempts int) error {
 	record := InProcessingRecord{
-		id, nResendAttempts, 0,time.Now().UnixNano()+int64(delayMs*1e6),
+		id, nResendAttempts, 0, time.Now().UnixNano() + int64(delayMs*1e6),
 	}
 
 	c := db.client.Database(db_name).Collection(collection_name)
@@ -434,12 +461,12 @@ func (db *Mongodb) InsertToInprocessIfNeeded(db_name string, collection_name str
 		return err
 	}
 
-	return db.InsertRecordToInprocess(db_name,collection_name,id,delayMs, nResendAttempts)
+	return db.InsertRecordToInprocess(db_name, collection_name, id, delayMs, nResendAttempts)
 
 }
 
 func (db *Mongodb) getNextAndMaxIndexesFromInprocessed(request Request, ignoreTimeout bool) (int, int, error) {
-	var record_ind,  max_ind, delayMs, nResendAttempts int
+	var record_ind, max_ind, delayMs, nResendAttempts int
 	var err error
 	if len(request.ExtraParam) != 0 {
 		delayMs, nResendAttempts, err = extractsTwoIntsFromString(request.ExtraParam)
@@ -451,7 +478,7 @@ func (db *Mongodb) getNextAndMaxIndexesFromInprocessed(request Request, ignoreTi
 	}
 	tNow := time.Now().Unix()
 	if (atomic.LoadInt64(&db.lastReadFromInprocess) <= tNow-int64(db.settings.ReadFromInprocessPeriod)) || ignoreTimeout {
-		record_ind, err = db.getUnProcessedId(request.DbName, inprocess_collection_name_prefix+request.GroupId, delayMs,nResendAttempts)
+		record_ind, err = db.getUnProcessedId(request.DbName, inprocess_collection_name_prefix+request.GroupId, delayMs, nResendAttempts)
 		if err != nil {
 			log_str := "error getting unprocessed id " + request.DbName + ", groupid: " + request.GroupId + ":" + err.Error()
 			logger.Debug(log_str)
@@ -505,45 +532,61 @@ func (db *Mongodb) getNextAndMaxIndexes(request Request) (int, int, error) {
 	return nextInd, maxInd, nil
 }
 
-func (db *Mongodb) processLastRecord(request Request, data []byte, err error) ([]byte, error) {
-	var r ServiceRecord
-	err = json.Unmarshal(data, &r)
-	if err != nil || r.Name != finish_stream_keyword {
-		return data, err
-	}
-	var next_stream string
-	next_stream, ok := r.Meta["next_stream"].(string)
-	if !ok {
-		next_stream = no_next_stream_keyword
+func ExtractMessageRecord(data map[string]interface{}) (MessageRecord, bool) {
+	var r MessageRecord
+	err := utils.MapToStruct(data, &r)
+	if err != nil {
+		return r, false
+	}
+	r.FinishedStream = (r.Name == finish_stream_keyword)
+	if r.FinishedStream {
+		var next_stream string
+		next_stream, ok := r.Meta["next_stream"].(string)
+		if !ok {
+			next_stream = no_next_stream_keyword
+		}
+		r.NextStream = next_stream
 	}
+	return r, true
+}
 
-	answer := encodeAnswer(r.ID, r.ID, next_stream)
-	log_str := "reached end of stream " + request.DbCollectionName + " , next_stream: " + next_stream
-	logger.Debug(log_str)
-
-
+func (db *Mongodb) tryGetRecordFromInprocessed(request Request, originalerror error) ([]byte, error) {
 	var err_inproc error
 	nextInd, maxInd, err_inproc := db.getNextAndMaxIndexesFromInprocessed(request, true)
 	if err_inproc != nil {
 		return nil, err_inproc
 	}
 	if nextInd != 0 {
-		return  db.getRecordByIDRow(request, nextInd, maxInd)
+		return db.getRecordByIDRaw(request, nextInd, maxInd)
+	} else {
+		return nil, originalerror
+	}
+}
+
+func checkStreamFinished(request Request, id, id_max int, data map[string]interface{}) error {
+	if id != id_max {
+		return nil
+	}
+	r, ok := ExtractMessageRecord(data)
+	if !ok || !r.FinishedStream {
+		return nil
 	}
+	log_str := "reached end of stream " + request.DbCollectionName + " , next_stream: " + r.NextStream
+	logger.Debug(log_str)
 
-	return nil, &DBError{utils.StatusNoData, answer}
+	answer := encodeAnswer(r.ID-1, r.ID-1, r.NextStream)
+	return &DBError{utils.StatusNoData, answer}
 }
 
 func (db *Mongodb) getNextRecord(request Request) ([]byte, error) {
-
 	nextInd, maxInd, err := db.getNextAndMaxIndexes(request)
 	if err != nil {
 		return nil, err
 	}
 
-	data, err := db.getRecordByIDRow(request, nextInd, maxInd)
-	if nextInd == maxInd && GetStatusCodeFromError(err)!=utils.StatusPartialData {
-		data, err = db.processLastRecord(request,data, err)
+	data, err := db.getRecordByIDRaw(request, nextInd, maxInd)
+	if err != nil {
+		data, err = db.tryGetRecordFromInprocessed(request, err)
 	}
 
 	if err == nil {
@@ -560,18 +603,33 @@ func (db *Mongodb) getLastRecord(request Request) ([]byte, error) {
 	if err != nil {
 		return nil, err
 	}
-	return db.getRecordByIDRow(request, max_ind, max_ind)
+	return db.getRecordByIDRaw(request, max_ind, max_ind)
+}
+
+func getSizeFilter(request Request) bson.M {
+	filter := bson.M{}
+	if request.ExtraParam == "false" { // do not return incomplete datasets
+		filter = bson.M{"$expr": bson.M{"$eq": []interface{}{"$size", bson.M{"$size": "$messages"}}}}
+	} else if request.ExtraParam == "true" {
+		filter = bson.M{"$expr": bson.M{"$gt": []interface{}{bson.M{"$size": "$messages"}, 0}}}
+	}
+	filter = bson.M{"$and": []interface{}{bson.M{"name": bson.M{"$ne": finish_stream_keyword}}, filter}}
+	return filter
 }
 
 func (db *Mongodb) getSize(request Request) ([]byte, error) {
 	c := db.client.Database(request.DbName).Collection(data_collection_name_prefix + request.DbCollectionName)
-	var rec SizeRecord
-	var err error
 
-	size, err := c.CountDocuments(context.TODO(), bson.M{}, options.Count())
+	filter := getSizeFilter(request)
+	size, err := c.CountDocuments(context.TODO(), filter, options.Count())
 	if err != nil {
+		if ce, ok := err.(mongo.CommandError); ok && ce.Code == 17124 { // 17124: "$size must be an array" - records are not datasets
+			return nil, &DBError{utils.StatusWrongInput, "no datasets found"}
+		}
 		return nil, err
 	}
+
+	var rec SizeRecord
 	rec.Size = int(size)
 	return json.Marshal(&rec)
 }
@@ -583,7 +641,7 @@ func (db *Mongodb) resetCounter(request Request) ([]byte, error) {
 	}
 
 	err = db.setCounter(request, id)
-	if err!= nil {
+	if err != nil {
 		return []byte(""), err
 	}
 
@@ -684,10 +742,10 @@ func extractsTwoIntsFromString(from_to string) (int, int, error) {
 
 }
 
-func (db *Mongodb) nacks(request Request) ([]byte, error) {
+func (db *Mongodb) getNacksLimits(request Request) (int, int, error) {
 	from, to, err := extractsTwoIntsFromString(request.ExtraParam)
 	if err != nil {
-		return nil, err
+		return 0, 0, err
 	}
 
 	if from == 0 {
@@ -695,11 +753,31 @@ func (db *Mongodb) nacks(request Request) ([]byte, error) {
 	}
 
 	if to == 0 {
-		to, err = db.getMaxIndex(request, true)
+		to, err = db.getMaxLimitWithoutEndOfStream(request)
 		if err != nil {
-			return nil, err
+			return 0, 0, err
 		}
 	}
+	return from, to, nil
+}
+
+func (db *Mongodb) getMaxLimitWithoutEndOfStream(request Request) (int, error) {
+	maxInd, err := db.getMaxIndex(request, true)
+	if err != nil {
+		return 0, err
+	}
+	_, last_err := db.getRecordByIDRaw(request, maxInd, maxInd)
+	if last_err != nil && maxInd > 0 {
+		maxInd = maxInd - 1
+	}
+	return maxInd, nil
+}
+
+func (db *Mongodb) nacks(request Request) ([]byte, error) {
+	from, to, err := db.getNacksLimits(request)
+	if err != nil {
+		return nil, err
+	}
 
 	res := Nacks{[]int{}}
 	if to == 0 {
@@ -730,27 +808,28 @@ func (db *Mongodb) lastAck(request Request) ([]byte, error) {
 	return utils.MapToJson(&result)
 }
 
-func (db *Mongodb) getNacks(request Request, min_index, max_index int) ([]int, error) {
-
-	c := db.client.Database(request.DbName).Collection(acks_collection_name_prefix + request.DbCollectionName + "_" + request.GroupId)
-
+func (db *Mongodb) canAvoidDbRequest(min_index int, max_index int, c *mongo.Collection) ([]int, error, bool) {
 	if min_index > max_index {
-		return []int{}, errors.New("from index is greater than to index")
+		return []int{}, errors.New("from index is greater than to index"), true
 	}
 
 	size, err := c.CountDocuments(context.TODO(), bson.M{}, options.Count())
 	if err != nil {
-		return []int{}, err
+		return []int{}, err, true
 	}
 
 	if size == 0 {
-		return makeRange(min_index, max_index), nil
+		return makeRange(min_index, max_index), nil, true
 	}
 
 	if min_index == 1 && int(size) == max_index {
-		return []int{}, nil
+		return []int{}, nil, true
 	}
 
+	return nil, nil, false
+}
+
+func getNacksQuery(max_index int, min_index int) []bson.D {
 	matchStage := bson.D{{"$match", bson.D{{"_id", bson.D{{"$lt", max_index + 1}, {"$gt", min_index - 1}}}}}}
 	groupStage := bson.D{
 		{"$group", bson.D{
@@ -766,30 +845,41 @@ func (db *Mongodb) getNacks(request Request, min_index, max_index int) ([]int, e
 				{"$setDifference", bson.A{bson.D{{"$range", bson.A{min_index, max_index + 1}}}, "$numbers"}},
 			}}},
 		}}
+	return mongo.Pipeline{matchStage, groupStage, projectStage}
+}
 
-	query := mongo.Pipeline{matchStage, groupStage, projectStage}
-	cursor, err := c.Aggregate(context.Background(), query)
-	type res struct {
+func extractNacsFromCursor(err error, cursor *mongo.Cursor) ([]int, error) {
+	resp := []struct {
 		Numbers []int
-	}
-	resp := []res{}
+	}{}
 	err = cursor.All(context.Background(), &resp)
 	if err != nil || len(resp) != 1 {
 		return []int{}, err
 	}
-
 	return resp[0].Numbers, nil
 }
 
+func (db *Mongodb) getNacks(request Request, min_index, max_index int) ([]int, error) {
+	c := db.client.Database(request.DbName).Collection(acks_collection_name_prefix + request.DbCollectionName + "_" + request.GroupId)
+
+	if res, err, ok := db.canAvoidDbRequest(min_index, max_index, c); ok {
+		return res, err
+	}
+
+	query := getNacksQuery(max_index, min_index)
+	cursor, err := c.Aggregate(context.Background(), query)
+	if err != nil { return []int{}, err } // a nil cursor would panic in extractNacsFromCursor
+	return extractNacsFromCursor(err, cursor)
+}
+
 func (db *Mongodb) getStreams(request Request) ([]byte, error) {
-	rec, err := streams.getStreams(db,request.DbName,request.ExtraParam)
+	rec, err := streams.getStreams(db, request)
 	if err != nil {
 		return db.processQueryError("get streams", request.DbName, err)
 	}
 	return json.Marshal(&rec)
 }
 
-
 func (db *Mongodb) ProcessRequest(request Request) (answer []byte, err error) {
 	if err := db.checkDatabaseOperationPrerequisites(request); err != nil {
 		return nil, err
diff --git a/broker/src/asapo_broker/database/mongodb_query.go b/broker/src/asapo_broker/database/mongodb_query.go
index faa6f718b6d2dfd02abdd393596acf881d7db6ff..7348ac93ec6d1726eb8c70d5158104d7f7b86141 100644
--- a/broker/src/asapo_broker/database/mongodb_query.go
+++ b/broker/src/asapo_broker/database/mongodb_query.go
@@ -6,7 +6,7 @@ import (
 	"errors"
 	"fmt"
 	"go.mongodb.org/mongo-driver/bson"
-	"github.com/knocknote/vitess-sqlparser/sqlparser"
+	"github.com/blastrain/vitess-sqlparser/sqlparser"
 	"strconv"
 )
 
diff --git a/broker/src/asapo_broker/database/mongodb_streams.go b/broker/src/asapo_broker/database/mongodb_streams.go
index fba70330b025ad6ae762c4574324073b3900a655..ce1ce2cdac11aa19bba2c9fec166d4997a63b6f2 100644
--- a/broker/src/asapo_broker/database/mongodb_streams.go
+++ b/broker/src/asapo_broker/database/mongodb_streams.go
@@ -14,8 +14,12 @@ import (
 )
 
 type StreamInfo struct {
-	Name      string `json:"name"`
-	Timestamp int64  `json:"timestampCreated"`
+	LastId        int64  `json:"lastId"`
+	Name          string `json:"name"`
+	Timestamp     int64  `json:"timestampCreated"`
+	TimestampLast int64  `json:"timestampLast"`
+	Finished      bool   `json:"finished"`
+	NextStream    string `json:"nextStream"`
 }
 
 type StreamsRecord struct {
@@ -38,7 +42,9 @@ func (ss *Streams) tryGetFromCache(db_name string, updatePeriodMs int) (StreamsR
 	if !ok {
 		return StreamsRecord{}, errors.New("no records for " + db_name)
 	}
-	return rec, nil
+	res := StreamsRecord{}
+	utils.DeepCopy(rec, &res)
+	return res, nil
 }
 
 func readStreams(db *Mongodb, db_name string) (StreamsRecord, error) {
@@ -57,32 +63,93 @@ func readStreams(db *Mongodb, db_name string) (StreamsRecord, error) {
 	return rec, nil
 }
 
-func updateTimestamps(db *Mongodb, db_name string, rec *StreamsRecord) {
-	ss,dbFound :=streams.records[db_name]
+func getCurrentStreams(db_name string) []StreamInfo {
+	ss, dbFound := streams.records[db_name]
 	currentStreams := []StreamInfo{}
 	if dbFound {
 		// sort streams by name
-		currentStreams=ss.Streams
-		sort.Slice(currentStreams,func(i, j int) bool {
-			return currentStreams[i].Name>=currentStreams[j].Name
+		currentStreams = ss.Streams
+		sort.Slice(currentStreams, func(i, j int) bool {
+			return currentStreams[i].Name >= currentStreams[j].Name
 		})
 	}
+	return currentStreams
+}
+
+func findStreamAmongCurrent(currentStreams []StreamInfo, record StreamInfo) (int, bool) {
+	ind := sort.Search(len(currentStreams), func(i int) bool {
+		return currentStreams[i].Name >= record.Name
+	})
+	if ind < len(currentStreams) && currentStreams[ind].Name == record.Name {
+		return ind, true
+	}
+	return -1, false
+}
+
+func fillInfoFromEarliestRecord(db *Mongodb, db_name string, rec *StreamsRecord, record StreamInfo, i int) error {
+	res, err := db.getEarliestRawRecord(db_name, record.Name)
+	if err != nil {
+		return err
+	}
+	ts, ok := utils.GetInt64FromMap(res, "timestamp")
+	if ok {
+		rec.Streams[i].Timestamp = ts
+	} else {
+		return errors.New("fillInfoFromEarliestRecord: cannot extract timestamp")
+	}
+	return nil
+}
+
+func fillInfoFromLastRecord(db *Mongodb, db_name string, rec *StreamsRecord, record StreamInfo, i int) error {
+	res, err := db.getLastRawRecord(db_name, record.Name)
+	if err != nil {
+		return err
+	}
+	mrec, ok := ExtractMessageRecord(res)
+	if !ok {
+		return errors.New("fillInfoFromLastRecord: cannot extract record")
+	}
+
+	rec.Streams[i].LastId = int64(mrec.ID)
+	rec.Streams[i].TimestampLast = int64(mrec.Timestamp)
+	rec.Streams[i].Finished = mrec.FinishedStream
+	if mrec.FinishedStream {
+		rec.Streams[i].LastId = rec.Streams[i].LastId - 1
+		if mrec.NextStream != no_next_stream_keyword {
+			rec.Streams[i].NextStream = mrec.NextStream
+		}
+	}
+	return nil
+}
+
+func updateStreamInfofromCurrent(currentStreams []StreamInfo, record StreamInfo, rec *StreamInfo) (found bool, updateFinished bool) {
+	ind, found := findStreamAmongCurrent(currentStreams, record)
+	if found {
+		*rec = currentStreams[ind]
+		if currentStreams[ind].Finished {
+			return found, true
+		}
+	}
+	return found, false
+}
+
+func updateStreamInfos(db *Mongodb, db_name string, rec *StreamsRecord) error {
+	currentStreams := getCurrentStreams(db_name)
 	for i, record := range rec.Streams {
-		ind := sort.Search(len(currentStreams),func(i int) bool {
-			return currentStreams[i].Name>=record.Name
-		})
-		if ind < len(currentStreams) && currentStreams[ind].Name == record.Name { // record found, just skip it
-			rec.Streams[i].Timestamp = currentStreams[ind].Timestamp
+		found, mayContinue := updateStreamInfofromCurrent(currentStreams, record, &rec.Streams[i])
+		if mayContinue {
 			continue
 		}
-		res, err := db.getEarliestRecord(db_name, record.Name)
-		if err == nil {
-			ts,ok:=utils.InterfaceToInt64(res["timestamp"])
-			if ok {
-				rec.Streams[i].Timestamp = ts
+		if !found { // set timestamp
+			if err := fillInfoFromEarliestRecord(db, db_name, rec, record, i); err != nil {
+				return err
 			}
 		}
+		if err := fillInfoFromLastRecord(db, db_name, rec, record, i); err != nil { // update firstStream last record (timestamp, stream finished flag)
+			return err
+		}
 	}
+	return nil
 }
 
 func sortRecords(rec *StreamsRecord) {
@@ -96,35 +163,115 @@ func (ss *Streams) updateFromDb(db *Mongodb, db_name string) (StreamsRecord, err
 	if err != nil {
 		return StreamsRecord{}, err
 	}
-	updateTimestamps(db, db_name, &rec)
+	err = updateStreamInfos(db, db_name, &rec)
+	if err != nil {
+		return StreamsRecord{}, err
+	}
+
 	sortRecords(&rec)
-	if len(rec.Streams)>0 {
-		ss.records[db_name] = rec
+	if len(rec.Streams) > 0 {
+		res := StreamsRecord{}
+		utils.DeepCopy(rec, &res)
+		ss.records[db_name] = res
 		ss.lastUpdated = time.Now().UnixNano()
 	}
 	return rec, nil
 }
 
-func (ss *Streams) getStreams(db *Mongodb, db_name string, from string) (StreamsRecord, error) {
-	streamsLock.Lock()
-	rec, err := ss.tryGetFromCache(db_name,db.settings.UpdateStreamCachePeriodMs)
+func getFiltersFromString(filterString string) (string, string, error) {
+	firstStream := ""
+	streamStatus := ""
+	s := strings.Split(filterString, "_")
+	switch len(s) {
+	case 1:
+		firstStream = s[0]
+	case 2:
+		firstStream = s[0]
+		streamStatus = s[1]
+	default:
+		return "", "", errors.New("wrong format: " + filterString)
+	}
+	if streamStatus == "" {
+		streamStatus = stream_filter_all
+	}
+	return firstStream, streamStatus, nil
+}
+
+func getStreamsParamsFromRequest(request Request) (string, string, error) {
+	if request.ExtraParam == "" {
+		return "", stream_filter_all, nil
+	}
+
+	firstStream, streamStatus, err := getFiltersFromString(request.ExtraParam)
 	if err != nil {
-		rec, err = ss.updateFromDb(db, db_name)
+		return "", "", err
 	}
-	streamsLock.Unlock()
+
+	err = checkStreamstreamStatus(streamStatus)
 	if err != nil {
-		return StreamsRecord{}, err
+		return "", "", err
+	}
+
+	return firstStream, streamStatus, nil
+}
+
+func checkStreamstreamStatus(streamStatus string) error {
+	if !utils.StringInSlice(streamStatus, []string{stream_filter_all, stream_filter_finished, stream_filter_unfinished}) {
+		return errors.New("getStreamsParamsFromRequest: wrong streamStatus " + streamStatus)
+	}
+	return nil
+}
+
+func keepStream(rec StreamInfo, streamStatus string) bool {
+	return (rec.Finished && streamStatus == stream_filter_finished) || (!rec.Finished && streamStatus == stream_filter_unfinished)
+}
+
+func filterStreams(rec StreamsRecord, firstStream string, streamStatus string) []StreamInfo {
+	limitedStreams := limitStreams(rec, firstStream)
+
+	if streamStatus == stream_filter_all {
+		return limitedStreams
+	}
+	nextStreams := limitedStreams[:0]
+	for _, rec := range limitedStreams {
+		if keepStream(rec, streamStatus) {
+			nextStreams = append(nextStreams, rec)
+		}
 	}
+	return nextStreams
+}
 
-	if from != "" {
+func limitStreams(rec StreamsRecord, firstStream string) []StreamInfo {
+	if firstStream != "" {
 		ind := len(rec.Streams)
 		for i, rec := range rec.Streams {
-			if rec.Name == from {
+			if rec.Name == firstStream {
 				ind = i
 				break
 			}
 		}
 		rec.Streams = rec.Streams[ind:]
 	}
+	return rec.Streams
+}
+
+func (ss *Streams) getStreams(db *Mongodb, request Request) (StreamsRecord, error) {
+	firstStream, streamStatus, err := getStreamsParamsFromRequest(request)
+	if err != nil {
+		return StreamsRecord{}, err
+	}
+
+	streamsLock.Lock()
+	rec, err := ss.tryGetFromCache(request.DbName, db.settings.UpdateStreamCachePeriodMs)
+	if err != nil {
+		rec, err = ss.updateFromDb(db, request.DbName)
+	}
+	streamsLock.Unlock()
+	if err != nil {
+		return StreamsRecord{}, err
+	}
+
+	rec.Streams = filterStreams(rec, firstStream, streamStatus)
+
 	return rec, nil
 }
diff --git a/broker/src/asapo_broker/database/mongodb_test.go b/broker/src/asapo_broker/database/mongodb_test.go
index dbf379375b64abcdfc3c82244b42f87bc8d4330f..b0f2e97a38901e168cb1964a44beacb2de73593a 100644
--- a/broker/src/asapo_broker/database/mongodb_test.go
+++ b/broker/src/asapo_broker/database/mongodb_test.go
@@ -13,16 +13,17 @@ import (
 )
 
 type TestRecord struct {
-	ID        int64               `bson:"_id" json:"_id"`
+	ID        int64             `bson:"_id" json:"_id"`
 	Meta      map[string]string `bson:"meta" json:"meta"`
 	Name      string            `bson:"name" json:"name"`
 	Timestamp int64             `bson:"timestamp" json:"timestamp"`
 }
 
 type TestDataset struct {
-	ID     int64          `bson:"_id" json:"_id"`
-	Size   int64          `bson:"size" json:"size"`
-	Messages []TestRecord `bson:"messages" json:"messages"`
+	Timestamp int64        `bson:"timestamp" json:"timestamp"`
+	ID        int64        `bson:"_id" json:"_id"`
+	Size      int64        `bson:"size" json:"size"`
+	Messages  []TestRecord `bson:"messages" json:"messages"`
 }
 
 var db Mongodb
@@ -38,9 +39,12 @@ const metaID_str = "0"
 var empty_next = map[string]string{"next_stream": ""}
 
 var rec1 = TestRecord{1, empty_next, "aaa", 0}
-var rec_finished = TestRecord{2, map[string]string{"next_stream": "next1"}, finish_stream_keyword, 0}
+var rec1_later = TestRecord{1, empty_next, "aaa", 1}
+var rec_finished = TestRecord{2, map[string]string{"next_stream": "next1"}, finish_stream_keyword, 2}
 var rec2 = TestRecord{2, empty_next, "bbb", 1}
 var rec3 = TestRecord{3, empty_next, "ccc", 2}
+var rec_finished3 = TestRecord{3, map[string]string{"next_stream": "next1"}, finish_stream_keyword, 2}
+var rec_finished11 = TestRecord{11, map[string]string{"next_stream": "next1"}, finish_stream_keyword, 2}
 
 var rec1_expect, _ = json.Marshal(rec1)
 var rec2_expect, _ = json.Marshal(rec2)
@@ -149,7 +153,31 @@ func TestMongoDBGetNextErrorOnFinishedStream(t *testing.T) {
 	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next"})
 
 	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
-	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":2,\"id_max\":2,\"next_stream\":\"next1\"}", err.(*DBError).Message)
+	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1,\"next_stream\":\"next1\"}", err.(*DBError).Message)
+}
+
+func TestMongoDBGetByIdErrorOnFinishedStream(t *testing.T) {
+	db.Connect(dbaddress)
+	defer cleanup()
+	db.insertRecord(dbname, collection, &rec1)
+	db.insertRecord(dbname, collection, &rec_finished)
+
+	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "id", ExtraParam: "2"})
+
+	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
+	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1,\"next_stream\":\"next1\"}", err.(*DBError).Message)
+}
+
+func TestMongoDBGetLastErrorOnFinishedStream(t *testing.T) {
+	db.Connect(dbaddress)
+	defer cleanup()
+	db.insertRecord(dbname, collection, &rec1)
+	db.insertRecord(dbname, collection, &rec_finished)
+
+	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "last"})
+	_ = res // only the error is asserted below
+	assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
+	assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1,\"next_stream\":\"next1\"}", err.(*DBError).Message)
 }
 
 func TestMongoDBGetNextErrorOnNoMoreData(t *testing.T) {
@@ -188,7 +216,7 @@ func insertRecords(n int) {
 	records := make([]TestRecord, n)
 	for ind, record := range records {
 		record.ID = int64(ind) + 1
-		record.Name = string(ind)
+		record.Name = fmt.Sprint(ind)
 		if err := db.insertRecord(dbname, collection, &record); err != nil {
 			fmt.Println("error at insert ", ind)
 		}
@@ -392,6 +420,56 @@ func TestMongoDBGetSize(t *testing.T) {
 	assert.Equal(t, string(recs1_expect), string(res))
 }
 
+func TestMongoDBGetSizeWithFinishedStream(t *testing.T) {
+	db.Connect(dbaddress)
+	defer cleanup()
+	db.insertRecord(dbname, collection, &rec1)
+	db.insertRecord(dbname, collection, &rec_finished)
+
+	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, Op: "size"})
+	assert.Nil(t, err)
+	var rec_expect, _ = json.Marshal(&SizeRecord{1})
+	assert.Equal(t, string(rec_expect), string(res))
+}
+
+func TestMongoDBGetSizeForDatasets(t *testing.T) {
+	db.Connect(dbaddress)
+	defer cleanup()
+	db.insertRecord(dbname, collection, &rec1)
+
+	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, Op: "size", ExtraParam: "false"})
+	assert.Equal(t, utils.StatusWrongInput, err.(*DBError).Code)
+
+	_, err1 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, Op: "size", ExtraParam: "true"})
+	assert.Equal(t, utils.StatusWrongInput, err1.(*DBError).Code)
+}
+
+func TestMongoDBGetSizeForDatasetsWithFinishedStream(t *testing.T) {
+	db.Connect(dbaddress)
+	defer cleanup()
+	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
+	db.insertRecord(dbname, collection, &rec_finished)
+
+	res, _ := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, Op: "size", ExtraParam: "true"})
+
+	var rec_expect, _ = json.Marshal(&SizeRecord{1})
+	assert.Equal(t, string(rec_expect), string(res))
+}
+
+func TestMongoDBGetSizeDataset(t *testing.T) {
+	db.Connect(dbaddress)
+	defer cleanup()
+
+	db.insertRecord(dbname, collection, &rec_dataset1)
+	db.insertRecord(dbname, collection, &rec_dataset2_incomplete)
+
+	size2_expect, _ := json.Marshal(SizeRecord{2})
+
+	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, Op: "size", ExtraParam: "true"})
+	assert.Nil(t, err)
+	assert.Equal(t, string(size2_expect), string(res))
+}
+
 func TestMongoDBGetSizeNoRecords(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
@@ -562,11 +640,13 @@ func TestMongoDBQueryMessagesOnEmptyDatabase(t *testing.T) {
 	}
 }
 
-var rec_dataset1 = TestDataset{1, 3, []TestRecord{rec1, rec2, rec3}}
-var rec_dataset1_incomplete = TestDataset{1, 4, []TestRecord{rec1, rec2, rec3}}
-var rec_dataset2_incomplete = TestDataset{2, 4, []TestRecord{rec1, rec2, rec3}}
-var rec_dataset2 = TestDataset{2, 4, []TestRecord{rec1, rec2, rec3}}
-var rec_dataset3 = TestDataset{3, 3, []TestRecord{rec3, rec2, rec2}}
+var rec_dataset1 = TestDataset{0, 1, 3, []TestRecord{rec1, rec2, rec3}}
+var rec_dataset1_incomplete = TestDataset{1, 1, 4, []TestRecord{rec1, rec2, rec3}}
+var rec_dataset2_incomplete = TestDataset{2, 2, 4, []TestRecord{rec1, rec2, rec3}}
+var rec_dataset2 = TestDataset{1, 2, 4, []TestRecord{rec1, rec2, rec3}}
+var rec_dataset3 = TestDataset{2, 3, 3, []TestRecord{rec3, rec2, rec2}}
+
+var rec_dataset2_incomplete3 = TestDataset{1, 2, 3, []TestRecord{rec1, rec2}}
 
 func TestMongoDBGetDataset(t *testing.T) {
 	db.Connect(dbaddress)
@@ -615,57 +695,90 @@ func TestMongoDBNoDataOnNotCompletedNextDataset(t *testing.T) {
 	assert.Equal(t, rec_dataset2_incomplete, res)
 }
 
-
-func TestMongoDBReturnInCompletedDataset(t *testing.T) {
+func TestMongoDBGetRecordLastDataSetSkipsIncompleteSets(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 
-	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
+	db.insertRecord(dbname, collection, &rec_dataset1)
+	db.insertRecord(dbname, collection, &rec_dataset2)
 
-	res_string, err := db.ProcessRequest(Request{DbName: dbname,
-		DbCollectionName: collection, GroupId: groupId, Op: "next", DatasetOp: true, MinDatasetSize: 1})
+	res_string, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "last", DatasetOp: true, ExtraParam: "0"})
 
 	assert.Nil(t, err)
+
 	var res TestDataset
 	json.Unmarshal(res_string, &res)
 
-	assert.Equal(t, rec_dataset1_incomplete, res)
+	assert.Equal(t, rec_dataset1, res)
 }
 
-
-func TestMongoDBGetRecordLastDataSetSkipsIncompleteSets(t *testing.T) {
+func TestMongoDBGetRecordLastDataSetReturnsIncompleteSets(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 
 	db.insertRecord(dbname, collection, &rec_dataset1)
 	db.insertRecord(dbname, collection, &rec_dataset2)
 
-	res_string, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "last", DatasetOp:true, ExtraParam: "0"})
+	res_string, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "last",
+		DatasetOp: true, MinDatasetSize: 3, ExtraParam: "0"})
 
 	assert.Nil(t, err)
 
 	var res TestDataset
 	json.Unmarshal(res_string, &res)
 
-	assert.Equal(t, rec_dataset1, res)
+	assert.Equal(t, rec_dataset2, res)
 }
 
-func TestMongoDBGetRecordLastDataSetReturnsIncompleteSets(t *testing.T) {
+func TestMongoDBGetRecordLastDataSetSkipsIncompleteSetsWithMinSize(t *testing.T) {
 	db.Connect(dbaddress)
 	defer cleanup()
 
 	db.insertRecord(dbname, collection, &rec_dataset1)
-	db.insertRecord(dbname, collection, &rec_dataset2)
+	db.insertRecord(dbname, collection, &rec_dataset2_incomplete3)
 
 	res_string, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "last",
-		DatasetOp:true,MinDatasetSize: 2,ExtraParam: "0"})
+		DatasetOp: true, MinDatasetSize: 3, ExtraParam: "0"})
 
 	assert.Nil(t, err)
 
 	var res TestDataset
 	json.Unmarshal(res_string, &res)
+	assert.Equal(t, rec_dataset1, res)
+}
 
-	assert.Equal(t, rec_dataset2, res)
+func TestMongoDBGetRecordLastDataSetWithFinishedStream(t *testing.T) {
+	db.Connect(dbaddress)
+	defer cleanup()
+
+	db.insertRecord(dbname, collection, &rec_dataset1)
+	db.insertRecord(dbname, collection, &rec_finished)
+
+	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "last",
+		DatasetOp: true, ExtraParam: "0"})
+
+	assert.NotNil(t, err)
+	if err != nil {
+		assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
+		assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1,\"next_stream\":\"next1\"}", err.Error())
+	}
+}
+
+func TestMongoDBGetRecordLastDataSetWithIncompleteDatasetsAndFinishedStreamReturnsEndofStream(t *testing.T) {
+	db.Connect(dbaddress)
+	defer cleanup()
+
+	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
+	db.insertRecord(dbname, collection, &rec_finished)
+
+	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "last",
+		DatasetOp: true, MinDatasetSize: 2, ExtraParam: "0"})
+
+	assert.NotNil(t, err)
+	if err != nil {
+		assert.Equal(t, utils.StatusNoData, err.(*DBError).Code)
+		assert.Equal(t, "{\"op\":\"get_record_by_id\",\"id\":1,\"id_max\":1,\"next_stream\":\"next1\"}", err.Error())
+	}
 }
 
 func TestMongoDBGetRecordLastDataSetOK(t *testing.T) {
@@ -690,7 +803,7 @@ func TestMongoDBGetDatasetID(t *testing.T) {
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec_dataset1)
 
-	res_string, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "id", DatasetOp:true, ExtraParam: "1"})
+	res_string, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "id", DatasetOp: true, ExtraParam: "1"})
 
 	assert.Nil(t, err)
 
@@ -706,7 +819,7 @@ func TestMongoDBErrorOnIncompleteDatasetID(t *testing.T) {
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
 
-	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "id", DatasetOp:true, ExtraParam: "1"})
+	_, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "id", DatasetOp: true, ExtraParam: "1"})
 
 	assert.Equal(t, utils.StatusPartialData, err.(*DBError).Code)
 
@@ -722,7 +835,7 @@ func TestMongoDBOkOnIncompleteDatasetID(t *testing.T) {
 	defer cleanup()
 	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
 
-	res_string, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "id", DatasetOp:true,MinDatasetSize: 3,ExtraParam: "1"})
+	res_string, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "id", DatasetOp: true, MinDatasetSize: 3, ExtraParam: "1"})
 
 	assert.Nil(t, err)
 
@@ -739,16 +852,20 @@ type Stream struct {
 }
 
 var testsStreams = []struct {
-	from               string
+	from            string
 	streams         []Stream
 	expectedStreams StreamsRecord
-	test               string
-	ok                 bool
+	test            string
+	ok              bool
 }{
 	{"", []Stream{}, StreamsRecord{[]StreamInfo{}}, "no streams", true},
-	{"", []Stream{{"ss1", []TestRecord{rec2, rec1}}}, StreamsRecord{[]StreamInfo{StreamInfo{Name: "ss1", Timestamp: 0}}}, "one stream", true},
-	{"", []Stream{{"ss1", []TestRecord{rec2, rec1}}, {"ss2", []TestRecord{rec2, rec3}}}, StreamsRecord{[]StreamInfo{StreamInfo{Name: "ss1", Timestamp: 0}, StreamInfo{Name: "ss2", Timestamp: 1}}}, "two streams", true},
-	{"ss2", []Stream{{"ss1", []TestRecord{rec1, rec2}}, {"ss2", []TestRecord{rec2, rec3}}}, StreamsRecord{[]StreamInfo{StreamInfo{Name: "ss2", Timestamp: 1}}}, "with from", true},
+	{"", []Stream{{"ss1", []TestRecord{rec2, rec1}}},
+		StreamsRecord{[]StreamInfo{StreamInfo{Name: "ss1", Timestamp: 0, LastId: 2, TimestampLast: 1}}}, "one stream", true},
+	{"", []Stream{{"ss1", []TestRecord{rec2, rec1}},
+		{"ss2", []TestRecord{rec2, rec3}}},
+		StreamsRecord{[]StreamInfo{StreamInfo{Name: "ss1", Timestamp: 0, LastId: 2, TimestampLast: 1},
+			StreamInfo{Name: "ss2", Timestamp: 1, LastId: 3, TimestampLast: 2}}}, "two streams", true},
+	{"ss2", []Stream{{"ss1", []TestRecord{rec1, rec2}}, {"ss2", []TestRecord{rec2, rec3}}}, StreamsRecord{[]StreamInfo{StreamInfo{Name: "ss2", Timestamp: 1, LastId: 3, TimestampLast: 2}}}, "with from", true},
 }
 
 func TestMongoDBListStreams(t *testing.T) {
@@ -777,11 +894,13 @@ func TestMongoDBAckMessage(t *testing.T) {
 	defer cleanup()
 
 	db.insertRecord(dbname, collection, &rec1)
+	db.insertRecord(dbname, collection, &rec_finished)
+
 	query_str := "{\"Id\":1,\"Op\":\"ackmessage\"}"
 
 	request := Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "ackmessage", ExtraParam: query_str}
 	res, err := db.ProcessRequest(request)
-	nacks, _ := db.getNacks(request, 1, 1)
+	nacks, _ := db.getNacks(request, 0, 0)
 	assert.Nil(t, err)
 	assert.Equal(t, "", string(res))
 	assert.Equal(t, 0, len(nacks))
@@ -813,6 +932,7 @@ func TestMongoDBNacks(t *testing.T) {
 		db.Connect(dbaddress)
 		if test.insertRecords {
 			insertRecords(10)
+			db.insertRecord(dbname, collection, &rec_finished11)
 		}
 		if test.ackRecords {
 			db.ackRecord(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, ExtraParam: "{\"Id\":2,\"Op\":\"ackmessage\"}"})
@@ -847,6 +967,7 @@ func TestMongoDBLastAcks(t *testing.T) {
 		db.Connect(dbaddress)
 		if test.insertRecords {
 			insertRecords(10)
+			db.insertRecord(dbname, collection, &rec_finished11)
 		}
 		if test.ackRecords {
 			db.ackRecord(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, ExtraParam: "{\"Id\":2,\"Op\":\"ackmessage\"}"})
@@ -920,6 +1041,7 @@ func TestMongoDBGetNextReturnsToNormalAfterUsesInprocessed(t *testing.T) {
 	defer cleanup()
 	err := db.insertRecord(dbname, collection, &rec1)
 	db.insertRecord(dbname, collection, &rec2)
+	db.insertRecord(dbname, collection, &rec_finished3)
 	res, err := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
 	time.Sleep(time.Second)
 	res1, err1 := db.ProcessRequest(Request{DbName: dbname, DbCollectionName: collection, GroupId: groupId, Op: "next", ExtraParam: "1000_3"})
diff --git a/broker/src/asapo_broker/database/streams_test.go b/broker/src/asapo_broker/database/streams_test.go
index c172adf5483c2de061f64f55021663f74988a34b..c972ba789f8d4b77e44f3c9d8b5a26416db770c9 100644
--- a/broker/src/asapo_broker/database/streams_test.go
+++ b/broker/src/asapo_broker/database/streams_test.go
@@ -3,6 +3,7 @@
 package database
 
 import (
+	"fmt"
 	"github.com/stretchr/testify/suite"
 	"testing"
 	"time"
@@ -18,7 +19,7 @@ func (suite *StreamsTestSuite) SetupTest() {
 
 func (suite *StreamsTestSuite) TearDownTest() {
 	cleanup()
-	streams.records= map[string]StreamsRecord{}
+	streams.records = map[string]StreamsRecord{}
 }
 
 func TestStreamsTestSuite(t *testing.T) {
@@ -26,16 +27,16 @@ func TestStreamsTestSuite(t *testing.T) {
 }
 
 func (suite *StreamsTestSuite) TestStreamsEmpty() {
-	rec, err := streams.getStreams(&db, "test", "")
+	rec, err := streams.getStreams(&db, Request{DbName: "test", ExtraParam: ""})
 	suite.Nil(err)
 	suite.Empty(rec.Streams, 0)
 }
 
 func (suite *StreamsTestSuite) TestStreamsNotUsesCacheWhenEmpty() {
 	db.settings.UpdateStreamCachePeriodMs = 1000
-	streams.getStreams(&db, dbname, "")
+	streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
 	db.insertRecord(dbname, collection, &rec1)
-	rec, err := streams.getStreams(&db, dbname, "")
+	rec, err := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
 	suite.Nil(err)
 	suite.Equal(1, len(rec.Streams))
 }
@@ -43,20 +44,63 @@ func (suite *StreamsTestSuite) TestStreamsNotUsesCacheWhenEmpty() {
 func (suite *StreamsTestSuite) TestStreamsUsesCache() {
 	db.settings.UpdateStreamCachePeriodMs = 1000
 	db.insertRecord(dbname, collection, &rec2)
-	streams.getStreams(&db, dbname, "")
+	streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
 	db.insertRecord(dbname, collection, &rec1)
-	rec, err := streams.getStreams(&db, dbname, "")
+	rec, err := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
 	suite.Nil(err)
 	suite.Equal(int64(1), rec.Streams[0].Timestamp)
+	suite.Equal(false, rec.Streams[0].Finished)
+	suite.Equal(int64(2), rec.Streams[0].LastId)
+	suite.Equal(int64(1), rec.Streams[0].TimestampLast)
+}
+
+func (suite *StreamsTestSuite) TestStreamsGetFinishedInfo() {
+	db.settings.UpdateStreamCachePeriodMs = 1000
+	db.insertRecord(dbname, collection, &rec1)
+	db.insertRecord(dbname, collection, &rec_finished)
+	rec, err := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
+	suite.Nil(err)
+	suite.Equal(int64(0), rec.Streams[0].Timestamp)
+	suite.Equal(true, rec.Streams[0].Finished)
+	suite.Equal("next1", rec.Streams[0].NextStream)
+}
+
+
+func (suite *StreamsTestSuite) TestStreamsDataSetsGetFinishedInfo() {
+	db.settings.UpdateStreamCachePeriodMs = 1000
+	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
+	db.insertRecord(dbname, collection, &rec_finished)
+	rec, err := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
+	suite.Nil(err)
+	suite.Equal(int64(1), rec.Streams[0].Timestamp)
+	suite.Equal(int64(2), rec.Streams[0].TimestampLast)
+	suite.Equal(true, rec.Streams[0].Finished)
+	suite.Equal("next1", rec.Streams[0].NextStream)
+	suite.Equal(int64(1), rec.Streams[0].LastId)
+}
+
+func (suite *StreamsTestSuite) TestStreamsMultipleRequests() {
+	db.settings.UpdateStreamCachePeriodMs = 1000
+	db.insertRecord(dbname, collection, &rec_dataset1_incomplete)
+	db.insertRecord(dbname, collection, &rec_finished)
+	db.insertRecord(dbname, collection2, &rec_dataset1_incomplete)
+	rec, err := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: "_unfinished"})
+	rec2, err2 := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: "_finished"})
+	suite.Nil(err)
+	suite.Equal(collection2, rec.Streams[0].Name)
+	suite.Equal(1, len(rec.Streams))
+	suite.Nil(err2)
+	suite.Equal(1, len(rec2.Streams))
+	suite.Equal(collection, rec2.Streams[0].Name)
 }
 
 func (suite *StreamsTestSuite) TestStreamsNotUsesCacheWhenExpired() {
 	db.settings.UpdateStreamCachePeriodMs = 10
 	db.insertRecord(dbname, collection, &rec2)
-	streams.getStreams(&db, dbname, "")
+	streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
 	db.insertRecord(dbname, collection, &rec1)
 	time.Sleep(time.Millisecond * 100)
-	rec, err := streams.getStreams(&db, dbname, "")
+	rec, err := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
 	suite.Nil(err)
 	suite.Equal(int64(1), rec.Streams[0].Timestamp)
 }
@@ -64,9 +108,50 @@ func (suite *StreamsTestSuite) TestStreamsNotUsesCacheWhenExpired() {
 func (suite *StreamsTestSuite) TestStreamRemovesDatabase() {
 	db.settings.UpdateStreamCachePeriodMs = 0
 	db.insertRecord(dbname, collection, &rec1)
-	streams.getStreams(&db, dbname, "")
+	streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
 	db.dropDatabase(dbname)
-	rec, err := streams.getStreams(&db, dbname, "")
+	rec, err := streams.getStreams(&db, Request{DbName: dbname, ExtraParam: ""})
 	suite.Nil(err)
 	suite.Empty(rec.Streams, 0)
 }
+
+var streamFilterTests=[]struct{
+	request Request
+	error bool
+	streams []string
+	message string
+}{
+	{request: Request{DbName:dbname, ExtraParam:""},error: false,streams: []string{collection,collection2},message: "default all streams"},
+	{request: Request{DbName:dbname, ExtraParam:"_"},error: false,streams: []string{collection,collection2},message: "default _ all streams"},
+	{request: Request{DbName:dbname, ExtraParam:collection},error: false,streams: []string{collection,collection2},message: "first parameter only -  all streams"},
+	{request: Request{DbName:dbname, ExtraParam:"_all"},error: false,streams: []string{collection,collection2},message: "second parameter only -  all streams"},
+	{request: Request{DbName:dbname, ExtraParam:"_finished"},error: false,streams: []string{collection2},message: "second parameter only -  finished streams"},
+	{request: Request{DbName:dbname, ExtraParam:"_unfinished"},error: false,streams: []string{collection},message: "second parameter only -  unfinished streams"},
+	{request: Request{DbName:dbname, ExtraParam:collection2+"_all"},error: false,streams: []string{collection2},message: "from stream2"},
+	{request: Request{DbName:dbname, ExtraParam:collection2+"_unfinished"},error: false,streams: []string{},message: "from stream2 and filter"},
+	{request: Request{DbName:dbname, ExtraParam:collection2+"_bla"},error: true,streams: []string{},message: "wrong filter"},
+	{request: Request{DbName:dbname, ExtraParam:collection2+"_all_aaa"},error: true,streams: []string{},message: "wrong filter2"},
+	{request: Request{DbName:dbname, ExtraParam:"blabla"},error: false,streams: []string{},message: "from unknown stream returns nothing"},
+	{request: Request{DbName:dbname, ExtraParam:collection2+"_"},error: false,streams: []string{collection2},message: "from stream2, first parameter only"},
+}
+
+func (suite *StreamsTestSuite) TestStreamFilters() {
+	db.insertRecord(dbname, collection, &rec1)
+	db.insertRecord(dbname, collection2, &rec1_later)
+	db.insertRecord(dbname, collection2, &rec_finished)
+	for _, test := range streamFilterTests {
+		rec, err := streams.getStreams(&db, test.request)
+		if test.error {
+			suite.NotNil(err,test.message)
+			continue
+		}
+		if err!=nil {
+			fmt.Println(err.Error())
+		}
+		streams:=make([]string,0)
+		for _,si:=range rec.Streams {
+			streams=append(streams,si.Name)
+		}
+		suite.Equal(test.streams,streams,test.message)
+	}
+}
\ No newline at end of file
diff --git a/broker/src/asapo_broker/go.mod b/broker/src/asapo_broker/go.mod
new file mode 100644
index 0000000000000000000000000000000000000000..c8b56219b5cb323e672fe6ef4553a0e8ae9ac766
--- /dev/null
+++ b/broker/src/asapo_broker/go.mod
@@ -0,0 +1,17 @@
+module asapo_broker
+
+go 1.16
+
+replace asapo_common v0.0.0 => ../../../common/go/src/asapo_common
+
+require (
+	asapo_common v0.0.0
+	github.com/blastrain/vitess-sqlparser v0.0.0-20201030050434-a139afbb1aba
+	github.com/dgrijalva/jwt-go v3.2.0+incompatible // indirect
+	github.com/gorilla/mux v1.8.0
+	github.com/influxdata/influxdb1-client v0.0.0-20200827194710-b269163b24ab
+	github.com/rs/xid v1.2.1
+	github.com/sirupsen/logrus v1.8.0 // indirect
+	github.com/stretchr/testify v1.7.0
+	go.mongodb.org/mongo-driver v1.4.6
+)
diff --git a/broker/src/asapo_broker/go.sum b/broker/src/asapo_broker/go.sum
new file mode 100644
index 0000000000000000000000000000000000000000..91043be57853e6ad739921a2277e8c718f1e8a84
--- /dev/null
+++ b/broker/src/asapo_broker/go.sum
@@ -0,0 +1,142 @@
+github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
+github.com/aws/aws-sdk-go v1.34.28 h1:sscPpn/Ns3i0F4HPEWAVcwdIRaZZCuL7llJ2/60yPIk=
+github.com/aws/aws-sdk-go v1.34.28/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48=
+github.com/blastrain/vitess-sqlparser v0.0.0-20201030050434-a139afbb1aba h1:hBK2BWzm0OzYZrZy9yzvZZw59C5Do4/miZ8FhEwd5P8=
+github.com/blastrain/vitess-sqlparser v0.0.0-20201030050434-a139afbb1aba/go.mod h1:FGQp+RNQwVmLzDq6HBrYCww9qJQyNwH9Qji/quTQII4=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
+github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
+github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk=
+github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
+github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0=
+github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY=
+github.com/gobuffalo/depgen v0.1.0/go.mod h1:+ifsuy7fhi15RWncXQQKjWS9JPkdah5sZvtHc2RXGlg=
+github.com/gobuffalo/envy v1.6.15/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI=
+github.com/gobuffalo/envy v1.7.0/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI=
+github.com/gobuffalo/flect v0.1.0/go.mod h1:d2ehjJqGOH/Kjqcoz+F7jHTBbmDb38yXA598Hb50EGs=
+github.com/gobuffalo/flect v0.1.1/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI=
+github.com/gobuffalo/flect v0.1.3/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI=
+github.com/gobuffalo/genny v0.0.0-20190329151137-27723ad26ef9/go.mod h1:rWs4Z12d1Zbf19rlsn0nurr75KqhYp52EAGGxTbBhNk=
+github.com/gobuffalo/genny v0.0.0-20190403191548-3ca520ef0d9e/go.mod h1:80lIj3kVJWwOrXWWMRzzdhW3DsrdjILVil/SFKBzF28=
+github.com/gobuffalo/genny v0.1.0/go.mod h1:XidbUqzak3lHdS//TPu2OgiFB+51Ur5f7CSnXZ/JDvo=
+github.com/gobuffalo/genny v0.1.1/go.mod h1:5TExbEyY48pfunL4QSXxlDOmdsD44RRq4mVZ0Ex28Xk=
+github.com/gobuffalo/gitgen v0.0.0-20190315122116-cc086187d211/go.mod h1:vEHJk/E9DmhejeLeNt7UVvlSGv3ziL+djtTr3yyzcOw=
+github.com/gobuffalo/gogen v0.0.0-20190315121717-8f38393713f5/go.mod h1:V9QVDIxsgKNZs6L2IYiGR8datgMhB577vzTDqypH360=
+github.com/gobuffalo/gogen v0.1.0/go.mod h1:8NTelM5qd8RZ15VjQTFkAW6qOMx5wBbW4dSCS3BY8gg=
+github.com/gobuffalo/gogen v0.1.1/go.mod h1:y8iBtmHmGc4qa3urIyo1shvOD8JftTtfcKi+71xfDNE=
+github.com/gobuffalo/logger v0.0.0-20190315122211-86e12af44bc2/go.mod h1:QdxcLw541hSGtBnhUc4gaNIXRjiDppFGaDqzbrBd3v8=
+github.com/gobuffalo/mapi v1.0.1/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc=
+github.com/gobuffalo/mapi v1.0.2/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc=
+github.com/gobuffalo/packd v0.0.0-20190315124812-a385830c7fc0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4=
+github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4=
+github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ=
+github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0=
+github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw=
+github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4=
+github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
+github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
+github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
+github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
+github.com/influxdata/influxdb1-client v0.0.0-20200827194710-b269163b24ab h1:HqW4xhhynfjrtEiiSGcQUd6vrK23iMam1FO8rI7mwig=
+github.com/influxdata/influxdb1-client v0.0.0-20200827194710-b269163b24ab/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo=
+github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
+github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
+github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
+github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
+github.com/juju/errors v0.0.0-20170703010042-c7d06af17c68 h1:d2hBkTvi7B89+OXY8+bBBshPlc+7JYacGrG/dFak8SQ=
+github.com/juju/errors v0.0.0-20170703010042-c7d06af17c68/go.mod h1:W54LbzXuIE0boCoNJfwqpmkKJ1O4TCTZMetAt6jGk7Q=
+github.com/juju/loggo v0.0.0-20190526231331-6e530bcce5d8/go.mod h1:vgyd7OREkbtVEN/8IXZe5Ooef3LQePvuBm9UWj6ZL8U=
+github.com/juju/testing v0.0.0-20191001232224-ce9dec17d28b/go.mod h1:63prj8cnj0tU0S9OHjGJn+b1h0ZghCndfnbQolrYTwA=
+github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4=
+github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA=
+github.com/klauspost/compress v1.9.5 h1:U+CaK85mrNNb4k8BNOfgJtJ/gr6kswUCFj6miSzVC6M=
+github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
+github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
+github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
+github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
+github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/magefile/mage v1.10.0 h1:3HiXzCUY12kh9bIuyXShaVe529fJfyqoVM42o/uom2g=
+github.com/magefile/mage v1.10.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
+github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE=
+github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0=
+github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
+github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE=
+github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
+github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
+github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
+github.com/rs/xid v1.2.1 h1:mhH9Nq+C1fY2l1XIpgxIiUOfNpRBYH1kKcr+qfKgjRc=
+github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
+github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
+github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
+github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
+github.com/sirupsen/logrus v1.8.0 h1:nfhvjKcUMhBMVqbKHJlk5RPrrfYr/NMo3692g0dwfWU=
+github.com/sirupsen/logrus v1.8.0/go.mod h1:4GuYW9TZmE769R5STWrRakJc4UqQ3+QQ95fyz7ENv1A=
+github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
+github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.1.1 h1:2vfRuCMp5sSVIDSqO8oNnWJq7mPa6KVP3iPIwFBuy8A=
+github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
+github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c h1:u40Z8hqBAAQyv+vATcGgV0YCnDjqSL7/q/JyPhhJSPk=
+github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I=
+github.com/xdg/stringprep v0.0.0-20180714160509-73f8eece6fdc h1:n+nNi93yXLkJvKwXNP9d55HC7lGK4H/SRcwB5IaUZLo=
+github.com/xdg/stringprep v0.0.0-20180714160509-73f8eece6fdc/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y=
+go.mongodb.org/mongo-driver v1.4.6 h1:rh7GdYmDrb8AQSkF8yteAus8qYOgOASWDOv1BWqBXkU=
+go.mongodb.org/mongo-driver v1.4.6/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc=
+golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
+golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5 h1:8dUaAV7K4uHsF56JQWkprecIQKdPHtR9jCHF5nB8uzc=
+golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e h1:vcxGaoTs7kV8m5Np9uUNQin4BrLOthgV7252N8V+FwY=
+golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190419153524-e8e3143a4f4a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/text v0.0.0-20180302201248-b7ef84aaf62a/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190329151228-23e29df326fe/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190416151739-9c9e1878f421/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
+gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA=
+gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/broker/src/asapo_broker/server/authorizer.go b/broker/src/asapo_broker/server/authorizer.go
new file mode 100644
index 0000000000000000000000000000000000000000..07cf4ddfe63db13a9eef3aa4b6b7201d38f3c934
--- /dev/null
+++ b/broker/src/asapo_broker/server/authorizer.go
@@ -0,0 +1,125 @@
+package server
+
+import (
+	"asapo_common/structs"
+	"bytes"
+	"encoding/json"
+	"errors"
+	"io"
+	"net/http"
+	"sync"
+	"time"
+)
+
+type Token struct {
+	structs.IntrospectTokenResponse
+}
+
+type Authorizer interface {
+	AuthorizeToken(tokenJWT string) (token Token, err error)
+}
+
+type HttpClient interface {
+	Do(req *http.Request) (*http.Response, error)
+}
+
+type HttpError struct{
+	err error
+	statusCode int
+}
+
+func (m *HttpError) Error() string {
+	return m.err.Error()
+}
+
+
+type AsapoAuthorizer struct {
+	serverUrl string
+	httpClient HttpClient
+}
+
+type cachedToken struct{
+	Token
+	lastUpdate time.Time
+}
+
+var cachedTokens  = struct {
+	tokens map[string]cachedToken
+	cachedTokensLock sync.RWMutex
+}{tokens:make(map[string]cachedToken,0)}
+
+func getCachedToken(tokenJWT string)(token Token, ok bool) {
+	cachedTokens.cachedTokensLock.RLock()
+	defer cachedTokens.cachedTokensLock.RUnlock()
+	cachedToken,ok:=cachedTokens.tokens[tokenJWT]
+	if !ok{
+		return  token,false
+	}
+	if time.Now().Sub(cachedToken.lastUpdate) < 10000*time.Second {
+		return cachedToken.Token, true
+	}
+	return token,false
+}
+
+func cacheToken(tokenJWT string,token Token) {
+	cachedTokens.cachedTokensLock.Lock()
+	defer cachedTokens.cachedTokensLock.Unlock()
+
+	cachedTokens.tokens[tokenJWT] = cachedToken{
+		Token:      token,
+		lastUpdate: time.Now(),
+	}
+}
+
+func (a * AsapoAuthorizer) doRequest(req *http.Request) (token Token, err error) {
+	resp, err := a.httpClient.Do(req)
+	if err != nil {
+		return token, err
+	}
+	defer resp.Body.Close()
+
+	body, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return token, err
+	}
+
+	if resp.StatusCode != http.StatusOK {
+		return token, &HttpError{errors.New("returned " + resp.Status + ": " + string(body)),resp.StatusCode}
+	}
+
+	err = json.Unmarshal(body, &token)
+	return
+}
+func createIntrospectTokenRequest(tokenJWT string) (*http.Request, error) {
+	path := "http://"+settings.AuthorizationServer + "/introspect"
+	request := struct {
+		Token string
+	}{tokenJWT}
+	json_data, _ := json.Marshal(request)
+	req, err := http.NewRequest("POST", path, bytes.NewBuffer(json_data))
+	if err != nil {
+		return nil, err
+	}
+	req.Header.Add("Content-Type", "application/json")
+	return req, nil
+}
+
+func (a * AsapoAuthorizer) AuthorizeToken(tokenJWT string) (token Token, err error) {
+	token,ok := getCachedToken(tokenJWT)
+	if ok {
+		return
+	}
+
+	req, err := createIntrospectTokenRequest(tokenJWT)
+	if err != nil {
+		return
+	}
+
+	token, err = a.doRequest(req)
+	if err == nil {
+		cacheToken(tokenJWT, token)
+	}
+
+	return
+}
+
diff --git a/broker/src/asapo_broker/server/authorizer_test.go b/broker/src/asapo_broker/server/authorizer_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..a58681460c80b6a412091f6982afca7c6eecdc14
--- /dev/null
+++ b/broker/src/asapo_broker/server/authorizer_test.go
@@ -0,0 +1,99 @@
+package server
+
+import (
+	"asapo_common/structs"
+	"bytes"
+	"encoding/json"
+	"errors"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/mock"
+	"io/ioutil"
+	"net/http"
+	"testing"
+)
+
+type MockClient struct {
+	mock.Mock
+}
+
+const expectedAuthorizerUri="http://authorizer:8400/introspect"
+const expectedToken="blabla"
+
+
+func (m *MockClient) Do(req *http.Request) (*http.Response, error) {
+	args := m.Called(req)
+	return args.Get(0).(*http.Response), args.Error(1)
+}
+
+func  matchRequest(req *http.Request) bool {
+	pathOk := req.URL.Scheme+"://"+req.URL.Host+req.URL.Path == expectedAuthorizerUri
+	b,_:=ioutil.ReadAll(req.Body)
+	token := struct {
+		Token string
+	}{}
+	json.Unmarshal(b,&token)
+	tokenOk:= token.Token == expectedToken
+	return pathOk && tokenOk
+}
+
+func responseOk() (*http.Response, error) {
+	token := Token{structs.IntrospectTokenResponse{AccessTypes: []string{"read"},Sub: "subject"}}
+	b,_:=json.Marshal(&token)
+	r := ioutil.NopCloser(bytes.NewReader(b))
+	return &http.Response{
+		StatusCode: http.StatusOK,
+		Body:       r,
+	}, nil
+}
+
+func responseUnauth() (*http.Response, error) {
+	r := ioutil.NopCloser(bytes.NewReader([]byte("wrong JWT token")))
+	return &http.Response{
+		StatusCode: http.StatusUnauthorized,
+		Body:       r,
+	}, nil
+}
+
+func responseErr() (*http.Response, error) {
+	return &http.Response{}, errors.New("cannot connect")
+}
+
+var authTests = []struct {
+	response func ()(*http.Response, error)
+	twice bool
+	ok bool
+	message string
+}{
+	{responseOk,false,true,"ok"},
+	{responseOk,true,true,"second time uses cache"},
+	{responseErr,false,false,"not auth"},
+	{responseUnauth,false,false,"request error"},
+}
+
+func TestAuthorize(t *testing.T) {
+	settings.AuthorizationServer = "authorizer:8400"
+	var client MockClient
+	auth = &AsapoAuthorizer{
+		serverUrl:  expectedAuthorizerUri,
+		httpClient: &client,
+	}
+	for _,test := range authTests {
+		client.On("Do",  mock.MatchedBy(matchRequest)).Once().Return(test.response())
+		token, err := auth.AuthorizeToken(expectedToken)
+		if test.twice {
+			token, err = auth.AuthorizeToken(expectedToken)
+		}
+		client.AssertExpectations(t)
+		client.ExpectedCalls = nil
+		if test.ok {
+			assert.Nil(t,err,test.message)
+			assert.Equal(t,"subject",token.Sub,test.message)
+			assert.Contains(t,token.AccessTypes,"read",test.message)
+		} else {
+			assert.NotNil(t,err,test.message)
+		}
+		delete(cachedTokens.tokens, expectedToken)
+	}
+
+}
+
diff --git a/broker/src/asapo_broker/server/get_commands_test.go b/broker/src/asapo_broker/server/get_commands_test.go
index e4db0514b53c85393ce47e55c7f7de02e99da6ae..c472ddb4c3948004c412ea6267da70a582a72084 100644
--- a/broker/src/asapo_broker/server/get_commands_test.go
+++ b/broker/src/asapo_broker/server/get_commands_test.go
@@ -47,10 +47,10 @@ var testsGetCommand = []struct {
 	{"next", expectedStream, expectedGroupID, expectedStream + "/" + expectedGroupID + "/next","",""},
 	{"next", expectedStream, expectedGroupID, expectedStream + "/" +
 		expectedGroupID + "/next","&resend_nacks=true&delay_ms=10000&resend_attempts=3","10000_3"},
-	{"size", expectedStream, "", expectedStream  + "/size","","0"},
-	{"streams", "0", "", "0/streams","",""},
+	{"size", expectedStream, "", expectedStream  + "/size","",""},
+	{"size", expectedStream, "", expectedStream  + "/size","&incomplete=true","true"},
+	{"streams", "0", "", "0/streams","","_"},
 	{"lastack", expectedStream, expectedGroupID, expectedStream + "/" + expectedGroupID + "/lastack","",""},
-
 }
 
 
@@ -58,7 +58,7 @@ func (suite *GetCommandsTestSuite) TestGetCommandsCallsCorrectRoutine() {
 	for _, test := range testsGetCommand {
 		suite.mock_db.On("ProcessRequest", database.Request{DbName: expectedDBName, DbCollectionName: test.stream, GroupId: test.groupid, Op: test.command, ExtraParam: test.externalParam}).Return([]byte("Hello"), nil)
 		logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request "+test.command)))
-		w := doRequest("/database/" + expectedBeamtimeId + "/" + expectedSource + "/" + test.reqString+correctTokenSuffix+test.queryParams)
+		w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + test.reqString+correctTokenSuffix+test.queryParams)
 		suite.Equal(http.StatusOK, w.Code, test.command+ " OK")
 		suite.Equal("Hello", string(w.Body.Bytes()), test.command+" sends data")
 	}
diff --git a/broker/src/asapo_broker/server/get_health.go b/broker/src/asapo_broker/server/get_health.go
index ce5f45ee531005cb5817410052b89b3449761028..c4ff315d16533e967019c3e4bbf60cc8404e775d 100644
--- a/broker/src/asapo_broker/server/get_health.go
+++ b/broker/src/asapo_broker/server/get_health.go
@@ -9,6 +9,5 @@ func routeGetHealth(w http.ResponseWriter, r *http.Request) {
 	if err != nil {
 		ReconnectDb()
 	}
-	r.Header.Set("Content-type", "application/json")
 	w.WriteHeader(http.StatusNoContent)
 }
diff --git a/broker/src/asapo_broker/server/get_health_test.go b/broker/src/asapo_broker/server/get_health_test.go
index a318c8d953d8ce3abc179f7fbdc20076a17b3785..67ade394949cf19882fa0fb0b1cf8ea5a3132ef3 100644
--- a/broker/src/asapo_broker/server/get_health_test.go
+++ b/broker/src/asapo_broker/server/get_health_test.go
@@ -34,7 +34,7 @@ func TestGetHealthTestSuite(t *testing.T) {
 
 func (suite *GetHealthTestSuite) TestGetHealthOk() {
 	suite.mock_db.On("Ping").Return(nil)
-	w := doRequest("/health")
+	w := doRequest("/health","GET","","")
 	suite.Equal(http.StatusNoContent, w.Code)
 }
 
@@ -44,6 +44,6 @@ func (suite *GetHealthTestSuite) TestGetHealthTriesToReconnectsToDataBase() {
 
 	ExpectReconnect(suite.mock_db)
 
-	w := doRequest("/health")
+	w := doRequest("/health","GET","","")
 	suite.Equal(http.StatusNoContent, w.Code)
 }
diff --git a/broker/src/asapo_broker/server/get_meta_test.go b/broker/src/asapo_broker/server/get_meta_test.go
index 4eb0e16547b60e7abb07a60f8667d7aadf9ef1bd..550efb653178533801ed305926cedd2ae1609cbf 100644
--- a/broker/src/asapo_broker/server/get_meta_test.go
+++ b/broker/src/asapo_broker/server/get_meta_test.go
@@ -35,7 +35,7 @@ func TestGetMetaTestSuite(t *testing.T) {
 func (suite *GetMetaTestSuite) TestGetMetaOK() {
 	suite.mock_db.On("ProcessRequest", database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, Op: "meta", ExtraParam: "1"}).Return([]byte(""), nil)
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request meta")))
-	w := doRequest("/database/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/0/meta"  + "/1" + correctTokenSuffix,"GET")
+	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/0/meta"  + "/1" + correctTokenSuffix,"GET")
 	suite.Equal(http.StatusOK, w.Code, "meta OK")
 }
 
diff --git a/broker/src/asapo_broker/server/get_size.go b/broker/src/asapo_broker/server/get_size.go
index fa4dd2367cf8a1115e6c740e060f6a4ff3680a7a..1355e955bd08a5dda6e560976586d3e2775168ba 100644
--- a/broker/src/asapo_broker/server/get_size.go
+++ b/broker/src/asapo_broker/server/get_size.go
@@ -5,5 +5,7 @@ import (
 )
 
 func routeGetSize(w http.ResponseWriter, r *http.Request) {
-	processRequest(w, r, "size", "0", false)
+	keys := r.URL.Query()
+	incomplete := keys.Get("incomplete")
+	processRequest(w, r, "size", incomplete, false)
 }
diff --git a/broker/src/asapo_broker/server/get_streams.go b/broker/src/asapo_broker/server/get_streams.go
index 335f15a6eff8b6698bdc338632d6d360b7891b5a..a22274553f58663c2bdbd830c246344b48f0dea9 100644
--- a/broker/src/asapo_broker/server/get_streams.go
+++ b/broker/src/asapo_broker/server/get_streams.go
@@ -7,5 +7,6 @@ import (
 func routeGetStreams(w http.ResponseWriter, r *http.Request) {
 	keys := r.URL.Query()
 	from := keys.Get("from")
-	processRequest(w, r, "streams", from, false)
+	filter := keys.Get("filter")
+	processRequest(w, r, "streams", from+"_"+filter, false)
 }
diff --git a/broker/src/asapo_broker/server/listroutes.go b/broker/src/asapo_broker/server/listroutes.go
index 8d782f58ec83f4351f898c71d7979be495cdc695..b2d87ccf2d9a1d2f1bad08ba88c567658676c6ef 100644
--- a/broker/src/asapo_broker/server/listroutes.go
+++ b/broker/src/asapo_broker/server/listroutes.go
@@ -8,73 +8,73 @@ var listRoutes = utils.Routes{
 	utils.Route{
 		"GetNext",
 		"Get",
-		"/database/{dbname}/{datasource}/{stream}/{groupid}/next",
+		"/{apiver}/beamtime/{beamtime}/{datasource}/{stream}/{groupid}/next",
 		routeGetNext,
 	},
 	utils.Route{
 		"GetSize",
 		"Get",
-		"/database/{dbname}/{datasource}/{stream}/size",
+		"/{apiver}/beamtime/{beamtime}/{datasource}/{stream}/size",
 		routeGetSize,
 	},
 	utils.Route{
 		"GetStreams",
 		"Get",
-		"/database/{dbname}/{datasource}/{stream}/streams",
+		"/{apiver}/beamtime/{beamtime}/{datasource}/{stream}/streams",
 		routeGetStreams,
 	},
 	utils.Route{
 		"GetLast",
 		"Get",
-		"/database/{dbname}/{datasource}/{stream}/0/last",
+		"/{apiver}/beamtime/{beamtime}/{datasource}/{stream}/0/last",
 		routeGetLast,
 	},
 	utils.Route{
 		"GetLastAck",
 		"Get",
-		"/database/{dbname}/{datasource}/{stream}/{groupid}/lastack",
+		"/{apiver}/beamtime/{beamtime}/{datasource}/{stream}/{groupid}/lastack",
 		routeGetLastAck,
 	},
 	utils.Route{
 		"GetNacks",
 		"Get",
-		"/database/{dbname}/{datasource}/{stream}/{groupid}/nacks",
+		"/{apiver}/beamtime/{beamtime}/{datasource}/{stream}/{groupid}/nacks",
 		routeGetNacks,
 	},
 	utils.Route{
 		"GetID",
 		"Get",
-		"/database/{dbname}/{datasource}/{stream}/0/{id}",
+		"/{apiver}/beamtime/{beamtime}/{datasource}/{stream}/0/{id}",
 		routeGetByID,
 	},
 	utils.Route{
 		"GetMeta",
 		"Get",
-		"/database/{dbname}/{datasource}/{stream}/0/meta/{id}",
+		"/{apiver}/beamtime/{beamtime}/{datasource}/{stream}/0/meta/{id}",
 		routeGetMeta,
 	},
 	utils.Route{
 		"CreateGroup",
 		"Post",
-		"/creategroup",
+		"/{apiver}/creategroup",
 		routeCreateGroupID,
 	},
 	utils.Route{
 		"QueryMessages",
 		"Post",
-		"/database/{dbname}/{datasource}/{stream}/0/querymessages",
+		"/{apiver}/beamtime/{beamtime}/{datasource}/{stream}/0/querymessages",
 		routeQueryMessages,
 	},
 	utils.Route{
 		"ResetConter",
 		"Post",
-		"/database/{dbname}/{datasource}/{stream}/{groupid}/resetcounter",
+		"/{apiver}/beamtime/{beamtime}/{datasource}/{stream}/{groupid}/resetcounter",
 		routeResetCounter,
 	},
 	utils.Route{
 		"MessageOp",
 		"Post",
-		"/database/{dbname}/{datasource}/{stream}/{groupid}/{id}",
+		"/{apiver}/beamtime/{beamtime}/{datasource}/{stream}/{groupid}/{id}",
 		routeMessageOp,
 	},
 	utils.Route{
diff --git a/broker/src/asapo_broker/server/post_create_group.go b/broker/src/asapo_broker/server/post_create_group.go
index b9cfb51f7d10ef99083ffa9a28ec937efff816da..008e72f14d4bf36022a094c923ab301d7ed2bf36 100644
--- a/broker/src/asapo_broker/server/post_create_group.go
+++ b/broker/src/asapo_broker/server/post_create_group.go
@@ -7,6 +7,11 @@ import (
 )
 
 func routeCreateGroupID(w http.ResponseWriter, r *http.Request) {
+	if ok := checkBrokerApiVersion(w, r); !ok {
+		return
+	}
+
+
 	guid := xid.New()
 	w.Write([]byte(guid.String()))
 	logger.Debug("generated new group: " + guid.String())
diff --git a/broker/src/asapo_broker/server/post_create_group_test.go b/broker/src/asapo_broker/server/post_create_group_test.go
index 5f19da351eedf60bc8992a3b7827982d659be194..46f6fb09edd9241e5f54aad5dc7e89192db03152 100644
--- a/broker/src/asapo_broker/server/post_create_group_test.go
+++ b/broker/src/asapo_broker/server/post_create_group_test.go
@@ -32,3 +32,8 @@ func TestGetNewGroup(t *testing.T) {
 
 	logger.UnsetMockLog()
 }
+
+func TestGetNewGroupWrongProtocol(t *testing.T) {
+	w := doRequest("/creategroup", "POST","","/v0.2")
+	assert.Equal(t, http.StatusUnsupportedMediaType, w.Code, "wrong request")
+}
diff --git a/broker/src/asapo_broker/server/post_op_image_test.go b/broker/src/asapo_broker/server/post_op_image_test.go
index 259787e41bc9fc41140daf8bdfb844c55f3939d2..fc1a2d4e72499983f88da4eab17786ffd95f6871 100644
--- a/broker/src/asapo_broker/server/post_op_image_test.go
+++ b/broker/src/asapo_broker/server/post_op_image_test.go
@@ -36,19 +36,19 @@ func (suite *MessageOpTestSuite) TestAckMessageOpOK() {
 	query_str := "{\"Id\":1,\"Op\":\"ackmessage\"}"
 	suite.mock_db.On("ProcessRequest", database.Request{DbName: expectedDBName, DbCollectionName: expectedStream, GroupId: expectedGroupID, Op: "ackmessage", ExtraParam: query_str}).Return([]byte(""), nil)
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request ackmessage")))
-	w := doRequest("/database/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/1" + correctTokenSuffix,"POST",query_str)
+	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/1" + correctTokenSuffix,"POST",query_str)
 	suite.Equal(http.StatusOK, w.Code, "ackmessage OK")
 }
 
 
 func (suite *MessageOpTestSuite) TestAckMessageOpErrorWrongOp() {
 	query_str := "\"Id\":1,\"Op\":\"ackmessage\"}"
-	w := doRequest("/database/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/1" + correctTokenSuffix,"POST",query_str)
+	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/1" + correctTokenSuffix,"POST",query_str)
 	suite.Equal(http.StatusBadRequest, w.Code, "ackmessage wrong")
 }
 
 func (suite *MessageOpTestSuite) TestAckMessageOpErrorWrongID() {
 	query_str := "{\"Id\":1,\"Op\":\"ackmessage\"}"
-	w := doRequest("/database/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/bla" + correctTokenSuffix,"POST",query_str)
+	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/bla" + correctTokenSuffix,"POST",query_str)
 	suite.Equal(http.StatusBadRequest, w.Code, "ackmessage wrong")
 }
diff --git a/broker/src/asapo_broker/server/post_query_images_test.go b/broker/src/asapo_broker/server/post_query_images_test.go
index 0f2b55c1477c4f27747b3b6ce9effccd9c213a2a..16aca9242eebd867c58f90e5860f3bd9c665cca1 100644
--- a/broker/src/asapo_broker/server/post_query_images_test.go
+++ b/broker/src/asapo_broker/server/post_query_images_test.go
@@ -38,7 +38,7 @@ func (suite *QueryTestSuite) TestQueryOK() {
 	suite.mock_db.On("ProcessRequest", database.Request{DbName: expectedDBName, DbCollectionName: expectedStream,Op: "querymessages", ExtraParam: query_str}).Return([]byte("{}"), nil)
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request querymessages")))
 
-	w := doRequest("/database/"+expectedBeamtimeId+"/"+expectedSource+"/"+expectedStream+"/0/querymessages"+correctTokenSuffix, "POST", query_str)
+	w := doRequest("/beamtime/"+expectedBeamtimeId+"/"+expectedSource+"/"+expectedStream+"/0/querymessages"+correctTokenSuffix, "POST", query_str)
 	suite.Equal(http.StatusOK, w.Code, "Query OK")
 }
 
diff --git a/broker/src/asapo_broker/server/post_reset_counter_test.go b/broker/src/asapo_broker/server/post_reset_counter_test.go
index 37f70e2725294ac6aee3ae440000cb24374b16a4..10fb4e1b0a360707df952060f633cb99041d1cfc 100644
--- a/broker/src/asapo_broker/server/post_reset_counter_test.go
+++ b/broker/src/asapo_broker/server/post_reset_counter_test.go
@@ -38,6 +38,6 @@ func (suite *ResetCounterTestSuite) TestResetCounterOK() {
 
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request resetcounter")))
 
-	w := doRequest("/database/"+expectedBeamtimeId+"/"+expectedSource+"/"+expectedStream+"/"+expectedGroupID+"/resetcounter"+correctTokenSuffix+"&value=10", "POST")
+	w := doRequest("/beamtime/"+expectedBeamtimeId+"/"+expectedSource+"/"+expectedStream+"/"+expectedGroupID+"/resetcounter"+correctTokenSuffix+"&value=10", "POST")
 	suite.Equal(http.StatusOK, w.Code, "ResetCounter OK")
 }
diff --git a/broker/src/asapo_broker/server/process_request.go b/broker/src/asapo_broker/server/process_request.go
index 3e937b879ac297a709b962f6f5d96b597bb6d76a..87b6a5075c842d859a9d40b47666f2b81f96cd8f 100644
--- a/broker/src/asapo_broker/server/process_request.go
+++ b/broker/src/asapo_broker/server/process_request.go
@@ -5,13 +5,14 @@ import (
 	"asapo_common/logger"
 	log "asapo_common/logger"
 	"asapo_common/utils"
+	"asapo_common/version"
 	"github.com/gorilla/mux"
 	"net/http"
 )
 
 func extractRequestParameters(r *http.Request, needGroupID bool) (string, string, string, string, bool) {
 	vars := mux.Vars(r)
-	db_name, ok1 := vars["dbname"]
+	db_name, ok1 := vars["beamtime"]
 
 	datasource, ok3 := vars["datasource"]
 	stream, ok4 := vars["stream"]
@@ -49,8 +50,17 @@ func checkGroupID(w http.ResponseWriter, needGroupID bool, group_id string, db_n
 	return false
 }
 
+func checkBrokerApiVersion(w http.ResponseWriter, r *http.Request) bool {
+	_, ok := utils.PrecheckApiVersion(w, r, version.GetBrokerApiVersion())
+	return ok
+}
+
 func processRequest(w http.ResponseWriter, r *http.Request, op string, extra_param string, needGroupID bool) {
-	r.Header.Set("Content-type", "application/json")
+	if ok := checkBrokerApiVersion(w, r); !ok {
+		return
+	}
+
+
 	w.Header().Set("Access-Control-Allow-Origin", "*")
 	db_name, datasource, stream, group_id, ok := extractRequestParameters(r, needGroupID)
 	if !ok {
@@ -58,8 +68,8 @@ func processRequest(w http.ResponseWriter, r *http.Request, op string, extra_par
 		return
 	}
 
-	if err := testAuth(r, db_name); err != nil {
-		writeAuthAnswer(w, "get "+op, db_name, err.Error())
+	if err := authorize(r, db_name); err != nil {
+		writeAuthAnswer(w, "get "+op, db_name, err)
 		return
 	}
 
diff --git a/broker/src/asapo_broker/server/process_request_test.go b/broker/src/asapo_broker/server/process_request_test.go
index b4967924720e52f8a431937d207f216687601171..5ce3aa70bb26c91c332a0733d11d17c5b64fc1dd 100644
--- a/broker/src/asapo_broker/server/process_request_test.go
+++ b/broker/src/asapo_broker/server/process_request_test.go
@@ -3,6 +3,7 @@ package server
 import (
 	"asapo_broker/database"
 	"asapo_common/logger"
+	"asapo_common/structs"
 	"asapo_common/utils"
 	"errors"
 	"github.com/stretchr/testify/assert"
@@ -23,17 +24,31 @@ const wrongGroupID = "_bid2a5auidddp1vl71"
 const expectedSource = "datasource"
 const expectedStream = "stream"
 
+type MockAuthServer struct {
+}
+
+func (a * MockAuthServer) AuthorizeToken(tokenJWT string) (token Token, err error) {
+	if tokenJWT =="ok" {
+		return Token{
+			structs.IntrospectTokenResponse{
+			Sub:        "bt_"+expectedBeamtimeId,
+			AccessTypes: []string{"read"},
+			},
+		},nil
+	} else {
+		return Token{},errors.New("wrong JWT token")
+	}
+}
+
+
 func prepareTestAuth() {
 	expectedBeamtimeId = "beamtime_id"
 	expectedDBName = expectedBeamtimeId + "_" + expectedSource
-	auth = utils.NewHMACAuth("secret")
-	token, err := auth.GenerateToken(&expectedBeamtimeId)
-	if err != nil {
-		panic(err)
-	}
-	correctTokenSuffix = "?token=" + token
+
+	auth = &MockAuthServer{}
+	correctTokenSuffix = "?token=ok"
 	wrongTokenSuffix = "?blablabla=aa"
-	suffixWithWrongToken = "?token=blabla"
+	suffixWithWrongToken = "?token=wrong"
 }
 
 type request struct {
@@ -63,16 +78,20 @@ func doRequest(path string, extra_params ...string) *httptest.ResponseRecorder {
 	if len(extra_params) > 1 {
 		body = strings.NewReader(extra_params[1])
 	}
+	ver := "/v0.1"
+	if len(extra_params) > 2 {
+		ver = extra_params[2]
+	}
 
 	mux := utils.NewRouter(listRoutes)
-	req, _ := http.NewRequest(m, path, body)
+	req, _ := http.NewRequest(m, ver+path, body)
 	w := httptest.NewRecorder()
 	mux.ServeHTTP(w, req)
 	return w
 }
 
 func TestProcessRequestWithoutDatabaseName(t *testing.T) {
-	w := doRequest("/database/next")
+	w := doRequest("/beamtime/next")
 	assert.Equal(t, http.StatusNotFound, w.Code, "no database name")
 }
 
@@ -107,9 +126,9 @@ func TestProcessRequestTestSuite(t *testing.T) {
 }
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestWithWrongToken() {
-	logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("wrong token")))
+	logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("wrong JWT token")))
 
-	w := doRequest("/database/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + suffixWithWrongToken)
+	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + suffixWithWrongToken)
 
 	suite.Equal(http.StatusUnauthorized, w.Code, "wrong token")
 }
@@ -117,7 +136,7 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestWithWrongToken() {
 func (suite *ProcessRequestTestSuite) TestProcessRequestWithNoToken() {
 	logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("cannot extract")))
 
-	w := doRequest("/database/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + wrongTokenSuffix)
+	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + wrongTokenSuffix)
 
 	suite.Equal(http.StatusUnauthorized, w.Code, "no token")
 }
@@ -131,7 +150,7 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestWithWrongDatabaseName()
 
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request next")))
 
-	w := doRequest("/database/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + correctTokenSuffix)
+	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + correctTokenSuffix)
 
 	suite.Equal(http.StatusConflict, w.Code, "wrong database name")
 }
@@ -147,7 +166,7 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestWithConnectionError() {
 	ExpectReconnect(suite.mock_db)
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("reconnected")))
 
-	w := doRequest("/database/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + correctTokenSuffix)
+	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + correctTokenSuffix)
 	time.Sleep(time.Second)
 	suite.Equal(http.StatusNotFound, w.Code, "data not found")
 }
@@ -162,7 +181,7 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestWithInternalDBError() {
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("reconnected")))
 
 	ExpectReconnect(suite.mock_db)
-	w := doRequest("/database/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + correctTokenSuffix)
+	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + correctTokenSuffix)
 	time.Sleep(time.Second)
 
 	suite.Equal(http.StatusNotFound, w.Code, "internal error")
@@ -176,13 +195,13 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestAddsCounter() {
 
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request next in "+expectedDBName)))
 
-	doRequest("/database/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + correctTokenSuffix)
+	doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + correctTokenSuffix)
 	suite.Equal(1, statistics.GetCounter(), "ProcessRequest increases counter")
 }
 
 func (suite *ProcessRequestTestSuite) TestProcessRequestWrongGroupID() {
 	logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("wrong groupid")))
-	w := doRequest("/database/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + wrongGroupID + "/next" + correctTokenSuffix)
+	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + wrongGroupID + "/next" + correctTokenSuffix)
 	suite.Equal(http.StatusBadRequest, w.Code, "wrong group id")
 }
 
@@ -193,5 +212,11 @@ func (suite *ProcessRequestTestSuite) TestProcessRequestAddsDataset() {
 
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing request next in "+expectedDBName)))
 
-	doRequest("/database/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + correctTokenSuffix + "&dataset=true")
+	doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + correctTokenSuffix + "&dataset=true")
+}
+
+
+func (suite *ProcessRequestTestSuite) TestProcessRequestErrorOnWrongProtocol() {
+	w := doRequest("/beamtime/" + expectedBeamtimeId + "/" + expectedSource + "/" + expectedStream + "/" + expectedGroupID + "/next" + correctTokenSuffix,"GET","","/v0.2")
+	suite.Equal(http.StatusUnsupportedMediaType, w.Code, "wrong protocol")
 }
diff --git a/broker/src/asapo_broker/server/request_common.go b/broker/src/asapo_broker/server/request_common.go
index 28f2bc38d729c1cc14cafc6e696bdf71cfc9ccd8..9476a5a7c0ced30369aa3f05be93360188c727a7 100644
--- a/broker/src/asapo_broker/server/request_common.go
+++ b/broker/src/asapo_broker/server/request_common.go
@@ -2,16 +2,23 @@ package server
 
 import (
 	"asapo_common/logger"
+	"asapo_common/utils"
 	"errors"
 	"net/http"
 	"strconv"
 )
 
-func writeAuthAnswer(w http.ResponseWriter, requestName string, db_name string, err string) {
+func writeAuthAnswer(w http.ResponseWriter, requestName string, db_name string, err error) {
 	log_str := "processing " + requestName + " request in " + db_name + " at " + settings.GetDatabaseServer()
-	logger.Error(log_str + " - " + err)
-	w.WriteHeader(http.StatusUnauthorized)
-	w.Write([]byte(err))
+	logger.Error(log_str + " - " + err.Error())
+
+	httpError, ok := err.(*HttpError)
+	if ok && httpError.statusCode != http.StatusUnauthorized {
+		w.WriteHeader(http.StatusInternalServerError)
+	} else {
+		w.WriteHeader(http.StatusUnauthorized)
+	}
+	w.Write([]byte(err.Error()))
 }
 
 func valueTrue(r *http.Request, key string) bool {
@@ -39,22 +46,40 @@ func valueInt(r *http.Request, key string) int {
 	return i
 }
 
-func datasetRequested(r *http.Request) (bool,int) {
-	return valueTrue(r, "dataset"),valueInt(r,"minsize")
+func datasetRequested(r *http.Request) (bool, int) {
+	return valueTrue(r, "dataset"), valueInt(r, "minsize")
 }
 
-func testAuth(r *http.Request, beamtime_id string) error {
-	token_got := r.URL.Query().Get("token")
+func authorize(r *http.Request, beamtime_id string) error {
+	tokenJWT := r.URL.Query().Get("token")
 
-	if len(token_got) == 0 {
+	if len(tokenJWT) == 0 {
 		return errors.New("cannot extract token from request")
 	}
 
-	token_expect, _ := auth.GenerateToken(&beamtime_id)
+	token, err := auth.AuthorizeToken(tokenJWT)
+	if err != nil {
+		return err
+	}
+
+	err = checkSubject(token.Sub, beamtime_id)
+	if err != nil {
+		return err
+	}
+
+	return checkAccessType(token.AccessTypes)
+}
 
-	if token_got != token_expect {
-		return errors.New("wrong token")
+func checkSubject(subject string, beamtime_id string) error {
+	if subject != utils.SubjectFromBeamtime(beamtime_id) {
+		return errors.New("wrong token subject")
 	}
+	return nil
+}
 
+func checkAccessType(accessTypes []string) error {
+	if !utils.StringInSlice("read",accessTypes) {
+		return errors.New("wrong token access type")
+	}
 	return nil
 }
diff --git a/broker/src/asapo_broker/server/server.go b/broker/src/asapo_broker/server/server.go
index 01bf25de7195193e46041c12e0d00b95f722bc42..1537fa08642c36454b679b54e218d5ad5b6f4007 100644
--- a/broker/src/asapo_broker/server/server.go
+++ b/broker/src/asapo_broker/server/server.go
@@ -3,39 +3,39 @@ package server
 import (
 	"asapo_broker/database"
 	log "asapo_common/logger"
-	"asapo_common/utils"
 	"errors"
 	"io/ioutil"
 	"net/http"
 )
 
-const  kDefaultresendInterval = 10
-const  kDefaultStreamCacheUpdateIntervalMs = 100
+const kDefaultresendInterval = 10
+const kDefaultStreamCacheUpdateIntervalMs = 100
 
 var db database.Agent
 
 type serverSettings struct {
-	DiscoveryServer     string
-	DatabaseServer      string
-	PerformanceDbServer string
-	PerformanceDbName   string
-	SecretFile          string
-	Port                int
-	LogLevel            string
-	discoveredDbAddress string
-	CheckResendInterval *int
+	DiscoveryServer             string
+	DatabaseServer              string
+	PerformanceDbServer         string
+	PerformanceDbName           string
+	MonitorPerformance 			bool
+	AuthorizationServer         string
+	Port                        int
+	LogLevel                    string
+	discoveredDbAddress         string
+	CheckResendInterval         *int
 	StreamCacheUpdateIntervalMs *int
 }
 
 func (s *serverSettings) GetResendInterval() int {
-	if s.CheckResendInterval==nil {
+	if s.CheckResendInterval == nil {
 		return kDefaultresendInterval
 	}
 	return *s.CheckResendInterval
 }
 
 func (s *serverSettings) GetStreamCacheUpdateInterval() int {
-	if s.StreamCacheUpdateIntervalMs==nil {
+	if s.StreamCacheUpdateIntervalMs == nil {
 		return kDefaultStreamCacheUpdateIntervalMs
 	}
 	return *s.StreamCacheUpdateIntervalMs
@@ -51,7 +51,7 @@ func (s *serverSettings) GetDatabaseServer() string {
 
 var settings serverSettings
 var statistics serverStatistics
-var auth utils.Auth
+var auth Authorizer
 
 type discoveryAPI struct {
 	Client  *http.Client
@@ -91,7 +91,7 @@ func InitDB(dbAgent database.Agent) (err error) {
 		log.Debug("Got mongodb server: " + settings.discoveredDbAddress)
 	}
 
-	db.SetSettings(database.DBSettings{ReadFromInprocessPeriod: settings.GetResendInterval(),UpdateStreamCachePeriodMs: settings.GetStreamCacheUpdateInterval()})
+	db.SetSettings(database.DBSettings{ReadFromInprocessPeriod: settings.GetResendInterval(), UpdateStreamCachePeriodMs: settings.GetStreamCacheUpdateInterval()})
 
 	return db.Connect(settings.GetDatabaseServer())
 }
diff --git a/broker/src/asapo_broker/server/server_nottested.go b/broker/src/asapo_broker/server/server_nottested.go
index ffa4df68f9fab3e6fc40dbdbd68c66d8a43a4dc7..ae96af4d5a957f7bc5529ad0f78510a648cdabd0 100644
--- a/broker/src/asapo_broker/server/server_nottested.go
+++ b/broker/src/asapo_broker/server/server_nottested.go
@@ -18,18 +18,16 @@ func StartStatistics() {
 }
 
 func Start() {
-	StartStatistics()
+	if settings.MonitorPerformance {
+		StartStatistics()
+	}
 	mux := utils.NewRouter(listRoutes)
 	log.Info("Listening on port: " + strconv.Itoa(settings.Port))
 	log.Fatal(http.ListenAndServe(":"+strconv.Itoa(settings.Port), http.HandlerFunc(mux.ServeHTTP)))
 }
 
-func createAuth() (utils.Auth, error) {
-	secret, err := utils.ReadFirstStringFromFile(settings.SecretFile)
-	if err != nil {
-		return nil, err
-	}
-	return utils.NewHMACAuth(secret), nil
+func createAuth() Authorizer {
+	return &AsapoAuthorizer{settings.AuthorizationServer,&http.Client{}}
 }
 
 func ReadConfig(fname string) (log.Level, error) {
@@ -61,17 +59,11 @@ func ReadConfig(fname string) (log.Level, error) {
 		return log.FatalLevel, errors.New("PerformanceDbName not set")
 	}
 
-	if settings.SecretFile == "" {
-		return log.FatalLevel, errors.New("Secret file not set")
-	}
-
-	var err error
-	auth, err = createAuth()
-	if err != nil {
-		return log.FatalLevel, err
+	if settings.AuthorizationServer == "" {
+		return log.FatalLevel, errors.New("AuthorizationServer not set")
 	}
 
-	level, err := log.LevelFromString(settings.LogLevel)
+	auth = createAuth()
 
-	return level, err
+	return log.LevelFromString(settings.LogLevel)
 }
diff --git a/broker/src/asapo_broker/server/statistics_nottested.go b/broker/src/asapo_broker/server/statistics_nottested.go
index c2728623d93b161305d5f9285cea9af751350466..3b6d3ee4b65a2c55957ed253cc473a36b91648e8 100644
--- a/broker/src/asapo_broker/server/statistics_nottested.go
+++ b/broker/src/asapo_broker/server/statistics_nottested.go
@@ -9,7 +9,7 @@ import (
 
 func (st *serverStatistics) Monitor() {
 	for {
-		time.Sleep(1000 * time.Millisecond)
+		time.Sleep(10000 * time.Millisecond)
 		if err := st.WriteStatistic(); err != nil {
 		    logstr := "sending statistics to " + settings.PerformanceDbServer + ", dbname: " + settings.PerformanceDbName
 			log.Error(logstr + " - " + err.Error())
diff --git a/common/cpp/CMakeLists.txt b/common/cpp/CMakeLists.txt
index f59ad1bcf8f18acae4dc8da02ccd73eb654cb55d..8f90ff257a0db963ffc133518e2768c020975fc7 100644
--- a/common/cpp/CMakeLists.txt
+++ b/common/cpp/CMakeLists.txt
@@ -6,6 +6,9 @@ add_subdirectory(src/json_parser)
 
 add_subdirectory(src/data_structs)
 
+add_subdirectory(src/version)
+
+
 add_subdirectory(src/http_client)
 
 add_subdirectory(src/logger)
@@ -20,7 +23,8 @@ endif()
 
 install(DIRECTORY ${ASAPO_CXX_COMMON_INCLUDE_DIR}/asapo/common
         DESTINATION include/asapo
-        PATTERN "*.h.in" EXCLUDE)
+        PATTERN "*.h.in" EXCLUDE
+        PATTERN "*/internal" EXCLUDE)
 
 install(DIRECTORY ${ASAPO_CXX_COMMON_INCLUDE_DIR}/asapo/logger
         DESTINATION include/asapo )
diff --git a/common/cpp/include/asapo/common/data_structs.h b/common/cpp/include/asapo/common/data_structs.h
index 35b5e49d5b29d2c5726168ca1029a880d9f77695..96cdaf8c81ef046c05d7699d6e90004a947d149c 100644
--- a/common/cpp/include/asapo/common/data_structs.h
+++ b/common/cpp/include/asapo/common/data_structs.h
@@ -11,49 +11,52 @@
 
 namespace asapo {
 
+const std::string kFinishStreamKeyword = "asapo_finish_stream";
+const std::string kNoNextStreamKeyword = "asapo_no_next";
+
 class JsonStringParser;
 
 uint64_t NanosecsEpochFromTimePoint(std::chrono::system_clock::time_point);
-uint64_t  EpochNanosecsFromNow();
+uint64_t EpochNanosecsFromNow();
 std::chrono::system_clock::time_point TimePointfromNanosec(uint64_t nanoseconds_from_epoch);
 std::string IsoDateFromEpochNanosecs(uint64_t time_from_epoch_nanosec);
 uint64_t NanosecsEpochFromISODate(std::string date_time);
 
-
-bool TimeFromJson(const JsonStringParser& parser, const std::string& name, std::chrono::system_clock::time_point* val);
+bool TimeFromJson(const JsonStringParser &parser, const std::string &name, std::chrono::system_clock::time_point* val);
 
 class MessageMeta {
-  public:
-    std::string name;
-    std::chrono::system_clock::time_point timestamp;
-    uint64_t size{0};
-    uint64_t id{0};
-    std::string source;
-    std::string metadata;
-    uint64_t buf_id{0};
-    uint64_t dataset_substream{0};
-    std::string Json() const;
-    bool SetFromJson(const std::string& json_string);
-    std::string FullName(const std::string& base_path) const;
+ public:
+  std::string name;
+  std::chrono::system_clock::time_point timestamp;
+  uint64_t size{0};
+  uint64_t id{0};
+  std::string source;
+  std::string metadata;
+  uint64_t buf_id{0};
+  uint64_t dataset_substream{0};
+  std::string Json() const;
+  bool SetFromJson(const std::string &json_string);
+  std::string FullName(const std::string &base_path) const;
 };
 
-
 struct StreamInfo {
-    uint64_t last_id{0};
-    std::string name;
-    std::chrono::system_clock::time_point timestamp_created;
-    std::chrono::system_clock::time_point timestamp_lastentry;
-    std::string Json(bool add_last) const;
-    bool SetFromJson(const std::string& json_string,bool read_last);
+  uint64_t last_id{0};
+  std::string name;
+  bool finished{false};
+  std::string next_stream;
+  std::chrono::system_clock::time_point timestamp_created;
+  std::chrono::system_clock::time_point timestamp_lastentry;
+  std::string Json() const;
+  bool SetFromJson(const std::string &json_string);
 };
 
 using StreamInfos = std::vector<StreamInfo>;
 
-inline bool operator==(const MessageMeta& lhs, const MessageMeta& rhs) {
-    return  (lhs.name == rhs.name &&
-             lhs.id == rhs.id &&
-             lhs.timestamp == rhs.timestamp &&
-             lhs.size == rhs.size);
+inline bool operator==(const MessageMeta &lhs, const MessageMeta &rhs) {
+    return (lhs.name == rhs.name &&
+        lhs.id == rhs.id &&
+        lhs.timestamp == rhs.timestamp &&
+        lhs.size == rhs.size);
 }
 
 using MessageData = std::unique_ptr<uint8_t[]>;
@@ -64,10 +67,10 @@ using MessageMetas = std::vector<MessageMeta>;
 using IdList = std::vector<uint64_t>;
 
 struct DataSet {
-    uint64_t id;
-    uint64_t expected_size;
-    MessageMetas content;
-    bool SetFromJson(const std::string& json_string);
+  uint64_t id;
+  uint64_t expected_size;
+  MessageMetas content;
+  bool SetFromJson(const std::string &json_string);
 };
 
 using SubDirList = std::vector<std::string>;
@@ -77,38 +80,122 @@ enum class SourceType {
   kRaw
 };
 
-Error GetSourceTypeFromString(std::string stype,SourceType *type);
+Error GetSourceTypeFromString(std::string stype, SourceType* type);
 std::string GetStringFromSourceType(SourceType type);
 
 struct SourceCredentials {
-    SourceCredentials(SourceType type, std::string beamtime, std::string beamline, std::string data_source, std::string token):
-        beamtime_id{std::move(beamtime)},
-        beamline{std::move(beamline)},
-        data_source{std::move(data_source)},
-        user_token{std::move(token)},
-        type{type}{};
-    SourceCredentials() {};
-    static const std::string kDefaultStream;
-    static const std::string kDefaultBeamline;
-    static const std::string kDefaultBeamtimeId;
-    std::string beamtime_id;
-    std::string beamline;
-    std::string data_source;
-    std::string user_token;
-    SourceType type = SourceType::kProcessed;
-    std::string GetString() {
-        return (type==SourceType::kRaw?std::string("raw"):std::string("processed")) + "%"+ beamtime_id + "%" + beamline + "%" + data_source + "%" + user_token;
-    };
+  SourceCredentials(SourceType type,
+                    std::string beamtime,
+                    std::string beamline,
+                    std::string data_source,
+                    std::string token) :
+      beamtime_id{std::move(beamtime)},
+      beamline{std::move(beamline)},
+      data_source{std::move(data_source)},
+      user_token{std::move(token)},
+      type{type} {};
+  SourceCredentials() {};
+  static const std::string kDefaultStream;
+  static const std::string kDefaultBeamline;
+  static const std::string kDefaultBeamtimeId;
+  std::string beamtime_id;
+  std::string beamline;
+  std::string data_source;
+  std::string user_token;
+  SourceType type = SourceType::kProcessed;
+  std::string GetString() {
+      return (type == SourceType::kRaw ? std::string("raw") : std::string("processed")) + "%" + beamtime_id + "%"
+          + beamline + "%" + data_source + "%" + user_token;
+  };
 };
 
 enum IngestModeFlags : uint64_t {
-    kTransferData = 1 << 0,
-    kTransferMetaDataOnly = 1 << 1,
-    kStoreInFilesystem = 1 << 2,
-    kStoreInDatabase = 1 << 3,
+  kTransferData = 1 << 0,
+  kTransferMetaDataOnly = 1 << 1,
+  kStoreInFilesystem = 1 << 2,
+  kStoreInDatabase = 1 << 3,
 };
 
 const uint64_t kDefaultIngestMode = kTransferData | kStoreInFilesystem | kStoreInDatabase;
 
+class ClientProtocol {
+ private:
+  std::string version_;
+  std::string discovery_version_;
+  std::string name_;
+ public:
+  ClientProtocol(std::string version, std::string name,std::string discovery_version) : version_{version}, name_{name} {
+      discovery_version_ = discovery_version;
+  };
+  ClientProtocol() = delete;
+  virtual std::string GetString() = 0;
+  const std::string &GetVersion() const {
+      return version_;
+  }
+  const std::string &GetDiscoveryVersion() const {
+      return discovery_version_;
+  }
+  const std::string &GetName() const {
+      return name_;
+  }
+};
+
+class ConsumerProtocol final : public ClientProtocol {
+ private:
+  std::string authorizer_version_;
+  std::string file_transfer_service_version_;
+  std::string broker_version_;
+  std::string rds_version_;
+ public:
+  ConsumerProtocol(std::string version,
+                   std::string discovery_version,
+                   std::string authorizer_version,
+                   std::string file_transfer_service_version,
+                   std::string broker_version,
+                   std::string rds_version)
+      : ClientProtocol(version, "consumer protocol",discovery_version) {
+      authorizer_version_ = authorizer_version;
+      file_transfer_service_version_ = file_transfer_service_version;
+      broker_version_ = broker_version;
+      rds_version_ = rds_version;
+  }
+  const std::string &GetAuthorizerVersion() const {
+      return authorizer_version_;
+  }
+  const std::string &GetFileTransferServiceVersion() const {
+      return file_transfer_service_version_;
+  }
+  const std::string &GetRdsVersion() const {
+      return rds_version_;
+  }
+  const std::string &GetBrokerVersion() const {
+      return broker_version_;
+  };
+  ConsumerProtocol() = delete;
+  std::string GetString() override {
+      return std::string();
+  }
+};
+
+class ProducerProtocol final : public ClientProtocol {
+ private:
+  std::string receiver_version_;
+ public:
+  ProducerProtocol(std::string version,
+                   std::string discovery_version,
+                   std::string receiver_version)
+      : ClientProtocol(version, "producer protocol",discovery_version) {
+      receiver_version_ = receiver_version;
+  };
+  const std::string &GetReceiverVersion() const {
+      return receiver_version_;
+  }
+  ProducerProtocol() = delete;
+  std::string GetString() override {
+      return std::string();
+  }
+};
+
 }
+
 #endif //ASAPO_message_meta_H
diff --git a/common/cpp/include/asapo/common/error.h b/common/cpp/include/asapo/common/error.h
index c2259b79b551f0b3a1449ce30a55443aa75951da..24d78d5399cc7fa64307e44b63692596883dc2d1 100644
--- a/common/cpp/include/asapo/common/error.h
+++ b/common/cpp/include/asapo/common/error.h
@@ -39,7 +39,7 @@ class ErrorInterface {
     virtual std::string Explain() const noexcept = 0;
     virtual void Append(const std::string& value) noexcept = 0;
     virtual ErrorType GetErrorType() const noexcept = 0;
-    virtual const CustomErrorData* GetCustomData() = 0;
+    virtual CustomErrorData* GetCustomData() = 0;
     virtual void SetCustomData(std::unique_ptr<CustomErrorData> data) = 0;
     virtual ~ErrorInterface() = default; // needed for unique_ptr to delete itself
 };
@@ -96,7 +96,7 @@ class SimpleError: public ErrorInterface {
     SimpleError(std::string error, ErrorType error_type ): error_{std::move(error)}, error_type_{error_type} {
     }
 
-    const CustomErrorData* GetCustomData() override {
+    CustomErrorData* GetCustomData() override {
         if (custom_data_) {
             return custom_data_.get();
         } else {
diff --git a/common/cpp/include/asapo/common/internal/version.h.in b/common/cpp/include/asapo/common/internal/version.h.in
new file mode 100644
index 0000000000000000000000000000000000000000..79fcea4136e61555e17ee7fc03f5d14d7f66a54a
--- /dev/null
+++ b/common/cpp/include/asapo/common/internal/version.h.in
@@ -0,0 +1,45 @@
+#ifndef ASAPO_VERSION_H
+#define ASAPO_VERSION_H
+
+#include <iostream>
+#include "string.h"
+
+#include "asapo/common/data_structs.h"
+#include "asapo/common/error.h"
+#include "asapo/http_client/http_client.h"
+
+namespace asapo {
+
+
+const char kVersion[] = "@ASAPO_VERSION@@ASAPO_VERSION_COMMIT@";
+
+inline void ExitAfterPrintVersionIfNeeded(std::string prefix,int argc, char* argv[]) {
+    if (argc == 2 && strcmp(argv[1], "-v") == 0) {
+        std::cout << prefix << ", version " << kVersion << std::endl;
+        exit(0);
+    }
+}
+
+const ConsumerProtocol kConsumerProtocol{"@ASAPO_CONSUMER_PROTOCOL@","@ASAPO_DISCOVERY_API_VER@",
+                                         "@ASAPO_AUTHORIZER_API_VER@","@ASAPO_FILE_TRANSFER_SERVICE_API_VER@","@ASAPO_BROKER_API_VER@","@ASAPO_RDS_API_VER@"};
+const ProducerProtocol kProducerProtocol{"@ASAPO_PRODUCER_PROTOCOL@","@ASAPO_DISCOVERY_API_VER@", "@ASAPO_RECEIVER_API_VER@"};
+
+inline std::string GetReceiverApiVersion() {
+    return "@ASAPO_RECEIVER_API_VER@";
+}
+
+inline std::string GetRdsApiVersion() {
+    return "@ASAPO_RDS_API_VER@";
+}
+
+inline int VersionToNumber(const std::string& version) {
+    return int(atof(version.c_str()+2)*1000);
+}
+
+Error ExtractVersionFromResponse(const std::string &response,
+                                 const std::string &client,
+                                 std::string* server_info,
+                                 bool* supported);
+}
+
+#endif //ASAPO_VERSION_H
diff --git a/common/cpp/include/asapo/common/networking.h b/common/cpp/include/asapo/common/networking.h
index bd7f5379f302f0fbd05e423b270495e699958bb0..79f29abb4cefe2095cb06859f11611d0b229d142 100644
--- a/common/cpp/include/asapo/common/networking.h
+++ b/common/cpp/include/asapo/common/networking.h
@@ -36,6 +36,7 @@ enum NetworkErrorCode : uint16_t {
     kNetErrorReauthorize,
     kNetErrorWarning,
     kNetErrorWrongRequest,
+    kNetErrorNotSupported,
     kNetErrorNoData,
     kNetAuthorizationError,
     kNetErrorInternalServerError = 65535,
@@ -44,6 +45,7 @@ enum NetworkErrorCode : uint16_t {
 //TODO need to use an serialization framework to ensure struct consistency on different computers
 
 const std::size_t kMaxMessageSize = 1024;
+const std::size_t kMaxVersionSize = 10;
 const std::size_t kNCustomParams = 3;
 using CustomRequestData = uint64_t[kNCustomParams];
 const std::size_t kPosIngestMode = 0;
@@ -56,6 +58,7 @@ struct GenericRequestHeader {
         memcpy(custom_data, header.custom_data, kNCustomParams * sizeof(uint64_t)),
         memcpy(message, header.message, kMaxMessageSize);
         strncpy(stream, header.stream, kMaxMessageSize);
+        strncpy(api_version, header.api_version, kMaxVersionSize);
     }
 
     /* Keep in mind that the message here is just strncpy'ed, you can change the message later */
@@ -65,6 +68,7 @@ struct GenericRequestHeader {
         op_code{i_op_code}, data_id{i_data_id}, data_size{i_data_size}, meta_size{i_meta_size} {
         strncpy(message, i_message.c_str(), kMaxMessageSize);
         strncpy(stream, i_stream.c_str(), kMaxMessageSize);
+        strncpy(api_version,"v0.0", kMaxVersionSize);
     }
 
     Opcode      op_code;
@@ -74,6 +78,7 @@ struct GenericRequestHeader {
     CustomRequestData    custom_data;
     char        message[kMaxMessageSize]; /* Can also be a binary message (e.g. MemoryRegionDetails) */
     char        stream[kMaxMessageSize]; /* Must be a string (strcpy is used) */
+    char        api_version[kMaxVersionSize]; /* Must be a string (strcpy is used) */
     std::string Json() {
         std::string s = "{\"id\":" + std::to_string(data_id) + ","
                         "\"buffer\":\"" + std::string(message) + "\"" + ","
@@ -81,7 +86,6 @@ struct GenericRequestHeader {
                         + "}";
         return s;
     };
-
 };
 
 
diff --git a/common/cpp/include/asapo/common/version.h.in b/common/cpp/include/asapo/common/version.h.in
deleted file mode 100644
index cfb735d837ece16c9a92c11dc09900390a1b4e3e..0000000000000000000000000000000000000000
--- a/common/cpp/include/asapo/common/version.h.in
+++ /dev/null
@@ -1,21 +0,0 @@
-#ifndef ASAPO_VERSION_H
-#define ASAPO_VERSION_H
-
-#include <iostream>
-#include "string.h"
-
-namespace asapo {
-
-const char kVersion[] = "@ASAPO_VERSION@@ASAPO_VERSION_COMMIT@";
-
-inline void ExitAfterPrintVersionIfNeeded(std::string prefix,int argc, char* argv[]) {
-    if (argc == 2 && strcmp(argv[1], "-v") == 0) {
-        std::cout << prefix << ", version " << kVersion << std::endl;
-        exit(0);
-    }
-}
-
-}
-
-
-#endif //ASAPO_VERSION_H
diff --git a/common/cpp/include/asapo/request/request.h b/common/cpp/include/asapo/request/request.h
index e67637a1ccd171926b687330045c3c83dc9a78d8..594c03c85b002d12a50b76a7bc1921bed341f8e8 100644
--- a/common/cpp/include/asapo/request/request.h
+++ b/common/cpp/include/asapo/request/request.h
@@ -9,7 +9,6 @@
 
 namespace asapo {
 
-
 class GenericRequest {
   public:
     GenericRequest() = delete;
diff --git a/common/cpp/include/asapo/request/request_pool_error.h b/common/cpp/include/asapo/request/request_pool_error.h
new file mode 100644
index 0000000000000000000000000000000000000000..48c8a56538b41cf909dcf0716e786e06629ab5b8
--- /dev/null
+++ b/common/cpp/include/asapo/request/request_pool_error.h
@@ -0,0 +1,15 @@
+#ifndef ASAPO_REQUEST_POOL_ERROR_H
+#define ASAPO_REQUEST_POOL_ERROR_H
+
+#include "asapo/common/error.h"
+
+namespace asapo {
+
+class OriginalRequest : public CustomErrorData {
+ public:
+  GenericRequestPtr request;
+};
+
+}
+
+#endif //ASAPO_REQUEST_POOL_ERROR_H
diff --git a/common/cpp/src/data_structs/data_structs.cpp b/common/cpp/src/data_structs/data_structs.cpp
index fc7882c29dda4c1a5add7578ca45922fb159a03b..9dea46940fe52b975f7047cac6cf8615da17d253 100644
--- a/common/cpp/src/data_structs/data_structs.cpp
+++ b/common/cpp/src/data_structs/data_structs.cpp
@@ -145,21 +145,24 @@ uint64_t NanosecsEpochFromTimePoint(std::chrono::system_clock::time_point time_p
     return (uint64_t) std::chrono::duration_cast<std::chrono::nanoseconds>(time_point.time_since_epoch()).count();
 }
 
-std::string StreamInfo::Json(bool add_last) const {
+std::string StreamInfo::Json() const {
     auto nanoseconds_from_epoch = NanosecsEpochFromTimePoint(timestamp_created);
     auto nanoseconds_from_epoch_le = NanosecsEpochFromTimePoint(timestamp_lastentry);
-    return (add_last ? "{\"lastId\":" + std::to_string(last_id) + "," : "{") +
+    return ("{\"lastId\":" + std::to_string(last_id) + ","  +
         "\"name\":\"" + name + "\",\"timestampCreated\":" + std::to_string(nanoseconds_from_epoch)
-        + (add_last ? std::string(",") + "\"timestampLast\":" + std::to_string(nanoseconds_from_epoch_le) : "")
-        + "}";
+        + std::string(",") + "\"timestampLast\":" + std::to_string(nanoseconds_from_epoch_le)
+        + ",\"finished\":" + (finished?"true":"false")+ ",\"nextStream\":\"" + next_stream)
+        + "\"}";
 }
 
-bool StreamInfo::SetFromJson(const std::string &json_string, bool read_last) {
+bool StreamInfo::SetFromJson(const std::string &json_string) {
     auto old = *this;
     JsonStringParser parser(json_string);
     uint64_t id;
-    if ((read_last ? parser.GetUInt64("lastId", &last_id) : nullptr) ||
-        (read_last ? !TimeFromJson(parser, "timestampLast", &timestamp_lastentry) : false) ||
+    if (parser.GetUInt64("lastId", &last_id) ||
+        parser.GetBool("finished", &finished) ||
+        parser.GetString("nextStream", &next_stream) ||
+        !TimeFromJson(parser, "timestampLast", &timestamp_lastentry) ||
         parser.GetString("name", &name) ||
         !TimeFromJson(parser, "timestampCreated", &timestamp_created)) {
         *this = old;
diff --git a/common/cpp/src/database/mongodb_client.cpp b/common/cpp/src/database/mongodb_client.cpp
index 442d20b1d27587a500b4aae929323fa8fe8e353e..7cab88b4e40ae6f5e9b3547c65bb784213e6abf8 100644
--- a/common/cpp/src/database/mongodb_client.cpp
+++ b/common/cpp/src/database/mongodb_client.cpp
@@ -357,39 +357,70 @@ Error MongoDBClient::GetDataSetById(const std::string &collection, uint64_t id_i
 
 }
 
-Error StreamInfoFromDbResponse(const std::string &last_record_str,
-                               const std::string &earliest_record_str,
+Error UpdateStreamInfoFromEarliestRecord(const std::string &earliest_record_str,
                                StreamInfo* info) {
-    uint64_t id;
-    std::chrono::system_clock::time_point timestamp_created,timestamp_last;
-
-    auto parser1 = JsonStringParser(last_record_str);
-    Error parse_err = parser1.GetUInt64("_id", &id);
-    if (parse_err) {
-        return DBErrorTemplates::kJsonParseError.Generate(
-            "StreamInfoFromDbResponse: cannot parse mongodb response: " + last_record_str + ": "
-                + parse_err->Explain());
-    }
-    auto ok = TimeFromJson(parser1, "timestamp", &timestamp_last);
+    std::chrono::system_clock::time_point timestamp_created;
+    auto parser = JsonStringParser(earliest_record_str);
+    auto ok = TimeFromJson(parser, "timestamp", &timestamp_created);
     if (!ok) {
         return DBErrorTemplates::kJsonParseError.Generate(
-            "StreamInfoFromDbResponse: cannot parse timestamp in response: " + last_record_str);
+            "UpdateStreamInfoFromEarliestRecord: cannot parse timestamp in response: " + earliest_record_str);
     }
+    info->timestamp_created = timestamp_created;
+    return nullptr;
+}
 
+Error UpdateFinishedStreamInfo(const std::string &metadata,
+                                     StreamInfo* info) {
+    info->finished = true;
+    auto parser = JsonStringParser(metadata);
+    std::string next_stream;
+    auto err = parser.GetString("next_stream", &next_stream);
+    if (err) {
+        return DBErrorTemplates::kJsonParseError.Generate(
+            "UpdateFinishedStreamInfo: cannot parse finished stream meta response: " + metadata);
+    }
+    if (next_stream!=kNoNextStreamKeyword) {
+        info->next_stream = next_stream;
+    }
+    return nullptr;
+}
 
-    auto parser2 = JsonStringParser(earliest_record_str);
-    ok = TimeFromJson(parser2, "timestamp", &timestamp_created);
+Error UpdateStreamInfoFromLastRecord(const std::string &last_record_str,
+                                     StreamInfo* info) {
+    MessageMeta last_message;
+    auto ok = last_message.SetFromJson(last_record_str);
     if (!ok) {
         return DBErrorTemplates::kJsonParseError.Generate(
-            "StreamInfoFromDbResponse: cannot parse timestamp in response: " + earliest_record_str);
+            "UpdateStreamInfoFromLastRecord: cannot parse mongodb response: " + last_record_str);
     }
+    info->last_id = last_message.id;
+    info->timestamp_lastentry = last_message.timestamp;
 
-    info->last_id = id;
-    info->timestamp_created = timestamp_created;
-    info->timestamp_lastentry = timestamp_last;
+    if (last_message.name == kFinishStreamKeyword) {
+        auto err = UpdateFinishedStreamInfo(last_message.metadata, info);
+        if (err) {
+            return err;
+        }
+    }
     return nullptr;
 }
 
+
+Error StreamInfoFromDbResponse(const std::string &last_record_str,
+                               const std::string &earliest_record_str,
+                               StreamInfo* info) {
+    std::chrono::system_clock::time_point timestamp_created;
+
+    auto err = UpdateStreamInfoFromLastRecord(last_record_str,info);
+    if (err) {
+        return err;
+    }
+
+    return UpdateStreamInfoFromEarliestRecord(earliest_record_str,info);
+
+}
+
 Error MongoDBClient::GetStreamInfo(const std::string &collection, StreamInfo* info) const {
     std::string last_record_str, earliest_record_str;
     auto err = GetRecordFromDb(collection, 0, GetRecordMode::kLast, &last_record_str);
@@ -407,20 +438,33 @@ Error MongoDBClient::GetStreamInfo(const std::string &collection, StreamInfo* in
     return StreamInfoFromDbResponse(last_record_str, earliest_record_str, info);
 }
 
-Error MongoDBClient::UpdateStreamInfo(const char* str, StreamInfo* info) const {
-    std::string stream_name{str};
+bool MongoCollectionIsDataStream(const std::string &stream_name)  {
     std::string prefix = std::string(kDBDataCollectionNamePrefix) + "_";
-    if (stream_name.rfind(prefix, 0) == 0) {
-        std::string record_str;
-        StreamInfo next_info;
-        auto err = GetStreamInfo(stream_name, &next_info);
+    return stream_name.rfind(prefix, 0) == 0;
+}
+
+Error MongoDBClient::UpdateCurrentLastStreamInfo(const std::string& collection_name, StreamInfo* info) const {
+    StreamInfo next_info;
+    auto err = GetStreamInfo(collection_name, &next_info);
+    std::string prefix = std::string(kDBDataCollectionNamePrefix) + "_";
+    if (err) {
+        return err;
+    }
+    if (next_info.timestamp_created > info->timestamp_created) {
+        next_info.name = collection_name.substr(prefix.size());
+        *info = next_info;
+    }
+    return nullptr;
+}
+
+
+Error MongoDBClient::UpdateLastStreamInfo(const char* str, StreamInfo* info) const {
+    std::string collection_name{str};
+    if (MongoCollectionIsDataStream(collection_name)) {
+        auto err = UpdateCurrentLastStreamInfo(collection_name, info);
         if (err) {
             return err;
         }
-        if (next_info.timestamp_created > info->timestamp_created) {
-            next_info.name = stream_name.erase(0, prefix.size());
-            *info = next_info;
-        }
     }
     return nullptr;
 }
@@ -444,7 +488,7 @@ Error MongoDBClient::GetLastStream(StreamInfo* info) const {
     if ((strv = mongoc_database_get_collection_names_with_opts(
         database, opts, &error))) {
         for (auto i = 0; strv[i]; i++) {
-            err = UpdateStreamInfo(strv[i], info);
+            err = UpdateLastStreamInfo(strv[i], info);
             if (err) {
                 break;
             }
diff --git a/common/cpp/src/database/mongodb_client.h b/common/cpp/src/database/mongodb_client.h
index a1b9bb5ef3a306006439170761a60ab86bfc6fed..858bb6d073a329c93e5c159dd945a67d6a0aa3d9 100644
--- a/common/cpp/src/database/mongodb_client.h
+++ b/common/cpp/src/database/mongodb_client.h
@@ -69,7 +69,8 @@ class MongoDBClient final : public Database {
     Error UpdateBsonDocument(uint64_t id, const bson_p& document, bool upsert) const;
     Error AddBsonDocumentToArray(bson_t* query, bson_t* update, bool ignore_duplicates) const;
     Error GetRecordFromDb(const std::string& collection, uint64_t id, GetRecordMode mode, std::string* res) const;
-    Error UpdateStreamInfo(const char *str,StreamInfo* info) const;
+    Error UpdateLastStreamInfo(const char *str, StreamInfo* info) const;
+    Error UpdateCurrentLastStreamInfo(const std::string& collection_name, StreamInfo* info) const;
 
 };
 
diff --git a/common/cpp/src/request/request_pool.cpp b/common/cpp/src/request/request_pool.cpp
index e02001ec0c428ce56f680f95be050baf5899c1c2..5df251934d7b8ca16ced26beda4d7312cfb865f2 100644
--- a/common/cpp/src/request/request_pool.cpp
+++ b/common/cpp/src/request/request_pool.cpp
@@ -1,5 +1,5 @@
 #include "asapo/request/request_pool.h"
-
+#include "asapo/request/request_pool_error.h"
 namespace asapo {
 
 RequestPool::RequestPool(uint8_t n_threads,
@@ -60,6 +60,9 @@ Error RequestPool::CanAddRequest(const GenericRequestPtr &request, bool top_prio
 Error RequestPool::AddRequest(GenericRequestPtr request, bool top_priority) {
     std::unique_lock<std::mutex> lock(mutex_);
     if (auto err = CanAddRequest(request, top_priority)) {
+        OriginalRequest* original_request = new OriginalRequest{};
+        original_request->request = std::move(request);
+        err->SetCustomData(std::unique_ptr<CustomErrorData>(original_request));
         return err;
     }
 
diff --git a/common/cpp/src/version/CMakeLists.txt b/common/cpp/src/version/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..71abb5416d246fb255410e132556e4f43f5c5b7b
--- /dev/null
+++ b/common/cpp/src/version/CMakeLists.txt
@@ -0,0 +1,12 @@
+set(TARGET_NAME version)
+set(SOURCE_FILES
+        version.cpp
+)
+
+################################
+# Library
+################################
+
+add_library(${TARGET_NAME} OBJECT ${SOURCE_FILES})
+target_include_directories(${TARGET_NAME} PUBLIC ${ASAPO_CXX_COMMON_INCLUDE_DIR}
+        ${CMAKE_SOURCE_DIR}/3d_party/rapidjson/include)
diff --git a/common/cpp/src/version/version.cpp b/common/cpp/src/version/version.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..57600be0ceb58b7e3515959b643194018d573310
--- /dev/null
+++ b/common/cpp/src/version/version.cpp
@@ -0,0 +1,28 @@
+#include "asapo/common/internal/version.h"
+#include "asapo/json_parser/json_parser.h"
+
+namespace asapo {
+
+Error ExtractVersionFromResponse(const std::string &response,
+                                 const std::string &client,
+                                 std::string* server_info,
+                                 bool* supported) {
+    JsonStringParser parser(response);
+    std::string server_version, current_client_protocol, client_supported;
+    Error err;
+    if ((err = parser.GetString("softwareVersion", &server_version))
+        || (err = parser.GetString("clientSupported", &client_supported))
+        || (err = parser.Embedded("clientProtocol").GetString("versionInfo", &current_client_protocol))) {
+        return err;
+    }
+    if (server_info) {
+        *server_info =
+            "Server version: " + server_version + ", " + client + " protocol on server: " + current_client_protocol;
+    }
+    if (supported) {
+        *supported = client_supported == "yes";
+    }
+    return nullptr;
+}
+
+}
\ No newline at end of file
diff --git a/common/cpp/unittests/data_structs/test_data_structs.cpp b/common/cpp/unittests/data_structs/test_data_structs.cpp
index 17357f5cbf7bdb2b7404bb4d920edd37813557bd..e6507bd21cf36d5c8cde8258a7070519ab3de026 100644
--- a/common/cpp/unittests/data_structs/test_data_structs.cpp
+++ b/common/cpp/unittests/data_structs/test_data_structs.cpp
@@ -140,6 +140,8 @@ StreamInfo PrepareStreamInfo() {
     StreamInfo sinfo;
     sinfo.last_id = 123;
     sinfo.name = "test";
+    sinfo.next_stream = "next";
+    sinfo.finished = true;
     sinfo.timestamp_created = std::chrono::time_point<std::chrono::system_clock>(std::chrono::milliseconds(1));
     sinfo.timestamp_lastentry = std::chrono::time_point<std::chrono::system_clock>(std::chrono::milliseconds(2));
     return sinfo;
@@ -157,36 +159,24 @@ TEST(StreamInfo, ConvertFromJson) {
     StreamInfo result;
 
     auto sinfo = PrepareStreamInfo();
-    std::string json = sinfo.Json(true);
+    std::string json = sinfo.Json();
 
-    auto ok = result.SetFromJson(json,true);
+    auto ok = result.SetFromJson(json);
 
     ASSERT_THAT(ok, Eq(true));
     ASSERT_THAT(result.last_id, sinfo.last_id);
     ASSERT_THAT(result.name, sinfo.name);
     ASSERT_THAT(result.timestamp_created, sinfo.timestamp_created);
     ASSERT_THAT(result.timestamp_lastentry, sinfo.timestamp_lastentry);
+    ASSERT_THAT(result.finished, sinfo.finished);
+    ASSERT_THAT(result.next_stream, sinfo.next_stream);
 }
 
-TEST(StreamInfo, ConvertFromJsonWithoutID) {
-    StreamInfo result;
-
-    auto sinfo = PrepareStreamInfo();
-    std::string json = sinfo.Json(false);
-
-    auto ok = result.SetFromJson(json,false);
-
-    ASSERT_THAT(ok, Eq(true));
-    ASSERT_THAT(result.name, sinfo.name);
-    ASSERT_THAT(result.timestamp_created, sinfo.timestamp_created);
-}
-
-
 TEST(StreamInfo, ConvertFromJsonErr) {
     StreamInfo result;
 
     std::string json = R"({"lastId":123)";
-    auto ok = result.SetFromJson(json,true);
+    auto ok = result.SetFromJson(json);
 
     ASSERT_THAT(ok, Eq(false));
     ASSERT_THAT(result.last_id, Eq(0));
@@ -195,20 +185,12 @@ TEST(StreamInfo, ConvertFromJsonErr) {
 TEST(StreamInfo, ConvertToJson) {
     auto sinfo = PrepareStreamInfo();
 
-    std::string expected_json = R"({"lastId":123,"name":"test","timestampCreated":1000000,"timestampLast":2000000})";
-    auto json = sinfo.Json(true);
+    std::string expected_json = R"({"lastId":123,"name":"test","timestampCreated":1000000,"timestampLast":2000000,"finished":true,"nextStream":"next"})";
+    auto json = sinfo.Json();
 
-    ASSERT_THAT(expected_json, Eq(json));
+    ASSERT_THAT(json,Eq(expected_json));
 }
 
-TEST(StreamInfo, ConvertToJsonWithoutID) {
-    auto sinfo = PrepareStreamInfo();
-
-    std::string expected_json = R"({"name":"test","timestampCreated":1000000})";
-    auto json = sinfo.Json(false);
-
-    ASSERT_THAT(expected_json, Eq(json));
-}
 
 TEST(SourceCredentials, ConvertToString) {
     auto sc = SourceCredentials{SourceType::kRaw,"beamtime","beamline","source","token"};
diff --git a/common/cpp/unittests/request/test_request_pool.cpp b/common/cpp/unittests/request/test_request_pool.cpp
index 8c428f9ad80fb02a549016b27024151d3b5569b3..ccda29729425b60a4a914e7d03ca8e060e93deff 100644
--- a/common/cpp/unittests/request/test_request_pool.cpp
+++ b/common/cpp/unittests/request/test_request_pool.cpp
@@ -6,6 +6,7 @@
 #include "asapo/common/error.h"
 
 #include "asapo/request/request_pool.h"
+#include "asapo/request/request_pool_error.h"
 #include "asapo/request/request_handler_factory.h"
 #include "mocking.h"
 
@@ -209,6 +210,8 @@ TEST_F(RequestPoolTests, RefuseAddRequestIfHitSizeLimitation) {
 
     ASSERT_THAT(nreq, Eq(1));
     ASSERT_THAT(err, Eq(asapo::IOErrorTemplates::kNoSpaceLeft));
+    auto err_data = static_cast<asapo::OriginalRequest*>(err->GetCustomData());
+    ASSERT_THAT(err_data, Ne(nullptr));
 }
 
 TEST_F(RequestPoolTests, RefuseAddRequestIfHitMemoryLimitation) {
@@ -225,6 +228,9 @@ TEST_F(RequestPoolTests, RefuseAddRequestIfHitMemoryLimitation) {
 
     ASSERT_THAT(nreq, Eq(1));
     ASSERT_THAT(err, Eq(asapo::IOErrorTemplates::kNoSpaceLeft));
+    auto err_data = static_cast<asapo::OriginalRequest*>(err->GetCustomData());
+    ASSERT_THAT(err_data, Ne(nullptr));
+
 }
 
 TEST_F(RequestPoolTests, RefuseAddRequestsIfHitSizeLimitation) {
diff --git a/common/go/src/asapo_common/go.mod b/common/go/src/asapo_common/go.mod
index ae7d8cf1e05a99c0ed5c8dc2d5383e570f58ca86..31eee8613f224bc7425474074f9199044934a36c 100644
--- a/common/go/src/asapo_common/go.mod
+++ b/common/go/src/asapo_common/go.mod
@@ -1,3 +1,10 @@
 module asapo_common
 
-go 1.14
+go 1.16
+
+require (
+	github.com/dgrijalva/jwt-go v3.2.0+incompatible
+	github.com/gorilla/mux v1.8.0
+	github.com/sirupsen/logrus v1.8.0
+	github.com/stretchr/testify v1.7.0
+)
diff --git a/common/go/src/asapo_common/go.sum b/common/go/src/asapo_common/go.sum
new file mode 100644
index 0000000000000000000000000000000000000000..6f35f25f5853eb59ed8eb5b781b6410839850826
--- /dev/null
+++ b/common/go/src/asapo_common/go.sum
@@ -0,0 +1,24 @@
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
+github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
+github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
+github.com/magefile/mage v1.10.0 h1:3HiXzCUY12kh9bIuyXShaVe529fJfyqoVM42o/uom2g=
+github.com/magefile/mage v1.10.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/sirupsen/logrus v1.8.0 h1:nfhvjKcUMhBMVqbKHJlk5RPrrfYr/NMo3692g0dwfWU=
+github.com/sirupsen/logrus v1.8.0/go.mod h1:4GuYW9TZmE769R5STWrRakJc4UqQ3+QQ95fyz7ENv1A=
+github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/common/go/src/asapo_common/structs/structs.go b/common/go/src/asapo_common/structs/structs.go
new file mode 100644
index 0000000000000000000000000000000000000000..e6517c12d67cae9799a098500798a5a663dba3af
--- /dev/null
+++ b/common/go/src/asapo_common/structs/structs.go
@@ -0,0 +1,31 @@
+package structs
+
+type FolderTokenTokenExtraClaim struct {
+	RootFolder string
+}
+
+type AccessTokenExtraClaim struct {
+	AccessTypes []string
+}
+
+type IntrospectTokenRequest struct {
+	Token      string
+}
+
+type IntrospectTokenResponse struct {
+	Sub         string
+	AccessTypes []string
+}
+
+type IssueTokenRequest struct {
+	Subject     map[string]string
+	DaysValid   int
+	AccessTypes []string
+}
+
+type IssueTokenResponse struct {
+	Token       string
+	Sub         string
+	AccessTypes []string
+	Expires     string
+}
diff --git a/common/go/src/asapo_common/utils/authorization.go b/common/go/src/asapo_common/utils/authorization.go
index aac273c5a213b2fd79abe03a55e025a37c666523..c913611730477d2611c0e6410a77bbf25e72f05b 100644
--- a/common/go/src/asapo_common/utils/authorization.go
+++ b/common/go/src/asapo_common/utils/authorization.go
@@ -13,23 +13,19 @@ import (
 	"time"
 )
 
-type AuthorizationRequest struct {
-	Token   string
-	Command string
-	URL     string
+type Auth interface {
+	GenerateToken(...interface{}) (string, error)
+	ProcessAuth(http.HandlerFunc, string) http.HandlerFunc
+	Name() string
+	CheckAndGetContent(token string, extraClaims interface{}, payload ...interface{}) (string,error)
 }
 
-type AuthorizationResponce struct {
-	Status       int
-	StatusText   string
-	UserName     string
-	Token        string
-	ValidityTime int
+func SubjectFromBeamtime(bt string)string {
+	return "bt_"+bt
 }
 
-type Auth interface {
-	GenerateToken(...interface{}) (string, error)
-	Name() string
+func SubjectFromBeamline(bl string)string {
+	return "bl_"+bl
 }
 
 
@@ -79,10 +75,15 @@ func ExtractAuthInfo(r *http.Request) (authType, token string, err error) {
 
 type CustomClaims struct {
 	jwt.StandardClaims
-	Duration    time.Duration
 	ExtraClaims interface{}
 }
 
+func (claim *CustomClaims) SetExpiration(duration time.Duration){
+	if duration > 0 {
+		claim.ExpiresAt = time.Now().Add(duration).Unix()
+	}
+}
+
 type JWTAuth struct {
 	Key string
 }
@@ -101,10 +102,6 @@ func (t JWTAuth) GenerateToken(val ...interface{}) (string, error) {
 		return "", errors.New("Wrong claims")
 	}
 
-	if claims.Duration > 0 {
-		claims.ExpiresAt = time.Now().Add(claims.Duration).Unix()
-	}
-
 	token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
 	tokenString, err := token.SignedString([]byte(t.Key))
 
@@ -115,6 +112,11 @@ func (t JWTAuth) GenerateToken(val ...interface{}) (string, error) {
 	return tokenString, nil
 }
 
+func (a *JWTAuth)ProcessAuth(fn http.HandlerFunc, payload string) http.HandlerFunc {
+	// payload ignored
+	return ProcessJWTAuth(fn,a.Key)
+}
+
 func ProcessJWTAuth(fn http.HandlerFunc, key string) http.HandlerFunc {
 	return func(w http.ResponseWriter, r *http.Request) {
 		if (r.RequestURI == "/health-check") { // always allow /health-check request
@@ -132,7 +134,7 @@ func ProcessJWTAuth(fn http.HandlerFunc, key string) http.HandlerFunc {
 
 		if authType == "Bearer" {
 			if claims, ok := CheckJWTToken(token, key); !ok {
-				http.Error(w, "Authorization error - tocken does not match", http.StatusUnauthorized)
+				http.Error(w, "Authorization error - token does not match", http.StatusUnauthorized)
 				return
 			} else {
 				ctx = context.WithValue(ctx, "TokenClaims", claims)
@@ -145,6 +147,26 @@ func ProcessJWTAuth(fn http.HandlerFunc, key string) http.HandlerFunc {
 	}
 }
 
+func (a *JWTAuth) CheckAndGetContent(token string, extraClaims interface{}, payload ...interface{}) (subject string,err error) {
+	// payload ignored
+	c, ok := CheckJWTToken(token,a.Key)
+	if !ok {
+		return "",errors.New("wrong JWT token")
+	}
+	claim,ok  := c.(*CustomClaims)
+	if !ok {
+		return "",errors.New("cannot get CustomClaims")
+	}
+
+	subject = claim.Subject
+
+	if extraClaims!=nil {
+		err = MapToStruct(claim.ExtraClaims.(map[string]interface{}), extraClaims)
+	}
+	return subject,err
+}
+
+
 func CheckJWTToken(token, key string) (jwt.Claims, bool) {
 
 	if token == "" {
@@ -162,14 +184,21 @@ func CheckJWTToken(token, key string) (jwt.Claims, bool) {
 	return nil, false
 }
 
-func JobClaimFromContext(r *http.Request, val interface{}) error {
+func JobClaimFromContext(r *http.Request, customClaim **CustomClaims, val interface{}) error {
 	c := r.Context().Value("TokenClaims")
 
 	if c == nil {
 		return errors.New("Empty context")
 	}
 
-	claim := c.(*CustomClaims)
+	claim,ok  := c.(*CustomClaims)
+	if !ok {
+		return errors.New("cannot get CustomClaims")
+	}
+
+	if customClaim!=nil {
+		*customClaim = claim
+	}
 
 	return MapToStruct(claim.ExtraClaims.(map[string]interface{}), val)
 }
@@ -184,7 +213,7 @@ func NewHMACAuth(key string) *HMACAuth {
 }
 
 func (a *HMACAuth) Name() string {
-	return "Bearer"
+	return "HMAC-SHA-256"
 }
 
 
@@ -209,7 +238,11 @@ func (h HMACAuth) GenerateToken(val ...interface{}) (string, error) {
 	return sha, nil
 }
 
-func ProcessHMACAuth(fn http.HandlerFunc, key string) http.HandlerFunc {
+func (a *HMACAuth)ProcessAuth(fn http.HandlerFunc, payload string) http.HandlerFunc {
+	return ProcessHMACAuth(fn,payload,a.Key)
+}
+
+func ProcessHMACAuth(fn http.HandlerFunc, payload, key string) http.HandlerFunc {
 	return func(w http.ResponseWriter, r *http.Request) {
 
 		authType, token, err := ExtractAuthInfo(r)
@@ -218,11 +251,9 @@ func ProcessHMACAuth(fn http.HandlerFunc, key string) http.HandlerFunc {
 			http.Error(w, err.Error(), http.StatusUnauthorized)
 			return
 		}
-	// todo extract beamline from request
-		value := "beamline"
 		if authType == "HMAC-SHA-256" {
-			if !CheckHMACToken(value, token, key) {
-				http.Error(w, "Internal authorization error - tocken does not match", http.StatusUnauthorized)
+			if !CheckHMACToken(payload, token, key) {
+				http.Error(w, "Internal authorization error - token does not match", http.StatusUnauthorized)
 				return
 			}
 		} else {
@@ -233,6 +264,23 @@ func ProcessHMACAuth(fn http.HandlerFunc, key string) http.HandlerFunc {
 	}
 }
 
+func (a *HMACAuth) CheckAndGetContent(token string, _ interface{}, payload ...interface{}) (string,error) {
+	if len(payload) != 1 {
+		return "",errors.New("wrong payload")
+	}
+	value, ok := payload[0].(string)
+	if !ok {
+		return "",errors.New("wrong payload")
+	}
+
+	ok = CheckHMACToken(token,value,a.Key)
+	if !ok {
+		return "",errors.New("wrong HMAC token")
+	}
+	return value,nil
+
+}
+
 func CheckHMACToken(value string, token, key string) bool {
 
 	if token == "" {
diff --git a/common/go/src/asapo_common/utils/authorization_test.go b/common/go/src/asapo_common/utils/authorization_test.go
index d8a9b47745443a470d52051d73e2de11d36ab94d..cda7f43b091bfba5dc5f8228199e6ae125f01933 100644
--- a/common/go/src/asapo_common/utils/authorization_test.go
+++ b/common/go/src/asapo_common/utils/authorization_test.go
@@ -8,8 +8,16 @@ import (
 	"github.com/stretchr/testify/assert"
 )
 
+type authorizationResponse struct {
+	Status       int
+	StatusText   string
+	UserName     string
+	Token        string
+	ValidityTime int
+}
+
 type JobClaim struct {
-	AuthorizationResponce
+	authorizationResponse
 	JobInd string
 }
 
@@ -17,7 +25,7 @@ type JobClaim struct {
 func writeAuthResponse(w http.ResponseWriter, r *http.Request) {
 	w.WriteHeader(http.StatusOK)
 	var jc JobClaim
-	JobClaimFromContext(r, &jc)
+	JobClaimFromContext(r,nil,&jc)
 	w.Write([]byte(jc.UserName))
 	w.Write([]byte(jc.JobInd))
 }
@@ -25,14 +33,15 @@ func writeAuthResponse(w http.ResponseWriter, r *http.Request) {
 func TestGenerateJWTToken(t *testing.T) {
 
 	a := NewJWTAuth("hi")
-	token, _ := a.GenerateToken((&CustomClaims{Duration: 0, ExtraClaims: nil}))
-	assert.Equal(t, "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJEdXJhdGlvbiI"+
-		"6MCwiRXh0cmFDbGFpbXMiOm51bGx9.JJcqNZciIDILk-A2sJZCY1sND458bcjNv6tXC2jxric",
+	cc := CustomClaims{ExtraClaims: nil}
+	cc.SetExpiration(0)
+	token, _ := a.GenerateToken((&cc))
+	assert.Equal(t, "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJFeHRyYUNsYWltcyI6bnVsbH0.QXaiODT7V1tEwmVKCLfpH2WbgjNJpqJcNgeVivFm7GY",
 		token, "jwt token")
 
 }
 
-var HJWTAuthtests = []struct {
+var JWTAuthtests = []struct {
 	Mode       string
 	Key        string
 	User       string
@@ -49,7 +58,7 @@ var HJWTAuthtests = []struct {
 }
 
 func TestProcessJWTAuth(t *testing.T) {
-	for _, test := range HJWTAuthtests {
+	for _, test := range JWTAuthtests {
 		req, _ := http.NewRequest("POST", "http://blabla", nil)
 
 		var claim JobClaim
@@ -58,7 +67,9 @@ func TestProcessJWTAuth(t *testing.T) {
 
 		a := NewJWTAuth(test.Key)
 
-		token, _ := a.GenerateToken((&CustomClaims{Duration: test.Duration, ExtraClaims: &claim}))
+		cc:= CustomClaims{ExtraClaims: &claim}
+		cc.SetExpiration(test.Duration)
+		token, _ := a.GenerateToken((&cc))
 		if test.Mode == "header" {
 			req.Header.Add("Authorization", "Bearer "+token)
 		}
diff --git a/common/go/src/asapo_common/utils/helpers.go b/common/go/src/asapo_common/utils/helpers.go
index 9b7dc20936f9c4da8d56825b2cafcc2ea317dcf9..714cebf61ad85db215259c5147d06af861c51ad5 100644
--- a/common/go/src/asapo_common/utils/helpers.go
+++ b/common/go/src/asapo_common/utils/helpers.go
@@ -25,6 +25,22 @@ func MapToJson(res interface{}) ([]byte, error) {
 	}
 }
 
+func DeepCopy(a, b interface{}) {
+	byt, _ := json.Marshal(a)
+	json.Unmarshal(byt, b)
+}
+
+
+func GetInt64FromMap(s map[string]interface{}, name string) (int64,bool) {
+	val, ok := InterfaceToInt64(s[name])
+	if ok {
+		return val,true
+	} else {
+		return -1, false
+	}
+}
+
+
 func InterfaceToInt64(val interface{}) (int64, bool) {
 	val64, ok := val.(int64)
 	var valf64 float64
diff --git a/common/go/src/asapo_common/utils/http.go b/common/go/src/asapo_common/utils/http.go
index 4e6f9736e6ff015ba66a5b56a6b32b9e3a1e4982..4a6ce7daef8084c202bebc439676e4fee071665b 100644
--- a/common/go/src/asapo_common/utils/http.go
+++ b/common/go/src/asapo_common/utils/http.go
@@ -10,7 +10,8 @@ func ExtractRequest(r *http.Request, request interface{}) error {
 	return decoder.Decode(request)
 }
 
-func WriteServerError(w http.ResponseWriter, err error,code int) {
+func WriteServerError(w http.ResponseWriter, err error, code int) {
 	w.WriteHeader(code)
 	w.Write([]byte(err.Error()))
-}
\ No newline at end of file
+}
+
diff --git a/common/go/src/asapo_common/utils/structs.go b/common/go/src/asapo_common/utils/structs.go
deleted file mode 100644
index 37f7a1f567f2f39b8ff953d8796a19d1e6e8f1bc..0000000000000000000000000000000000000000
--- a/common/go/src/asapo_common/utils/structs.go
+++ /dev/null
@@ -1,5 +0,0 @@
-package utils
-
-type FolderTokenTokenExtraClaim struct {
-	RootFolder string
-}
diff --git a/common/go/src/asapo_common/utils/version.go b/common/go/src/asapo_common/utils/version.go
new file mode 100644
index 0000000000000000000000000000000000000000..5e8e03a9dff6ade41f5df8bf421d03bc2ff91987
--- /dev/null
+++ b/common/go/src/asapo_common/utils/version.go
@@ -0,0 +1,46 @@
+package utils
+
+import (
+	"errors"
+	"github.com/gorilla/mux"
+	"net/http"
+	"strconv"
+	"strings"
+)
+
+
+func VersionToNumber(ver string) int {
+	ver = strings.TrimPrefix(ver,"v")
+	floatNum, err := strconv.ParseFloat(ver, 64)
+	if err!=nil {
+		return 0
+	}
+	return int(floatNum*1000)
+}
+
+
+func ExtractVersion(r *http.Request) (int, error) {
+	vars := mux.Vars(r)
+	ver_str, ok := vars["apiver"]
+	if !ok {
+		return 0, errors.New("cannot extract version")
+	}
+	ver := VersionToNumber(ver_str)
+	if ver == 0 {
+		return 0, errors.New("cannot extract version")
+	}
+	return ver, nil
+}
+
+func PrecheckApiVersion(w http.ResponseWriter, r *http.Request, currentVersion string) (apiVer int, ok bool) {
+	apiVer, err := ExtractVersion(r)
+	if err != nil {
+		WriteServerError(w, err, http.StatusBadRequest)
+		return 0, false
+	}
+	if apiVer > VersionToNumber(currentVersion) {
+		WriteServerError(w, errors.New("version not supported"), http.StatusUnsupportedMediaType)
+		return 0, false
+	}
+	return apiVer, true
+}
diff --git a/common/go/src/asapo_common/version/version.go b/common/go/src/asapo_common/version/version.go
index d4674f37bddf4e84eaaec6cde2686ed6e68f306a..769bb508f97920842e834725840f056ff6342263 100644
--- a/common/go/src/asapo_common/version/version.go
+++ b/common/go/src/asapo_common/version/version.go
@@ -8,6 +8,35 @@ import (
 
 var version string
 
+var consumerProtocolVersion string
+var producerProtocolVersion string
+var discoveryApiVersion string
+var authorizerApiVersion string
+var ftsApiVersion string
+var brokerApiVersion string
+
+func GetDiscoveryApiVersion() string {
+	return discoveryApiVersion
+}
+func GetAuthorizerApiVersion() string {
+	return authorizerApiVersion
+}
+func GetFtsApiVersion() string {
+	return ftsApiVersion
+}
+func GetBrokerApiVersion() string {
+	return brokerApiVersion
+}
+
+func GetProducerProtocolVersion() string {
+	return producerProtocolVersion
+}
+
+func GetConsumerProtocolVersion() string {
+	return consumerProtocolVersion
+}
+
+
 func GetVersion() string {
     return version
 }
diff --git a/common/go/src/asapo_common/version/version_lib.go.in b/common/go/src/asapo_common/version/version_lib.go.in
index 5fc343fe161f109a008ddad671ee7ba3eabd14d1..a276d7babb1542811c6444c7c49c54de33e14474 100644
--- a/common/go/src/asapo_common/version/version_lib.go.in
+++ b/common/go/src/asapo_common/version/version_lib.go.in
@@ -4,4 +4,10 @@ package version
 // This file is overridden on build with build-time informations.
 func init(){
 	version   = "@ASAPO_VERSION@@ASAPO_VERSION_COMMIT@"
+	consumerProtocolVersion = "@ASAPO_CONSUMER_PROTOCOL@"
+	producerProtocolVersion = "@ASAPO_PRODUCER_PROTOCOL@"
+	discoveryApiVersion = "@ASAPO_DISCOVERY_API_VER@"
+	authorizerApiVersion = "@ASAPO_AUTHORIZER_API_VER@"
+	ftsApiVersion = "@ASAPO_FILE_TRANSFER_SERVICE_API_VER@"
+	brokerApiVersion = "@ASAPO_BROKER_API_VER@"
 }
diff --git a/config/nomad/authorizer.nmd.in b/config/nomad/authorizer.nmd.in
index dfd6072465d23f6deff1ba2b1b5694f6c6030be5..7fd1366679d2a9bc8770f5df2c220e146aed9a46 100644
--- a/config/nomad/authorizer.nmd.in
+++ b/config/nomad/authorizer.nmd.in
@@ -50,6 +50,13 @@ job "authorizer" {
          change_mode   = "signal"
          change_signal = "SIGHUP"
       }
+      template {
+         source        = "@WORK_DIR@/auth_secret_admin.key"
+         destination   = "auth_secret_admin.key"
+         change_mode   = "signal"
+         change_signal = "SIGHUP"
+      }
+
     }
   }
 }
diff --git a/config/nomad/discovery.nmd.in b/config/nomad/discovery.nmd.in
index 760aed3c700405ec289b47de69c0dd37c4ba4730..e3c36befc01f51fcbd7478342fe6fb6006d7b916 100644
--- a/config/nomad/discovery.nmd.in
+++ b/config/nomad/discovery.nmd.in
@@ -30,7 +30,7 @@ job "discovery" {
         check {
           name     = "alive"
           type     = "http"
-          path     = "/asapo-receiver"
+          path     = "/health"
           interval = "10s"
           timeout  = "2s"
           initial_status =   "passing"
diff --git a/consumer/api/cpp/CMakeLists.txt b/consumer/api/cpp/CMakeLists.txt
index 24661f0da20047c86732a811b657cf9f8d5e24ee..47d30a20f9a39dd6f83f070f39d6ac67e838fa68 100644
--- a/consumer/api/cpp/CMakeLists.txt
+++ b/consumer/api/cpp/CMakeLists.txt
@@ -12,7 +12,7 @@ set(SOURCE_FILES
 # Library
 ################################
 add_library(${TARGET_NAME} STATIC ${SOURCE_FILES} $<TARGET_OBJECTS:system_io>
-            $<TARGET_OBJECTS:json_parser> $<TARGET_OBJECTS:data_structs> $<TARGET_OBJECTS:curl_http_client> )
+            $<TARGET_OBJECTS:json_parser> $<TARGET_OBJECTS:data_structs> $<TARGET_OBJECTS:version>  $<TARGET_OBJECTS:curl_http_client> )
 
 target_include_directories(${TARGET_NAME} PUBLIC include ${ASAPO_CXX_COMMON_INCLUDE_DIR}  ${LIBFABRIC_INCLUDE_DIR} ${CURL_INCLUDE_DIRS})
 
diff --git a/consumer/api/cpp/include/asapo/asapo_consumer.h b/consumer/api/cpp/include/asapo/asapo_consumer.h
index e9dee4e9b5ebe0b003680e268eae48b32e91a32b..176d1d56269f35053b1fe8285a84352f6b72d487 100644
--- a/consumer/api/cpp/include/asapo/asapo_consumer.h
+++ b/consumer/api/cpp/include/asapo/asapo_consumer.h
@@ -3,7 +3,6 @@
 
 #include "asapo/consumer/consumer.h"
 #include "asapo/consumer/consumer_error.h"
-#include "asapo/common/version.h"
 #include <ostream>
 
 #endif //ASAPO_ASAPO_CONSUMER_H
diff --git a/consumer/api/cpp/include/asapo/consumer/consumer.h b/consumer/api/cpp/include/asapo/consumer/consumer.h
index 678c7179b73a53cdace52021d4e2488a951e52a4..dba769567f4fec4dbd39de384f50f79205a024bc 100644
--- a/consumer/api/cpp/include/asapo/consumer/consumer.h
+++ b/consumer/api/cpp/include/asapo/consumer/consumer.h
@@ -12,6 +12,12 @@
 
 namespace asapo {
 
+enum class StreamFilter {
+  kAllStreams,
+  kFinishedStreams,
+  kUnfinishedStreams
+};
+
 class Consumer {
   public:
     //! Reset counter for the specific group.
@@ -21,6 +27,14 @@ class Consumer {
       \return nullptr of command was successful, otherwise error.
     */
     virtual Error ResetLastReadMarker(std::string group_id, std::string stream) = 0;
+  //! Return version
+  /*!
+    \param client_info - for client version
+    \param server_info - for server version
+    \param supported - set to true if client is supported by server
+    \return nullptr if command was successful, otherwise error.
+  */
+    virtual Error GetVersionInfo(std::string* client_info,std::string* server_info, bool* supported) = 0;
 
     virtual Error SetLastReadMarker(std::string group_id, uint64_t value, std::string stream) = 0;
 
@@ -73,10 +87,10 @@ class Consumer {
      */
     virtual NetworkConnectionType CurrentConnectionType() const = 0;
 
-    //! Get list of streams, set from to "" to get all streams
-    virtual StreamInfos GetStreamList(std::string from, Error* err) = 0;
+  //! Get list of streams with filter, set from to "" to get all streams
+    virtual StreamInfos GetStreamList(std::string from,  StreamFilter filter, Error* err) = 0;
 
-    //! Get current number of datasets
+    //! Get current number of messages in stream
     /*!
       \param stream - stream to use
       \param err - return nullptr of operation succeed, error otherwise.
@@ -84,11 +98,21 @@ class Consumer {
     */
     virtual uint64_t GetCurrentSize(std::string stream, Error* err) = 0;
 
-    //! Generate new GroupID.
-    /*!
-      \param err - return nullptr of operation succeed, error otherwise.
-      \return group ID.
-    */
+  //! Get current number of datasets in stream
+  /*!
+    \param stream - stream to use
+    \param include_incomplete - flag to count incomplete datasets as well
+    \param err - return nullptr if operation succeeded, error otherwise.
+    \return number of datasets.
+  */
+    virtual uint64_t GetCurrentDatasetCount(std::string stream, bool include_incomplete, Error* err) = 0;
+
+  //! Generate new GroupID.
+  /*!
+    \param err - return nullptr if operation succeeded, error otherwise.
+    \return group ID.
+  */
+
     virtual std::string GenerateNewGroupId(Error* err) = 0;
 
     //! Get Beamtime metadata.
diff --git a/consumer/api/cpp/include/asapo/consumer/consumer_error.h b/consumer/api/cpp/include/asapo/consumer/consumer_error.h
index 7f3990d226fbcc3f663e367992496cc07e0fa3f7..38e5329d8f4aad6fdd98e25a85aae6d609c8cb04 100644
--- a/consumer/api/cpp/include/asapo/consumer/consumer_error.h
+++ b/consumer/api/cpp/include/asapo/consumer/consumer_error.h
@@ -14,7 +14,8 @@ enum class ConsumerErrorType {
     kInterruptedTransaction,
     kLocalIOError,
     kWrongInput,
-    kPartialData
+    kPartialData,
+    kUnsupportedClient
 };
 
 using ConsumerErrorTemplate = ServiceErrorTemplate<ConsumerErrorType, ErrorType::kConsumerError>;
@@ -63,6 +64,11 @@ auto const kWrongInput = ConsumerErrorTemplate {
     "wrong input", ConsumerErrorType::kWrongInput
 };
 
+auto const kUnsupportedClient = ConsumerErrorTemplate {
+    "unsupported client version", ConsumerErrorType::kUnsupportedClient
+};
+
+
 auto const kInterruptedTransaction = ConsumerErrorTemplate {
     "server error", ConsumerErrorType::kInterruptedTransaction
 };
diff --git a/consumer/api/cpp/src/consumer_impl.cpp b/consumer/api/cpp/src/consumer_impl.cpp
index 7a94ecf6e028e1f0444eb5d18e298cd3421ad62b..7d3d565e5db6a80a6382e31b6c39adaf7d9f504c 100644
--- a/consumer/api/cpp/src/consumer_impl.cpp
+++ b/consumer/api/cpp/src/consumer_impl.cpp
@@ -12,6 +12,8 @@
 #include "fabric_consumer_client.h"
 #include "rds_response_error.h"
 
+#include "asapo/common/internal/version.h"
+
 using std::chrono::system_clock;
 
 namespace asapo {
@@ -32,7 +34,7 @@ Error GetNoDataResponseFromJson(const std::string &json_string, ConsumerErrorDat
 Error GetPartialDataResponseFromJson(const std::string &json_string, PartialErrorData* data) {
     Error err;
     auto parser = JsonStringParser(json_string);
-    uint64_t  id,size;
+    uint64_t id, size;
     if ((err = parser.GetUInt64("size", &size)) ||
         (err = parser.GetUInt64("_id", &id))) {
         return err;
@@ -84,6 +86,7 @@ Error ConsumerErrorFromHttpCode(const RequestOutput* response, const HttpCode &c
         case HttpCode::InternalServerError:return ConsumerErrorTemplates::kInterruptedTransaction.Generate(response->to_string());
         case HttpCode::NotFound:return ConsumerErrorTemplates::kUnavailableService.Generate(response->to_string());
         case HttpCode::Conflict:return ConsumerErrorFromNoDataResponse(response->to_string());
+        case HttpCode::UnsupportedMediaType:return ConsumerErrorTemplates::kUnsupportedClient.Generate(response->to_string());
         default:return ConsumerErrorTemplates::kInterruptedTransaction.Generate(response->to_string());
     }
 }
@@ -178,19 +181,20 @@ Error ConsumerImpl::ProcessRequest(RequestOutput* response, const RequestInfo &r
     return ProcessRequestResponce(err, response, code);
 }
 
-Error ConsumerImpl::DiscoverService(const std::string &service_name, std::string* uri_to_set) {
-    if (!uri_to_set->empty()) {
-        return nullptr;
-    }
+RequestInfo ConsumerImpl::GetDiscoveryRequest(const std::string &service_name) const {
     RequestInfo ri;
     ri.host = endpoint_;
-    ri.api = "/asapo-discovery/" + service_name;
-    RequestOutput output;
-    Error err;
-    err = ProcessRequest(&output, ri, nullptr);
-    *uri_to_set = std::move(output.string_output);
+    ri.api = "/asapo-discovery/" + kConsumerProtocol.GetDiscoveryVersion() + "/" + service_name;
+    ri.extra_params = "&protocol=" + kConsumerProtocol.GetVersion();
+    return ri;
+}
+
+Error ConsumerImpl::ProcessDiscoverServiceResult(Error err, std::string* uri_to_set) {
     if (err != nullptr || uri_to_set->empty()) {
         uri_to_set->clear();
+        if (err == ConsumerErrorTemplates::kUnsupportedClient) {
+            return err;
+        }
         return ConsumerErrorTemplates::kUnavailableService.Generate(" on " + endpoint_
                                                                         + (err != nullptr ? ": " + err->Explain()
                                                                                           : ""));
@@ -198,6 +202,17 @@ Error ConsumerImpl::DiscoverService(const std::string &service_name, std::string
     return nullptr;
 }
 
+Error ConsumerImpl::DiscoverService(const std::string &service_name, std::string* uri_to_set) {
+    if (!uri_to_set->empty()) {
+        return nullptr;
+    }
+    auto ri = GetDiscoveryRequest(service_name);
+    RequestOutput output;
+    auto err = ProcessRequest(&output, ri, nullptr);
+    *uri_to_set = std::move(output.string_output);
+    return ProcessDiscoverServiceResult(std::move(err), uri_to_set);
+}
+
 bool ConsumerImpl::SwitchToGetByIdIfPartialData(Error* err,
                                                 const std::string &response,
                                                 std::string* group_id,
@@ -215,7 +230,10 @@ bool ConsumerImpl::SwitchToGetByIdIfPartialData(Error* err,
     return false;
 }
 
-bool ConsumerImpl::SwitchToGetByIdIfNoData(Error* err, const std::string &response, std::string* group_id, std::string* redirect_uri) {
+bool ConsumerImpl::SwitchToGetByIdIfNoData(Error* err,
+                                           const std::string &response,
+                                           std::string* group_id,
+                                           std::string* redirect_uri) {
     if (*err == ConsumerErrorTemplates::kNoData) {
         auto error_data = static_cast<const ConsumerErrorData*>((*err)->GetCustomData());
         if (error_data == nullptr) {
@@ -235,7 +253,7 @@ RequestInfo ConsumerImpl::PrepareRequestInfo(std::string api_url, bool dataset,
     ri.api = std::move(api_url);
     if (dataset) {
         ri.extra_params = "&dataset=true";
-        ri.extra_params += "&minsize="+std::to_string(min_size);
+        ri.extra_params += "&minsize=" + std::to_string(min_size);
     }
     return ri;
 }
@@ -248,10 +266,12 @@ Error ConsumerImpl::GetRecordFromServer(std::string* response, std::string group
         return ConsumerErrorTemplates::kWrongInput.Generate("empty stream");
     }
 
-    interrupt_flag_= false;
+    interrupt_flag_ = false;
     std::string request_suffix = OpToUriCmd(op);
     std::string request_group = OpToUriCmd(op);
-    std::string request_api = "/database/" + source_credentials_.beamtime_id + "/" + source_credentials_.data_source
+    std::string
+        request_api = "/" + kConsumerProtocol.GetBrokerVersion() + "/beamtime/" + source_credentials_.beamtime_id + "/"
+        + source_credentials_.data_source
         + "/" + std::move(stream);
     uint64_t elapsed_ms = 0;
     Error no_data_error;
@@ -301,20 +321,20 @@ Error ConsumerImpl::GetRecordFromServer(std::string* response, std::string group
 
 Error ConsumerImpl::GetNext(std::string group_id, MessageMeta* info, MessageData* data, std::string stream) {
     return GetMessageFromServer(GetMessageServerOperation::GetNext,
-                              0,
-                              std::move(group_id),
-                              std::move(stream),
-                              info,
-                              data);
+                                0,
+                                std::move(group_id),
+                                std::move(stream),
+                                info,
+                                data);
 }
 
 Error ConsumerImpl::GetLast(MessageMeta* info, MessageData* data, std::string stream) {
     return GetMessageFromServer(GetMessageServerOperation::GetLast,
-                              0,
-                              "0",
-                              std::move(stream),
-                              info,
-                              data);
+                                0,
+                                "0",
+                                std::move(stream),
+                                info,
+                                data);
 }
 
 std::string ConsumerImpl::OpToUriCmd(GetMessageServerOperation op) {
@@ -326,9 +346,9 @@ std::string ConsumerImpl::OpToUriCmd(GetMessageServerOperation op) {
 }
 
 Error ConsumerImpl::GetMessageFromServer(GetMessageServerOperation op, uint64_t id, std::string group_id,
-                                       std::string stream,
-                                       MessageMeta* info,
-                                       MessageData* data) {
+                                         std::string stream,
+                                         MessageMeta* info,
+                                         MessageData* data) {
     if (info == nullptr) {
         return ConsumerErrorTemplates::kWrongInput.Generate();
     }
@@ -437,7 +457,7 @@ Error ConsumerImpl::TryGetDataFromBuffer(const MessageMeta* info, MessageData* d
 
 std::string ConsumerImpl::GenerateNewGroupId(Error* err) {
     RequestInfo ri;
-    ri.api = "/creategroup";
+    ri.api = "/" + kConsumerProtocol.GetBrokerVersion() + "/creategroup";
     ri.post = true;
     return BrokerRequestWithTimeout(ri, err);
 }
@@ -446,7 +466,7 @@ Error ConsumerImpl::ServiceRequestWithTimeout(const std::string &service_name,
                                               std::string* service_uri,
                                               RequestInfo request,
                                               RequestOutput* response) {
-    interrupt_flag_= false;
+    interrupt_flag_ = false;
     uint64_t elapsed_ms = 0;
     Error err;
     while (elapsed_ms <= timeout_ms_) {
@@ -498,7 +518,7 @@ Error ConsumerImpl::FtsRequestWithTimeout(MessageMeta* info, MessageData* data)
 
 RequestInfo ConsumerImpl::CreateFileTransferRequest(const MessageMeta* info) const {
     RequestInfo ri;
-    ri.api = "/transfer";
+    ri.api = "/" + kConsumerProtocol.GetFileTransferServiceVersion() + "/transfer";
     ri.post = true;
     ri.body = "{\"Folder\":\"" + source_path_ + "\",\"FileName\":\"" + info->name + "\"}";
     ri.cookie = "Authorization=Bearer " + folder_token_;
@@ -518,7 +538,8 @@ Error ConsumerImpl::ResetLastReadMarker(std::string group_id, std::string stream
 
 Error ConsumerImpl::SetLastReadMarker(std::string group_id, uint64_t value, std::string stream) {
     RequestInfo ri;
-    ri.api = "/database/" + source_credentials_.beamtime_id + "/" + source_credentials_.data_source + "/"
+    ri.api = "/" + kConsumerProtocol.GetBrokerVersion() + "/beamtime/" + source_credentials_.beamtime_id + "/"
+        + source_credentials_.data_source + "/"
         + std::move(stream) + "/" + std::move(group_id) + "/resetcounter";
     ri.extra_params = "&value=" + std::to_string(value);
     ri.post = true;
@@ -529,20 +550,8 @@ Error ConsumerImpl::SetLastReadMarker(std::string group_id, uint64_t value, std:
 }
 
 uint64_t ConsumerImpl::GetCurrentSize(std::string stream, Error* err) {
-    RequestInfo ri;
-    ri.api = "/database/" + source_credentials_.beamtime_id + "/" + source_credentials_.data_source +
-        +"/" + std::move(stream) + "/size";
-    auto responce = BrokerRequestWithTimeout(ri, err);
-    if (*err) {
-        return 0;
-    }
-
-    JsonStringParser parser(responce);
-    uint64_t size;
-    if ((*err = parser.GetUInt64("size", &size)) != nullptr) {
-        return 0;
-    }
-    return size;
+    auto ri = GetSizeRequestForSingleMessagesStream(stream);
+    return GetCurrentCount(stream, ri, err);
 }
 
 Error ConsumerImpl::GetById(uint64_t id, MessageMeta* info, MessageData* data, std::string stream) {
@@ -560,13 +569,14 @@ Error ConsumerImpl::GetRecordFromServerById(uint64_t id, std::string* response,
     }
 
     RequestInfo ri;
-    ri.api = "/database/" + source_credentials_.beamtime_id + "/" + source_credentials_.data_source +
+    ri.api = "/" + kConsumerProtocol.GetBrokerVersion() + "/beamtime/" + source_credentials_.beamtime_id + "/"
+        + source_credentials_.data_source +
         +"/" + std::move(stream) +
         "/" + std::move(
         group_id) + "/" + std::to_string(id);
     if (dataset) {
         ri.extra_params += "&dataset=true";
-        ri.extra_params += "&minsize="+std::to_string(min_size);
+        ri.extra_params += "&minsize=" + std::to_string(min_size);
     }
 
     Error err;
@@ -576,7 +586,9 @@ Error ConsumerImpl::GetRecordFromServerById(uint64_t id, std::string* response,
 
 std::string ConsumerImpl::GetBeamtimeMeta(Error* err) {
     RequestInfo ri;
-    ri.api = "/database/" + source_credentials_.beamtime_id + "/" + source_credentials_.data_source + "/default/0/meta/0";
+    ri.api =
+        "/" + kConsumerProtocol.GetBrokerVersion() + "/beamtime/" + source_credentials_.beamtime_id + "/"
+            + source_credentials_.data_source + "/default/0/meta/0";
 
     return BrokerRequestWithTimeout(ri, err);
 }
@@ -585,7 +597,7 @@ DataSet DecodeDatasetFromResponse(std::string response, Error* err) {
     DataSet res;
     if (!res.SetFromJson(std::move(response))) {
         *err = ConsumerErrorTemplates::kInterruptedTransaction.Generate("malformed response:" + response);
-        return {0,0,MessageMetas{}};
+        return {0, 0, MessageMetas{}};
     } else {
         return res;
     }
@@ -598,7 +610,8 @@ MessageMetas ConsumerImpl::QueryMessages(std::string query, std::string stream,
     }
 
     RequestInfo ri;
-    ri.api = "/database/" + source_credentials_.beamtime_id + "/" + source_credentials_.data_source +
+    ri.api = "/" + kConsumerProtocol.GetBrokerVersion() + "/beamtime/" + source_credentials_.beamtime_id + "/"
+        + source_credentials_.data_source +
         "/" + std::move(stream) + "/0/querymessages";
     ri.post = true;
     ri.body = std::move(query);
@@ -613,11 +626,16 @@ MessageMetas ConsumerImpl::QueryMessages(std::string query, std::string stream,
 }
 
 DataSet ConsumerImpl::GetNextDataset(std::string group_id, uint64_t min_size, std::string stream, Error* err) {
-    return GetDatasetFromServer(GetMessageServerOperation::GetNext, 0, std::move(group_id), std::move(stream),min_size, err);
+    return GetDatasetFromServer(GetMessageServerOperation::GetNext,
+                                0,
+                                std::move(group_id),
+                                std::move(stream),
+                                min_size,
+                                err);
 }
 
 DataSet ConsumerImpl::GetLastDataset(uint64_t min_size, std::string stream, Error* err) {
-    return GetDatasetFromServer(GetMessageServerOperation::GetLast, 0, "0", std::move(stream),min_size, err);
+    return GetDatasetFromServer(GetMessageServerOperation::GetLast, 0, "0", std::move(stream), min_size, err);
 }
 
 DataSet ConsumerImpl::GetDatasetFromServer(GetMessageServerOperation op,
@@ -632,15 +650,15 @@ DataSet ConsumerImpl::GetDatasetFromServer(GetMessageServerOperation op,
     } else {
         *err = GetRecordFromServer(&response, std::move(group_id), std::move(stream), op, true, min_size);
     }
-    if (*err != nullptr && *err!=ConsumerErrorTemplates::kPartialData) {
-        return {0, 0,MessageMetas{}};
+    if (*err != nullptr && *err != ConsumerErrorTemplates::kPartialData) {
+        return {0, 0, MessageMetas{}};
     }
     return DecodeDatasetFromResponse(response, err);
 }
 
 DataSet ConsumerImpl::GetDatasetById(uint64_t id, uint64_t min_size, std::string stream, Error* err) {
     if (id == 0) {
-        *err =  ConsumerErrorTemplates::kWrongInput.Generate("id should be positive");
+        *err = ConsumerErrorTemplates::kWrongInput.Generate("id should be positive");
         return {};
     }
     return GetDatasetFromServer(GetMessageServerOperation::GetID, id, "0", std::move(stream), min_size, err);
@@ -657,7 +675,7 @@ StreamInfos ParseStreamsFromResponse(std::string response, Error* err) {
     }
     for (auto stream_encoded : streams_endcoded) {
         StreamInfo si;
-        auto ok = si.SetFromJson(stream_encoded, false);
+        auto ok = si.SetFromJson(stream_encoded);
         if (!ok) {
             *err = TextError("cannot parse " + stream_encoded);
             return StreamInfos{};
@@ -667,23 +685,36 @@ StreamInfos ParseStreamsFromResponse(std::string response, Error* err) {
     return streams;
 }
 
-StreamInfos ConsumerImpl::GetStreamList(std::string from, Error* err) {
-
-    RequestInfo ri;
-    ri.api = "/database/" + source_credentials_.beamtime_id + "/" + source_credentials_.data_source + "/0/streams";
-    ri.post = false;
-    if (!from.empty()) {
-        ri.extra_params = "&from=" + from;
+std::string filterToString(StreamFilter filter) {
+    switch (filter) {
+        case StreamFilter::kAllStreams:return "all";
+        case StreamFilter::kFinishedStreams:return "finished";
+        case StreamFilter::kUnfinishedStreams:return "unfinished";
     }
+}
+
+StreamInfos ConsumerImpl::GetStreamList(std::string from, StreamFilter filter, Error* err) {
+    RequestInfo ri = GetStreamListRequest(from, filter);
 
     auto response = BrokerRequestWithTimeout(ri, err);
     if (*err) {
         return StreamInfos{};
     }
-
     return ParseStreamsFromResponse(std::move(response), err);
 }
 
// Builds the broker GET request for listing streams of the current
// beamtime/data source.
//   from:   optional stream name to start listing from (omitted when empty)
//   filter: serialized via filterToString into the "filter" query parameter
// extra_params always begins with '&' — it is appended after the token query
// parameter when the request URI is assembled (see the request-processing code).
RequestInfo ConsumerImpl::GetStreamListRequest(const std::string &from, const StreamFilter &filter) const {
    RequestInfo ri;
    ri.api = "/" + kConsumerProtocol.GetBrokerVersion() + "/beamtime/" + source_credentials_.beamtime_id + "/"
        + source_credentials_.data_source + "/0/streams";
    ri.post = false;
    if (!from.empty()) {
        ri.extra_params = "&from=" + from;
    }
    ri.extra_params += "&filter=" + filterToString(filter);
    return ri;
}
+
 Error ConsumerImpl::UpdateFolderTokenIfNeeded(bool ignore_existing) {
     if (!folder_token_.empty() && !ignore_existing) {
         return nullptr;
@@ -703,7 +734,7 @@ Error ConsumerImpl::UpdateFolderTokenIfNeeded(bool ignore_existing) {
 RequestInfo ConsumerImpl::CreateFolderTokenRequest() const {
     RequestInfo ri;
     ri.host = endpoint_;
-    ri.api = "/asapo-authorizer/folder";
+    ri.api = "/asapo-authorizer/" + kConsumerProtocol.GetAuthorizerVersion() + "/folder";
     ri.post = true;
     ri.body =
         "{\"Folder\":\"" + source_path_ + "\",\"BeamtimeId\":\"" + source_credentials_.beamtime_id + "\",\"Token\":\""
@@ -743,7 +774,8 @@ Error ConsumerImpl::Acknowledge(std::string group_id, uint64_t id, std::string s
         return ConsumerErrorTemplates::kWrongInput.Generate("empty stream");
     }
     RequestInfo ri;
-    ri.api = "/database/" + source_credentials_.beamtime_id + "/" + source_credentials_.data_source +
+    ri.api = "/" + kConsumerProtocol.GetBrokerVersion() + "/beamtime/" + source_credentials_.beamtime_id + "/"
+        + source_credentials_.data_source +
         +"/" + std::move(stream) +
         "/" + std::move(group_id) + "/" + std::to_string(id);
     ri.post = true;
@@ -764,7 +796,8 @@ IdList ConsumerImpl::GetUnacknowledgedMessages(std::string group_id,
         return {};
     }
     RequestInfo ri;
-    ri.api = "/database/" + source_credentials_.beamtime_id + "/" + source_credentials_.data_source +
+    ri.api = "/" + kConsumerProtocol.GetBrokerVersion() + "/beamtime/" + source_credentials_.beamtime_id + "/"
+        + source_credentials_.data_source +
         +"/" + std::move(stream) +
         "/" + std::move(group_id) + "/nacks";
     ri.extra_params = "&from=" + std::to_string(from_id) + "&to=" + std::to_string(to_id);
@@ -789,7 +822,8 @@ uint64_t ConsumerImpl::GetLastAcknowledgedMessage(std::string group_id, std::str
         return 0;
     }
     RequestInfo ri;
-    ri.api = "/database/" + source_credentials_.beamtime_id + "/" + source_credentials_.data_source +
+    ri.api = "/" + kConsumerProtocol.GetBrokerVersion() + "/beamtime/" + source_credentials_.beamtime_id + "/"
+        + source_credentials_.data_source +
         +"/" + std::move(stream) +
         "/" + std::move(group_id) + "/lastack";
 
@@ -824,7 +858,8 @@ Error ConsumerImpl::NegativeAcknowledge(std::string group_id,
         return ConsumerErrorTemplates::kWrongInput.Generate("empty stream");
     }
     RequestInfo ri;
-    ri.api = "/database/" + source_credentials_.beamtime_id + "/" + source_credentials_.data_source +
+    ri.api = "/" + kConsumerProtocol.GetBrokerVersion() + "/beamtime/" + source_credentials_.beamtime_id + "/"
+        + source_credentials_.data_source +
         +"/" + std::move(stream) +
         "/" + std::move(group_id) + "/" + std::to_string(id);
     ri.post = true;
@@ -835,7 +870,77 @@ Error ConsumerImpl::NegativeAcknowledge(std::string group_id,
     return err;
 }
 void ConsumerImpl::InterruptCurrentOperation() {
-    interrupt_flag_= true;
+    interrupt_flag_ = true;
+}
+
// Returns the number of datasets in a stream.
//   include_incomplete: when true, datasets that are not yet complete count too
// On failure *err is set and 0 is returned.
uint64_t ConsumerImpl::GetCurrentDatasetCount(std::string stream, bool include_incomplete, Error* err) {
    // NOTE(review): GetSizeRequestForDatasetStream moves from 'stream', so the
    // value forwarded to GetCurrentCount below is moved-from (GetCurrentCount
    // does not appear to use it) — confirm this is intentional.
    RequestInfo ri = GetSizeRequestForDatasetStream(stream, include_incomplete);
    return GetCurrentCount(stream, ri, err);
}
+
// Builds the broker "size" request for a dataset stream: the plain size
// request plus the "incomplete" query flag that tells the broker whether
// incomplete datasets should be counted as well.
// Warning: 'stream' is moved from by the helper below.
RequestInfo ConsumerImpl::GetSizeRequestForDatasetStream(std::string &stream, bool include_incomplete) const {
    RequestInfo ri = GetSizeRequestForSingleMessagesStream(stream);
    // Overwrites (not appends) extra_params — the base request leaves it empty.
    ri.extra_params = std::string("&incomplete=") + (include_incomplete ? "true" : "false");
    return ri;
}
+
// Executes a prepared size/count request against the broker and parses the
// "size" field from the JSON response.
// Returns 0 and sets *err on transport or parse failure.
// NOTE(review): the 'stream' parameter is unused in this body — possibly kept
// for call-site symmetry; confirm before relying on it.
uint64_t ConsumerImpl::GetCurrentCount(std::string stream, const RequestInfo &ri, Error* err) {
    auto responce = BrokerRequestWithTimeout(ri, err);
    if (*err) {
        return 0;
    }
    return ParseGetCurrentCountResponce(err, responce);
}
 
+uint64_t ConsumerImpl::ParseGetCurrentCountResponce(Error* err, const std::string &responce) const {
+    JsonStringParser parser(responce);
+    uint64_t size;
+    if ((*err = parser.GetUInt64("size", &size)) != nullptr) {
+        return 0;
+    }
+    return size;
 }
+
+RequestInfo ConsumerImpl::GetSizeRequestForSingleMessagesStream(std::string &stream) const {
+    RequestInfo ri;
+    ri.api = "/" + kConsumerProtocol.GetBrokerVersion() + "/beamtime/" + source_credentials_.beamtime_id + "/"
+        + source_credentials_.data_source +
+        +"/" + std::move(stream) + "/size";
+    return ri;
+}
+
+RequestInfo ConsumerImpl::GetVersionRequest() const {
+    RequestInfo ri;
+    ri.host = endpoint_;
+    ri.api = "/asapo-discovery/" + kConsumerProtocol.GetDiscoveryVersion() + "/version";
+    ri.extra_params = "&client=consumer&protocol=" + kConsumerProtocol.GetVersion();
+    return ri;
+}
+
// Queries the discovery service for server version/compatibility information.
// server_info and supported may each be null; they are forwarded to
// ExtractVersionFromResponse, which fills whichever pointers are non-null.
// Returns the transport error unchanged when the request itself fails.
Error ConsumerImpl::GetServerVersionInfo(std::string* server_info, bool* supported) {
    auto ri = GetVersionRequest();
    RequestOutput output;
    auto err = ProcessRequest(&output, ri, nullptr);
    if (err) {
        return err;
    }
    return ExtractVersionFromResponse(output.string_output,"consumer",server_info,supported);
}
+
+Error ConsumerImpl::GetVersionInfo(std::string* client_info, std::string* server_info, bool* supported) {
+    if (client_info == nullptr && server_info == nullptr && supported == nullptr) {
+        return ConsumerErrorTemplates::kWrongInput.Generate("missing parameters");
+    }
+    if (client_info != nullptr) {
+        *client_info =
+            "software version: " + std::string(kVersion) + ", consumer protocol: " + kConsumerProtocol.GetVersion();
+    }
+
+    if (server_info != nullptr || supported != nullptr) {
+        return GetServerVersionInfo(server_info,supported);
+    }
+
+    return nullptr;
+}
+
+}
\ No newline at end of file
diff --git a/consumer/api/cpp/src/consumer_impl.h b/consumer/api/cpp/src/consumer_impl.h
index 0697b5f96067597f58fa6bb43ff6461c8b01d109..ef0ee3ac8679b32d415dee65442565e28da66558 100644
--- a/consumer/api/cpp/src/consumer_impl.h
+++ b/consumer/api/cpp/src/consumer_impl.h
@@ -80,9 +80,11 @@ class ConsumerImpl final : public asapo::Consumer {
     std::string GetBeamtimeMeta(Error* err) override;
 
     uint64_t GetCurrentSize(std::string stream, Error* err) override;
+    uint64_t GetCurrentDatasetCount(std::string stream, bool include_incomplete, Error* err) override;
 
     Error GetById(uint64_t id, MessageMeta* info, MessageData* data, std::string stream) override;
 
+    Error GetVersionInfo(std::string* client_info,std::string* server_info, bool* supported) override;
 
     void SetTimeout(uint64_t timeout_ms) override;
     void ForceNoRdma() override;
@@ -99,7 +101,7 @@ class ConsumerImpl final : public asapo::Consumer {
 
     Error RetrieveData(MessageMeta* info, MessageData* data) override;
 
-    StreamInfos GetStreamList(std::string from, Error* err) override;
+    StreamInfos GetStreamList(std::string from, StreamFilter filter, Error* err) override;
     void SetResendNacs(bool resend, uint64_t delay_ms, uint64_t resend_attempts) override;
 
     virtual void InterruptCurrentOperation() override;
@@ -110,6 +112,7 @@ class ConsumerImpl final : public asapo::Consumer {
     std::unique_ptr<NetClient> net_client__;
     std::mutex net_client_mutex__; // Required for the lazy initialization of net_client
   private:
+    Error ProcessDiscoverServiceResult(Error err, std::string* uri_to_set);
     Error GetDataFromFileTransferService(MessageMeta* info, MessageData* data, bool retry_with_new_token);
     Error GetDataFromFile(MessageMeta* info, MessageData* data);
     static const std::string kBrokerServiceName;
@@ -138,10 +141,14 @@ class ConsumerImpl final : public asapo::Consumer {
     Error FtsSizeRequestWithTimeout(MessageMeta* info);
     Error ProcessPostRequest(const RequestInfo& request, RequestOutput* response, HttpCode* code);
     Error ProcessGetRequest(const RequestInfo& request, RequestOutput* response, HttpCode* code);
-
     RequestInfo PrepareRequestInfo(std::string api_url, bool dataset, uint64_t min_size);
     std::string OpToUriCmd(GetMessageServerOperation op);
     Error UpdateFolderTokenIfNeeded(bool ignore_existing);
+
+    uint64_t GetCurrentCount(std::string stream, const RequestInfo& ri, Error* err);
+    RequestInfo GetStreamListRequest(const std::string &from, const StreamFilter &filter) const;
+    Error GetServerVersionInfo(std::string* server_info, bool* supported) ;
+
     std::string endpoint_;
     std::string current_broker_uri_;
     std::string current_fts_uri_;
@@ -159,6 +166,13 @@ class ConsumerImpl final : public asapo::Consumer {
     uint64_t delay_ms_;
     uint64_t resend_attempts_;
     std::atomic<bool> interrupt_flag_{ false};
+
+  RequestInfo GetSizeRequestForSingleMessagesStream(std::string &stream) const;
+  RequestInfo GetSizeRequestForDatasetStream(std::string &stream, bool include_incomplete) const;
+  uint64_t ParseGetCurrentCountResponce(Error* err, const std::string &responce) const;
+  RequestInfo GetDiscoveryRequest(const std::string &service_name) const;
+  RequestInfo GetVersionRequest() const;
+
 };
 
 }
diff --git a/consumer/api/cpp/src/fabric_consumer_client.cpp b/consumer/api/cpp/src/fabric_consumer_client.cpp
index 8c80895cb4a1048fb10cab4128677e58f7dbe6b8..774513e1f4d7dab3abce541dfa3e4b1261d46d5b 100644
--- a/consumer/api/cpp/src/fabric_consumer_client.cpp
+++ b/consumer/api/cpp/src/fabric_consumer_client.cpp
@@ -3,6 +3,7 @@
 #include <iostream>
 #include "fabric_consumer_client.h"
 #include "rds_response_error.h"
+#include "asapo/common/internal/version.h"
 
 using namespace asapo;
 
@@ -33,6 +34,7 @@ Error FabricConsumerClient::GetData(const MessageMeta* info, MessageData* data)
     }
 
     GenericRequestHeader request_header{kOpcodeGetBufferData, info->buf_id, info->size};
+    strncpy(request_header.api_version, kConsumerProtocol.GetRdsVersion().c_str(), kMaxVersionSize);
     memcpy(request_header.message, mr->GetDetails(), sizeof(fabric::MemoryRegionDetails));
     GenericNetworkResponse response{};
 
diff --git a/consumer/api/cpp/src/rds_response_error.h b/consumer/api/cpp/src/rds_response_error.h
index dfd5859a66ca8e10c2fcaac54e8d87e74bdc1639..8bc8d64c53d24138e10b17458e8af4311df08391 100644
--- a/consumer/api/cpp/src/rds_response_error.h
+++ b/consumer/api/cpp/src/rds_response_error.h
@@ -12,6 +12,11 @@ namespace RdsResponseErrorTemplates {
 auto const kNetErrorReauthorize = RdsResponseErrorTemplate {
     "RDS response Reauthorize", NetworkErrorCode::kNetErrorReauthorize
 };
+
+auto const kNetErrorNotSupported = RdsResponseErrorTemplate {
+    "RDS unsupported client", NetworkErrorCode::kNetErrorNotSupported
+};
+
 auto const kNetErrorWarning = RdsResponseErrorTemplate {
     "RDS response Warning", NetworkErrorCode::kNetErrorWarning
 };
@@ -33,6 +38,8 @@ inline Error ConvertRdsResponseToError(NetworkErrorCode error_code) {
     switch (error_code) {
     case kNetErrorNoError:
         return nullptr;
+    case kNetErrorNotSupported:
+        return RdsResponseErrorTemplates::kNetErrorNotSupported.Generate();
     case kNetErrorReauthorize:
         return RdsResponseErrorTemplates::kNetErrorReauthorize.Generate();
     case kNetErrorWarning:
diff --git a/consumer/api/cpp/src/tcp_consumer_client.cpp b/consumer/api/cpp/src/tcp_consumer_client.cpp
index a2bcc349cb90152409dfc3a68f808cfeed68a5dd..d58f05d6bb3213beeb39ec7e0a539ed181a68345 100644
--- a/consumer/api/cpp/src/tcp_consumer_client.cpp
+++ b/consumer/api/cpp/src/tcp_consumer_client.cpp
@@ -2,6 +2,7 @@
 #include "asapo/io/io_factory.h"
 #include "asapo/common/networking.h"
 #include "rds_response_error.h"
+#include "asapo/common/internal/version.h"
 
 namespace asapo {
 
@@ -13,6 +14,7 @@ TcpConsumerClient::TcpConsumerClient() : io__{GenerateDefaultIO()}, connection_p
 Error TcpConsumerClient::SendGetDataRequest(SocketDescriptor sd, const MessageMeta* info) const noexcept {
     Error err;
     GenericRequestHeader request_header{kOpcodeGetBufferData, info->buf_id, info->size};
+    strncpy(request_header.api_version, kConsumerProtocol.GetRdsVersion().c_str(), kMaxVersionSize);
     io__->Send(sd, &request_header, sizeof(request_header), &err);
     if (err) {
         connection_pool__->ReleaseConnection(sd);
@@ -43,6 +45,10 @@ Error TcpConsumerClient::ReceiveResponce(SocketDescriptor sd) const noexcept {
     }
     if (response.error_code) {
         switch (response.error_code) {
+        case kNetErrorNotSupported:
+                io__->CloseSocket(sd, nullptr);
+                connection_pool__->ReleaseConnection(sd);
+                break;
         case kNetErrorWrongRequest:
             io__->CloseSocket(sd, nullptr);
             break;
diff --git a/consumer/api/cpp/unittests/test_consumer_impl.cpp b/consumer/api/cpp/unittests/test_consumer_impl.cpp
index 26020e46e30a5551f3bf1eb818fbadc8fd8eaaf0..6ad5448e5d336702803094aa454d40e4292e9555 100644
--- a/consumer/api/cpp/unittests/test_consumer_impl.cpp
+++ b/consumer/api/cpp/unittests/test_consumer_impl.cpp
@@ -14,6 +14,7 @@
 #include "asapo/http_client/http_error.h"
 #include "mocking.h"
 #include "../src/tcp_consumer_client.h"
+#include "asapo/common/internal/version.h"
 
 using asapo::ConsumerFactory;
 using asapo::Consumer;
@@ -66,6 +67,8 @@ class ConsumerImplTests : public Test {
   MessageMeta info;
   std::string expected_server_uri = "test:8400";
   std::string expected_broker_uri = "asapo-broker:5005";
+  std::string expected_consumer_protocol = "v0.1";
+  std::string expected_broker_api = expected_broker_uri + "/" + expected_consumer_protocol;
   std::string expected_fts_uri = "asapo-file-transfer:5008";
   std::string expected_token = "token";
   std::string expected_path = "/tmp/beamline/beamtime";
@@ -120,7 +123,7 @@ class ConsumerImplTests : public Test {
 
   }
   void MockGet(const std::string &response, asapo::HttpCode return_code = HttpCode::OK) {
-      EXPECT_CALL(mock_http_client, Get_t(HasSubstr(expected_broker_uri), _, _)).WillOnce(DoAll(
+      EXPECT_CALL(mock_http_client, Get_t(HasSubstr(expected_broker_api), _, _)).WillOnce(DoAll(
           SetArgPointee<1>(return_code),
           SetArgPointee<2>(nullptr),
           Return(response)
@@ -128,14 +131,15 @@ class ConsumerImplTests : public Test {
   }
 
   void MockGetError() {
-      EXPECT_CALL(mock_http_client, Get_t(HasSubstr(expected_broker_uri), _, _)).WillOnce(DoAll(
+      EXPECT_CALL(mock_http_client, Get_t(HasSubstr(expected_broker_api), _, _)).WillOnce(DoAll(
           SetArgPointee<1>(HttpCode::NotFound),
           SetArgPointee<2>(asapo::IOErrorTemplates::kUnknownIOError.Generate().release()),
           Return("")
       ));
   }
   void MockGetServiceUri(std::string service, std::string result) {
-      EXPECT_CALL(mock_http_client, Get_t(HasSubstr(expected_server_uri + "/asapo-discovery/" + service), _,
+      EXPECT_CALL(mock_http_client, Get_t(HasSubstr(expected_server_uri + "/asapo-discovery/v0.1/" + service+"?token="
+          + expected_token+"&protocol=v0.1"), _,
                                           _)).WillOnce(DoAll(
           SetArgPointee<1>(HttpCode::OK),
           SetArgPointee<2>(nullptr),
@@ -200,7 +204,7 @@ TEST_F(ConsumerImplTests, DefaultStreamIsDetector) {
     MockGetBrokerUri();
 
     EXPECT_CALL(mock_http_client,
-                Get_t(expected_broker_uri + "/database/beamtime_id/detector/stream/" + expected_group_id
+                Get_t(expected_broker_api + "/beamtime/beamtime_id/detector/stream/" + expected_group_id
                           +
                               "/next?token="
                           + expected_token, _,
@@ -215,7 +219,7 @@ TEST_F(ConsumerImplTests, DefaultStreamIsDetector) {
 TEST_F(ConsumerImplTests, GetNextUsesCorrectUriWithStream) {
     MockGetBrokerUri();
 
-    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/" +
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/" +
                                             expected_stream + "/" + expected_group_id + "/next?token="
                                             + expected_token, _,
                                         _)).WillOnce(DoAll(
@@ -229,7 +233,7 @@ TEST_F(ConsumerImplTests, GetLastUsesCorrectUri) {
     MockGetBrokerUri();
 
     EXPECT_CALL(mock_http_client,
-                Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/"+ expected_stream+"/0/last?token="
+                Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/"+ expected_stream+"/0/last?token="
                           + expected_token, _,
                       _)).WillOnce(DoAll(
         SetArgPointee<1>(HttpCode::OK),
@@ -322,7 +326,7 @@ TEST_F(ConsumerImplTests, GetMessageReturnsWrongResponseFromHttpClient) {
 }
 
 TEST_F(ConsumerImplTests, GetMessageReturnsIfBrokerAddressNotFound) {
-    EXPECT_CALL(mock_http_client, Get_t(HasSubstr(expected_server_uri + "/asapo-discovery/asapo-broker"), _,
+    EXPECT_CALL(mock_http_client, Get_t(HasSubstr(expected_server_uri + "/asapo-discovery/v0.1/asapo-broker"), _,
                                         _)).Times(AtLeast(2)).WillRepeatedly(DoAll(
         SetArgPointee<1>(HttpCode::NotFound),
         SetArgPointee<2>(nullptr),
@@ -334,8 +338,22 @@ TEST_F(ConsumerImplTests, GetMessageReturnsIfBrokerAddressNotFound) {
     ASSERT_THAT(err->Explain(), AllOf(HasSubstr(expected_server_uri), HasSubstr("unavailable")));
 }
 
// An UnsupportedMediaType reply from the discovery endpoint must surface to
// the caller as kUnsupportedClient (client/server protocol mismatch).
TEST_F(ConsumerImplTests, GetMessageReturnsUnsupportedClient) {
    // Discovery keeps returning 415 for every retry within the timeout window.
    EXPECT_CALL(mock_http_client, Get_t(HasSubstr(expected_server_uri + "/asapo-discovery/v0.1/asapo-broker"), _,
                                        _)).Times(AtLeast(2)).WillRepeatedly(DoAll(
        SetArgPointee<1>(HttpCode::UnsupportedMediaType),
        SetArgPointee<2>(nullptr),
        Return("")));

    consumer->SetTimeout(100);  // short timeout keeps the retry loop fast
    auto err = consumer->GetNext(expected_group_id, &info, nullptr, expected_stream);

    ASSERT_THAT(err, Eq(asapo::ConsumerErrorTemplates::kUnsupportedClient));
}
+
+
 TEST_F(ConsumerImplTests, GetMessageReturnsIfBrokerUriEmpty) {
-    EXPECT_CALL(mock_http_client, Get_t(HasSubstr(expected_server_uri + "/asapo-discovery/asapo-broker"), _,
+    EXPECT_CALL(mock_http_client, Get_t(HasSubstr(expected_server_uri + "/asapo-discovery/v0.1/asapo-broker"), _,
                                         _)).Times(AtLeast(2)).WillRepeatedly(DoAll(
         SetArgPointee<1>(HttpCode::OK),
         SetArgPointee<2>(nullptr),
@@ -356,7 +374,7 @@ TEST_F(ConsumerImplTests, GetDoNotCallBrokerUriIfAlreadyFound) {
     Mock::VerifyAndClearExpectations(&mock_http_client);
 
     EXPECT_CALL(mock_http_client,
-                Get_t(HasSubstr(expected_server_uri + "/asapo-discovery/asap-broker"), _, _)).Times(0);
+                Get_t(_, _, _)).Times(0);
     MockGet("error_response");
     consumer->GetNext(expected_group_id, &info, nullptr, expected_stream);
 }
@@ -398,7 +416,7 @@ TEST_F(ConsumerImplTests, GetMessageReturnsNoDataAfterTimeoutEvenIfOtherErrorOcc
         Return("{\"op\":\"get_record_by_id\",\"id\":" + std::to_string(expected_dataset_id) +
             ",\"id_max\":2,\"next_stream\":\"""\"}")));
 
-    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/stream/0/"
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/stream/0/"
                                             + std::to_string(expected_dataset_id) + "?token="
                                             + expected_token, _, _)).Times(AtLeast(1)).WillRepeatedly(DoAll(
         SetArgPointee<1>(HttpCode::NotFound),
@@ -431,7 +449,7 @@ ACTION(AssignArg2) {
 }
 
 TEST_F(ConsumerImplTests, GetNextRetriesIfConnectionHttpClientErrorUntilTimeout) {
-    EXPECT_CALL(mock_http_client, Get_t(HasSubstr(expected_server_uri + "/asapo-discovery/asapo-broker"), _,
+    EXPECT_CALL(mock_http_client, Get_t(HasSubstr(expected_server_uri + "/asapo-discovery/v0.1/asapo-broker"), _,
                                         _)).Times(AtLeast(2)).WillRepeatedly(DoAll(
         SetArgPointee<1>(HttpCode::OK),
         SetArgPointee<2>(nullptr),
@@ -564,7 +582,7 @@ TEST_F(ConsumerImplTests, GenerateNewGroupIdReturnsErrorCreateGroup) {
 TEST_F(ConsumerImplTests, GenerateNewGroupIdReturnsGroupID) {
     MockGetBrokerUri();
 
-    EXPECT_CALL(mock_http_client, Post_t(expected_broker_uri + "/creategroup?token=" + expected_token, _, "", _,
+    EXPECT_CALL(mock_http_client, Post_t(expected_broker_api + "/creategroup?token=" + expected_token, _, "", _,
                                          _)).WillOnce(DoAll(
         SetArgPointee<3>(HttpCode::OK),
         SetArgPointee<4>(nullptr),
@@ -582,7 +600,7 @@ TEST_F(ConsumerImplTests, ResetCounterByDefaultUsesCorrectUri) {
     consumer->SetTimeout(100);
 
     EXPECT_CALL(mock_http_client,
-                Post_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/stream/" +
+                Post_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/stream/" +
                     expected_group_id +
                     "/resetcounter?token=" + expected_token + "&value=0", _, _, _, _)).WillOnce(DoAll(
         SetArgPointee<3>(HttpCode::OK),
@@ -596,7 +614,7 @@ TEST_F(ConsumerImplTests, ResetCounterUsesCorrectUri) {
     MockGetBrokerUri();
     consumer->SetTimeout(100);
 
-    EXPECT_CALL(mock_http_client, Post_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/" +
+    EXPECT_CALL(mock_http_client, Post_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/" +
         expected_stream + "/" +
         expected_group_id +
         "/resetcounter?token=" + expected_token + "&value=10", _, _, _, _)).WillOnce(DoAll(
@@ -611,7 +629,7 @@ TEST_F(ConsumerImplTests, GetCurrentSizeUsesCorrectUri) {
     MockGetBrokerUri();
     consumer->SetTimeout(100);
 
-    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/" +
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/" +
         expected_stream + "/size?token="
                                             + expected_token, _, _)).WillOnce(DoAll(
         SetArgPointee<1>(HttpCode::OK),
@@ -627,7 +645,7 @@ TEST_F(ConsumerImplTests, GetCurrentSizeErrorOnWrongResponce) {
     MockGetBrokerUri();
     consumer->SetTimeout(100);
 
-    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source +
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source +
         "/"+expected_stream+"/size?token="
                                             + expected_token, _, _)).WillRepeatedly(DoAll(
         SetArgPointee<1>(HttpCode::Unauthorized),
@@ -643,7 +661,7 @@ TEST_F(ConsumerImplTests, GetNDataErrorOnWrongParse) {
     MockGetBrokerUri();
     consumer->SetTimeout(100);
 
-    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source +
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source +
         "/stream/size?token="
                                             + expected_token, _, _)).WillOnce(DoAll(
         SetArgPointee<1>(HttpCode::OK),
@@ -661,7 +679,7 @@ TEST_F(ConsumerImplTests, GetByIdUsesCorrectUri) {
     auto to_send = CreateFI();
     auto json = to_send.Json();
 
-    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/stream/0/"
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/stream/0/"
                                             + std::to_string(
                                                 expected_dataset_id) + "?token="
                                             + expected_token, _,
@@ -680,7 +698,7 @@ TEST_F(ConsumerImplTests, GetByIdTimeouts) {
     MockGetBrokerUri();
     consumer->SetTimeout(10);
 
-    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/stream/0/"
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/stream/0/"
                                             + std::to_string(expected_dataset_id) + "?token="
                                             + expected_token, _, _)).WillOnce(DoAll(
         SetArgPointee<1>(HttpCode::Conflict),
@@ -696,7 +714,7 @@ TEST_F(ConsumerImplTests, GetByIdReturnsEndOfStream) {
     MockGetBrokerUri();
     consumer->SetTimeout(10);
 
-    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/stream/0/"
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/stream/0/"
                                             + std::to_string(expected_dataset_id) + "?token="
                                             + expected_token, _, _)).WillOnce(DoAll(
         SetArgPointee<1>(HttpCode::Conflict),
@@ -712,7 +730,7 @@ TEST_F(ConsumerImplTests, GetByIdReturnsEndOfStreamWhenIdTooLarge) {
     MockGetBrokerUri();
     consumer->SetTimeout(10);
 
-    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/stream/0/"
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/stream/0/"
                                             + std::to_string(expected_dataset_id) + "?token="
                                             + expected_token, _, _)).WillOnce(DoAll(
         SetArgPointee<1>(HttpCode::Conflict),
@@ -728,7 +746,7 @@ TEST_F(ConsumerImplTests, GetMetaDataOK) {
     MockGetBrokerUri();
     consumer->SetTimeout(100);
 
-    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source +
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source +
                                             "/default/0/meta/0?token="
                                             + expected_token, _,
                                         _)).WillOnce(DoAll(
@@ -834,7 +852,7 @@ TEST_F(ConsumerImplTests, QueryMessagesReturnRecords) {
     auto responce_string = "[" + json1 + "," + json2 + "]";
 
     EXPECT_CALL(mock_http_client,
-                Post_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/stream/0" +
+                Post_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/stream/0" +
                     "/querymessages?token=" + expected_token, _, expected_query_string, _, _)).WillOnce(DoAll(
         SetArgPointee<3>(HttpCode::OK),
         SetArgPointee<4>(nullptr),
@@ -854,7 +872,7 @@ TEST_F(ConsumerImplTests, QueryMessagesReturnRecords) {
 TEST_F(ConsumerImplTests, GetNextDatasetUsesCorrectUri) {
     MockGetBrokerUri();
 
-    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/stream/" +
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/stream/" +
                                             expected_group_id + "/next?token="
                                             + expected_token + "&dataset=true&minsize=0", _,
                                         _)).WillOnce(DoAll(
@@ -989,7 +1007,7 @@ TEST_F(ConsumerImplTests, GetDataSetReturnsParseError) {
 TEST_F(ConsumerImplTests, GetLastDatasetUsesCorrectUri) {
     MockGetBrokerUri();
 
-    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/" +
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/" +
                                             expected_stream + "/0/last?token="
                                             + expected_token + "&dataset=true&minsize=1", _,
                                         _)).WillOnce(DoAll(
@@ -1003,7 +1021,7 @@ TEST_F(ConsumerImplTests, GetLastDatasetUsesCorrectUri) {
 TEST_F(ConsumerImplTests, GetDatasetByIdUsesCorrectUri) {
     MockGetBrokerUri();
 
-    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/stream/0/"
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/stream/0/"
                                             + std::to_string(expected_dataset_id) + "?token="
                                             + expected_token + "&dataset=true" + "&minsize=0", _,
                                         _)).WillOnce(DoAll(
@@ -1017,36 +1035,37 @@ TEST_F(ConsumerImplTests, GetDatasetByIdUsesCorrectUri) {
 TEST_F(ConsumerImplTests, GetStreamListUsesCorrectUri) {
     MockGetBrokerUri();
     std::string return_streams =
-        R"({"streams":[{"lastId":123,"name":"test","timestampCreated":1000000},{"name":"test1","timestampCreated":2000000}]})";
+        std::string(R"({"streams":[{"lastId":123,"name":"test","timestampCreated":1000000,"timestampLast":1000,"finished":false,"nextStream":""},)")+
+        R"({"lastId":124,"name":"test1","timestampCreated":2000000,"timestampLast":2000,"finished":true,"nextStream":"next"}]})";
     EXPECT_CALL(mock_http_client,
-                Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/0/streams"
-                          + "?token=" + expected_token + "&from=stream_from", _,
+                Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/0/streams"
+                          + "?token=" + expected_token + "&from=stream_from&filter=all", _,
                       _)).WillOnce(DoAll(
         SetArgPointee<1>(HttpCode::OK),
         SetArgPointee<2>(nullptr),
         Return(return_streams)));
 
     asapo::Error err;
-    auto streams = consumer->GetStreamList("stream_from", &err);
+    auto streams = consumer->GetStreamList("stream_from",asapo::StreamFilter::kAllStreams, &err);
     ASSERT_THAT(err, Eq(nullptr));
     ASSERT_THAT(streams.size(), Eq(2));
     ASSERT_THAT(streams.size(), 2);
-    ASSERT_THAT(streams[0].Json(false), R"({"name":"test","timestampCreated":1000000})");
-    ASSERT_THAT(streams[1].Json(false), R"({"name":"test1","timestampCreated":2000000})");
+    ASSERT_THAT(streams[0].Json(), R"({"lastId":123,"name":"test","timestampCreated":1000000,"timestampLast":1000,"finished":false,"nextStream":""})");
+    ASSERT_THAT(streams[1].Json(), R"({"lastId":124,"name":"test1","timestampCreated":2000000,"timestampLast":2000,"finished":true,"nextStream":"next"})");
 }
 
 TEST_F(ConsumerImplTests, GetStreamListUsesCorrectUriWithoutFrom) {
     MockGetBrokerUri();
     EXPECT_CALL(mock_http_client,
-                Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/0/streams"
-                          + "?token=" + expected_token, _,
+                Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/0/streams"
+                          + "?token=" + expected_token+"&filter=finished", _,
                       _)).WillOnce(DoAll(
         SetArgPointee<1>(HttpCode::OK),
         SetArgPointee<2>(nullptr),
         Return("")));;
 
     asapo::Error err;
-    auto streams = consumer->GetStreamList("", &err);
+    auto streams = consumer->GetStreamList("",asapo::StreamFilter::kFinishedStreams, &err);
 }
 
 void ConsumerImplTests::MockBeforeFTS(MessageData* data) {
@@ -1063,7 +1082,7 @@ void ConsumerImplTests::ExpectFolderToken() {
         expected_beamtime_id
         + "\",\"Token\":\"" + expected_token + "\"}";
 
-    EXPECT_CALL(mock_http_client, Post_t(HasSubstr(expected_server_uri + "/asapo-authorizer/folder"), _,
+    EXPECT_CALL(mock_http_client, Post_t(HasSubstr(expected_server_uri + "/asapo-authorizer/v0.1/folder"), _,
                                          expected_folder_query_string, _, _)).WillOnce(DoAll(
         SetArgPointee<3>(HttpCode::OK),
         SetArgPointee<4>(nullptr),
@@ -1081,7 +1100,7 @@ ACTION_P(AssignArg3, assign) {
 }
 
 void ConsumerImplTests::ExpectFileTransfer(const asapo::ConsumerErrorTemplate* p_err_template) {
-    EXPECT_CALL(mock_http_client, PostReturnArray_t(HasSubstr(expected_fts_uri + "/transfer"),
+    EXPECT_CALL(mock_http_client, PostReturnArray_t(HasSubstr(expected_fts_uri + "/v0.1/transfer"),
                                                     expected_cookie,
                                                     expected_fts_query_string,
                                                     _,
@@ -1094,7 +1113,7 @@ void ConsumerImplTests::ExpectFileTransfer(const asapo::ConsumerErrorTemplate* p
 }
 
 void ConsumerImplTests::ExpectRepeatedFileTransfer() {
-    EXPECT_CALL(mock_http_client, PostReturnArray_t(HasSubstr(expected_fts_uri + "/transfer"),
+    EXPECT_CALL(mock_http_client, PostReturnArray_t(HasSubstr(expected_fts_uri + "/v0.1/transfer"),
                                                     expected_cookie,
                                                     expected_fts_query_string,
                                                     _,
@@ -1139,7 +1158,7 @@ TEST_F(ConsumerImplTests, FileTransferReadsFileSize) {
         Return("{\"file_size\":5}")
     ));
 
-    EXPECT_CALL(mock_http_client, PostReturnArray_t(HasSubstr(expected_fts_uri + "/transfer"),
+    EXPECT_CALL(mock_http_client, PostReturnArray_t(HasSubstr(expected_fts_uri + "/v0.1/transfer"),
                                                     expected_cookie,
                                                     expected_fts_query_string,
                                                     _,
@@ -1180,7 +1199,7 @@ TEST_F(ConsumerImplTests, GetMessageTriesToGetTokenAgainIfTransferFailed) {
 TEST_F(ConsumerImplTests, AcknowledgeUsesCorrectUri) {
     MockGetBrokerUri();
     auto expected_acknowledge_command = "{\"Op\":\"ackmessage\"}";
-    EXPECT_CALL(mock_http_client, Post_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/" +
+    EXPECT_CALL(mock_http_client, Post_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/" +
         expected_stream + "/" +
         expected_group_id
                                              + "/" + std::to_string(expected_dataset_id) + "?token="
@@ -1196,7 +1215,7 @@ TEST_F(ConsumerImplTests, AcknowledgeUsesCorrectUri) {
 
 void ConsumerImplTests::ExpectIdList(bool error) {
     MockGetBrokerUri();
-    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/" +
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/" +
         expected_stream + "/" +
         expected_group_id + "/nacks?token=" + expected_token + "&from=1&to=0", _, _)).WillOnce(DoAll(
         SetArgPointee<1>(HttpCode::OK),
@@ -1214,7 +1233,7 @@ TEST_F(ConsumerImplTests, GetUnAcknowledgedListReturnsIds) {
 }
 
 void ConsumerImplTests::ExpectLastAckId(bool empty_response) {
-    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/" +
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/" +
         expected_stream + "/" +
         expected_group_id + "/lastack?token=" + expected_token, _, _)).WillOnce(DoAll(
         SetArgPointee<1>(HttpCode::OK),
@@ -1252,7 +1271,7 @@ TEST_F(ConsumerImplTests, GetByIdErrorsForId0) {
 TEST_F(ConsumerImplTests, ResendNacks) {
     MockGetBrokerUri();
 
-    EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/stream/"
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/stream/"
                                             + expected_group_id + "/next?token="
                                             + expected_token + "&resend_nacks=true&delay_ms=10000&resend_attempts=3", _,
                                         _)).WillOnce(DoAll(
@@ -1267,7 +1286,7 @@ TEST_F(ConsumerImplTests, ResendNacks) {
 TEST_F(ConsumerImplTests, NegativeAcknowledgeUsesCorrectUri) {
     MockGetBrokerUri();
     auto expected_neg_acknowledge_command = R"({"Op":"negackmessage","Params":{"DelayMs":10000}})";
-    EXPECT_CALL(mock_http_client, Post_t(expected_broker_uri + "/database/beamtime_id/" + expected_data_source + "/" +
+    EXPECT_CALL(mock_http_client, Post_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/" +
         expected_stream + "/" +
         expected_group_id
                                              + "/" + std::to_string(expected_dataset_id) + "?token="
@@ -1307,4 +1326,47 @@ TEST_F(ConsumerImplTests, CanInterruptOperation) {
 
 }
 
+
+TEST_F(ConsumerImplTests, GetCurrentDatasetCountUsesCorrectUri) {
+    MockGetBrokerUri();
+    consumer->SetTimeout(100);
+
+    EXPECT_CALL(mock_http_client, Get_t(expected_broker_api + "/beamtime/beamtime_id/" + expected_data_source + "/" +
+        expected_stream + "/size?token="
+                                            + expected_token+"&incomplete=true", _, _)).WillOnce(DoAll(
+        SetArgPointee<1>(HttpCode::OK),
+        SetArgPointee<2>(nullptr),
+        Return("{\"size\":10}")));
+    asapo::Error err;
+    auto size = consumer->GetCurrentDatasetCount(expected_stream,true, &err);
+    ASSERT_THAT(err, Eq(nullptr));
+    ASSERT_THAT(size, Eq(10));
+}
+
+
+TEST_F(ConsumerImplTests, GetVersionInfoClientOnly) {
+    std::string client_info;
+    auto err = consumer->GetVersionInfo(&client_info,nullptr,nullptr);
+    ASSERT_THAT(err, Eq(nullptr));
+    ASSERT_THAT(client_info, HasSubstr(std::string(asapo::kVersion)));
+    ASSERT_THAT(client_info, HasSubstr(asapo::kConsumerProtocol.GetVersion()));
+}
+
+TEST_F(ConsumerImplTests, GetVersionInfoWithServer) {
+
+    std::string result = R"({"softwareVersion":"20.03.1, build 7a9294ad","clientSupported":"no", "clientProtocol":{"versionInfo":"v0.2"}})";
+
+    EXPECT_CALL(mock_http_client, Get_t(HasSubstr(expected_server_uri + "/asapo-discovery/v0.1/version?token=token&client=consumer&protocol=v0.1"), _,_)).WillOnce(DoAll(
+        SetArgPointee<1>(HttpCode::OK),
+        SetArgPointee<2>(nullptr),
+        Return(result)));
+
+    std::string client_info,server_info;
+    auto err = consumer->GetVersionInfo(&client_info,&server_info,nullptr);
+    ASSERT_THAT(err, Eq(nullptr));
+    ASSERT_THAT(server_info, HasSubstr("20.03.1"));
+    ASSERT_THAT(server_info, HasSubstr("v0.2"));
+}
+
+
 }
diff --git a/consumer/api/cpp/unittests/test_fabric_consumer_client.cpp b/consumer/api/cpp/unittests/test_fabric_consumer_client.cpp
index 9e32cd3842cc11d96003b342ade885a0d8d7e8a7..dcbd311e16fef382fa1f1d710eab1041912ddf78 100644
--- a/consumer/api/cpp/unittests/test_fabric_consumer_client.cpp
+++ b/consumer/api/cpp/unittests/test_fabric_consumer_client.cpp
@@ -33,6 +33,7 @@ MATCHER_P6(M_CheckSendRequest, op_code, buf_id, data_size, mr_addr, mr_length, m
            && data->data_id == uint64_t(buf_id)
            && data->data_size == uint64_t(data_size)
            && mr->addr == uint64_t(mr_addr)
+           && strcmp(data->api_version, "v0.1") == 0
            && mr->length == uint64_t(mr_length)
            && mr->key == uint64_t(mr_key);
 }
diff --git a/consumer/api/cpp/unittests/test_rds_error_mapper.cpp b/consumer/api/cpp/unittests/test_rds_error_mapper.cpp
index f19dc56022525914a17fe0b04fee1340571c4216..c5161d0ee8fc35c46d549a968059e7c7ee64aba1 100644
--- a/consumer/api/cpp/unittests/test_rds_error_mapper.cpp
+++ b/consumer/api/cpp/unittests/test_rds_error_mapper.cpp
@@ -19,4 +19,6 @@ TEST(ConvertRdsResponseToError, TestAllCases) {
                 Eq(RdsResponseErrorTemplates::kNetAuthorizationError));
     ASSERT_THAT(ConvertRdsResponseToError(NetworkErrorCode::kNetErrorInternalServerError),
                 Eq(RdsResponseErrorTemplates::kNetErrorInternalServerError));
+    ASSERT_THAT(ConvertRdsResponseToError(NetworkErrorCode::kNetErrorNotSupported),
+                Eq(RdsResponseErrorTemplates::kNetErrorNotSupported));
 }
diff --git a/consumer/api/cpp/unittests/test_tcp_consumer_client.cpp b/consumer/api/cpp/unittests/test_tcp_consumer_client.cpp
index b1df9c9db1e6bd84ae1c21aba9968e0bc7c82a36..ebf4102b3aa4931b9efd884a83acf134e3168b8e 100644
--- a/consumer/api/cpp/unittests/test_tcp_consumer_client.cpp
+++ b/consumer/api/cpp/unittests/test_tcp_consumer_client.cpp
@@ -44,6 +44,7 @@ MATCHER_P4(M_CheckSendRequest, op_code, buf_id, data_size, message,
     return ((asapo::GenericRequestHeader*) arg)->op_code == op_code
            && ((asapo::GenericRequestHeader*) arg)->data_id == uint64_t(buf_id)
            && ((asapo::GenericRequestHeader*) arg)->data_size == uint64_t(data_size)
+           && strcmp(((asapo::GenericRequestHeader*) arg)->api_version, "v0.1") == 0
            && strcmp(((asapo::GenericRequestHeader*) arg)->message, message) == 0;
 }
 
@@ -212,6 +213,19 @@ TEST_F(TcpClientTests, GetResponceReturnsWrongRequest) {
     ASSERT_THAT(err, Ne(nullptr));
 }
 
+TEST_F(TcpClientTests, GetResponceReturnsUnsupported) {
+    ExpectNewConnection(false, true);
+    ExpectSendRequest(expected_sd, true);
+    ExpectGetResponce(expected_sd, true, asapo::kNetErrorNotSupported);
+    EXPECT_CALL(mock_io, CloseSocket_t(expected_sd, _));
+    EXPECT_CALL(mock_connection_pool, ReleaseConnection(expected_sd));
+
+    auto err = client->GetData(&info, &data);
+
+    ASSERT_THAT(err, Ne(nullptr));
+}
+
+
 TEST_F(TcpClientTests, ErrorGettingData) {
     ExpectNewConnection(false, true);
     ExpectSendRequest(expected_sd, true);
diff --git a/consumer/api/python/asapo_consumer.pxd b/consumer/api/python/asapo_consumer.pxd
index e309bd22dfdf54ff4270dc90896971e2a80fd1cb..fa755a375a83d05f4469cb4fad4030ea0130caf2 100644
--- a/consumer/api/python/asapo_consumer.pxd
+++ b/consumer/api/python/asapo_consumer.pxd
@@ -13,7 +13,7 @@ cdef extern from "asapo/asapo_consumer.h" namespace "asapo":
     pass
   cppclass ErrorInterface:
     string Explain()
-    const CustomErrorData* GetCustomData()
+    CustomErrorData* GetCustomData()
   cppclass ErrorTemplateInterface:
     pass
   cdef bool operator==(Error lhs, ErrorTemplateInterface rhs)
@@ -46,8 +46,7 @@ cdef extern from "asapo/asapo_consumer.h" namespace "asapo":
     string data_source
     string user_token
   cppclass StreamInfo:
-    string Json(bool add_last_id)
-    bool SetFromJson(string json_str, bool read_last_id)
+    string Json()
 
 cdef extern from "asapo/asapo_consumer.h" namespace "asapo":
   cppclass NetworkConnectionType:
@@ -55,6 +54,11 @@ cdef extern from "asapo/asapo_consumer.h" namespace "asapo":
   NetworkConnectionType NetworkConnectionType_kUndefined "asapo::NetworkConnectionType::kUndefined"
   NetworkConnectionType NetworkConnectionType_kAsapoTcp "asapo::NetworkConnectionType::kAsapoTcp"
   NetworkConnectionType NetworkConnectionType_kFabric "asapo::NetworkConnectionType::kFabric"
+  cppclass StreamFilter:
+    pass
+  StreamFilter StreamFilter_kAllStreams "asapo::StreamFilter::kAllStreams"
+  StreamFilter StreamFilter_kFinishedStreams "asapo::StreamFilter::kFinishedStreams"
+  StreamFilter StreamFilter_kUnfinishedStreams "asapo::StreamFilter::kUnfinishedStreams"
 
 cdef extern from "asapo/asapo_consumer.h" namespace "asapo" nogil:
     cdef cppclass Consumer:
@@ -66,6 +70,7 @@ cdef extern from "asapo/asapo_consumer.h" namespace "asapo" nogil:
         Error GetLast(MessageMeta* info, MessageData* data, string stream)
         Error GetById(uint64_t id, MessageMeta* info, MessageData* data, string stream)
         uint64_t GetCurrentSize(string stream, Error* err)
+        uint64_t GetCurrentDatasetCount(string stream, bool include_incomplete, Error* err)
         Error SetLastReadMarker(string group_id, uint64_t value, string stream)
         Error ResetLastReadMarker(string group_id, string stream)
         Error Acknowledge(string group_id, uint64_t id, string stream)
@@ -79,9 +84,10 @@ cdef extern from "asapo/asapo_consumer.h" namespace "asapo" nogil:
         DataSet GetLastDataset(uint64_t min_size, string stream, Error* err)
         DataSet GetDatasetById(uint64_t id, uint64_t min_size, string stream, Error* err)
         Error RetrieveData(MessageMeta* info, MessageData* data)
-        vector[StreamInfo] GetStreamList(string from_stream, Error* err)
+        vector[StreamInfo] GetStreamList(string from_stream, StreamFilter filter, Error* err)
         void SetResendNacs(bool resend, uint64_t delay_ms, uint64_t resend_attempts)
         void InterruptCurrentOperation()
+        Error GetVersionInfo(string* client_info,string* server_info, bool* supported)
 
 cdef extern from "asapo/asapo_consumer.h" namespace "asapo" nogil:
     cdef cppclass ConsumerFactory:
@@ -98,6 +104,8 @@ cdef extern from "asapo/asapo_consumer.h" namespace "asapo":
   ErrorTemplateInterface kLocalIOError "asapo::ConsumerErrorTemplates::kLocalIOError"
   ErrorTemplateInterface kWrongInput "asapo::ConsumerErrorTemplates::kWrongInput"
   ErrorTemplateInterface kPartialData "asapo::ConsumerErrorTemplates::kPartialData"
+  ErrorTemplateInterface kUnsupportedClient "asapo::ConsumerErrorTemplates::kUnsupportedClient"
+
 
   cdef cppclass ConsumerErrorData:
     uint64_t id
diff --git a/consumer/api/python/asapo_consumer.pyx.in b/consumer/api/python/asapo_consumer.pyx.in
index 399b510ed75bf03734ad8435b9c93f11b33770ab..1db15ed4a72531c1bb9403c632a4ea9231f4da58 100644
--- a/consumer/api/python/asapo_consumer.pyx.in
+++ b/consumer/api/python/asapo_consumer.pyx.in
@@ -46,6 +46,9 @@ class AsapoUnavailableServiceError(AsapoConsumerError):
 class AsapoLocalIOError(AsapoConsumerError):
   pass
 
+class AsapoUnsupportedClientError(AsapoConsumerError):
+  pass
+
 class AsapoStreamFinishedError(AsapoConsumerError):
   def __init__(self,message,id_max=None,next_stream=None):
     AsapoConsumerError.__init__(self,message)
@@ -99,11 +102,22 @@ cdef throw_exception(Error& err, res = None):
             raise AsapoUnavailableServiceError(error_string)
     elif err == kInterruptedTransaction:
             raise AsapoInterruptedTransactionError(error_string)
+    elif err == kUnsupportedClient:
+            raise AsapoUnsupportedClientError(error_string)
     else:
         raise AsapoConsumerError(error_string)
 
 cdef class PyConsumer:
     cdef unique_ptr[Consumer] c_consumer
+    cdef StreamFilter _filter_to_cfilter(self,filter) except + :
+        if filter == "all":
+            return StreamFilter_kAllStreams
+        elif filter == "finished":
+            return StreamFilter_kFinishedStreams
+        elif filter == "unfinished":
+            return StreamFilter_kUnfinishedStreams
+        else:
+            raise AsapoWrongInputError("wrong filter, must be all|finished|unfinished")
     def _op(self, op, group_id, stream, meta_only, uint64_t id):
         cdef MessageMeta info
         cdef string b_group_id = _bytes(group_id)
@@ -167,6 +181,16 @@ cdef class PyConsumer:
         if err:
             throw_exception(err)
         return size
+    def get_current_dataset_count(self, stream = "default", bool include_incomplete = False):
+        cdef Error err
+        cdef uint64_t size
+        cdef string b_stream = _bytes(stream)
+        with nogil:
+            size =  self.c_consumer.get().GetCurrentDatasetCount(b_stream,include_incomplete,&err)
+        err_str = _str(GetErrorString(&err))
+        if err:
+            throw_exception(err)
+        return size
     def set_timeout(self,timeout):
         self.c_consumer.get().SetTimeout(timeout)
     def force_no_rdma(self):
@@ -192,6 +216,21 @@ cdef class PyConsumer:
         if err:
             throw_exception(err)
         return
+    def get_version_info(self, from_server = True):
+        cdef string client_info,server_info
+        cdef bool supported
+        cdef string* p_server_info =  &server_info if from_server else <string*>NULL
+        cdef bool* p_supported =  &supported if from_server else <bool*>NULL
+        cdef Error err
+        with nogil:
+                err =  self.c_consumer.get().GetVersionInfo(&client_info,p_server_info,p_supported)
+        if err:
+            throw_exception(err)
+        version = {}
+        if from_server:
+            return {'client': _str(client_info), 'server': _str(server_info), 'supported': supported}
+        else:
+            return {'client': _str(client_info)}
     def reset_lastread_marker(self,group_id, stream = "default"):
         cdef string b_group_id = _bytes(group_id)
         cdef string b_stream = _bytes(stream)
@@ -209,17 +248,18 @@ cdef class PyConsumer:
         if err:
             throw_exception(err)
         return _str(group_id)
-    def get_stream_list(self, from_stream = ""):
+    def get_stream_list(self,from_stream = "",filter="all"):
         cdef Error err
         cdef vector[StreamInfo] streams
         cdef string b_from_stream = _bytes(from_stream)
+        cdef StreamFilter stream_filter = self._filter_to_cfilter(filter)
         with nogil:
-            streams = self.c_consumer.get().GetStreamList(b_from_stream,&err)
+            streams = self.c_consumer.get().GetStreamList(b_from_stream,stream_filter,&err)
         if err:
             throw_exception(err)
         list = []
         for stream in streams:
-            list.append(json.loads(_str(stream.Json(False))))
+            list.append(json.loads(_str(stream.Json())))
         return list
     def acknowledge(self, group_id, uint64_t id, stream = "default"):
         cdef string b_group_id = _bytes(group_id)
diff --git a/deploy/asapo_helm_chart/asapo/configs/asapo-authorizer.json b/deploy/asapo_helm_chart/asapo/configs/asapo-authorizer.json
index 76d3a8480012356fccf9daa46e676e6595b00ca4..5225327c48c0208094f1bd8906975e8723c0f09e 100644
--- a/deploy/asapo_helm_chart/asapo/configs/asapo-authorizer.json
+++ b/deploy/asapo_helm_chart/asapo/configs/asapo-authorizer.json
@@ -6,7 +6,8 @@
   {"beamtimeId":"asapo_test2","beamline":"test2","core-path":"{{ .Values.common.offlineDir }}/test_facility/gpfs/test2/2019/data/asapo_test2"}],
   "RootBeamtimesFolder":"{{ .Values.common.offlineDir }}",
   "CurrentBeamlinesFolder":"{{ .Values.common.onlineDir }}",
-  "SecretFile":"/etc/authorizer/auth_secret.key",
+  "UserSecretFile":"/etc/authorizer/auth_secret.key",
+  "AdminSecretFile":"/etc/authorizer/auth_secret_admin.key",
   "TokenDurationMin":600,
   "Ldap":
   {
diff --git a/deploy/asapo_helm_chart/asapo/configs/asapo-broker.json b/deploy/asapo_helm_chart/asapo/configs/asapo-broker.json
index 63452b6f31afeef34bfcdb1f8c6a3f74fc4c77b5..196b79c99019f1fdb86eafdd6573ac7a6ae894bf 100644
--- a/deploy/asapo_helm_chart/asapo/configs/asapo-broker.json
+++ b/deploy/asapo_helm_chart/asapo/configs/asapo-broker.json
@@ -1,10 +1,11 @@
 {
   "DatabaseServer":"asapo-mongodb:{{ .Values.ownServices.mongodb.port }}",
   "DiscoveryServer": "asapo-discovery:{{ .Values.ownServices.discovery.port }}",
+  "AuthorizationServer": "asapo-authorizer:{{ .Values.ownServices.authorization.port }}",
   "PerformanceDbServer":"{{ .Chart.Name }}-influxdb:{{ .Values.influxdb.influxdb.service.port }}",
+  "MonitorPerformance": true,
   "PerformanceDbName": "asapo_brokers",
   "Port": {{ .Values.ownServices.broker.port }},
   "CheckResendInterval":10,
-  "LogLevel":"debug",
-  "SecretFile":"/etc/broker/auth_secret.key"
+  "LogLevel":"debug"
 }
diff --git a/deploy/asapo_helm_chart/asapo/configs/asapo-receiver.json b/deploy/asapo_helm_chart/asapo/configs/asapo-receiver.json
index c034223fc2a8c0082d924ab5ba5db797f2dfc037..6dde5c8adc0a08182691d7158e067bff336f8815 100644
--- a/deploy/asapo_helm_chart/asapo/configs/asapo-receiver.json
+++ b/deploy/asapo_helm_chart/asapo/configs/asapo-receiver.json
@@ -1,5 +1,6 @@
 {
   "PerformanceDbServer": "{{ .Chart.Name }}-influxdb:{{ .Values.influxdb.influxdb.service.port }}",
+  "MonitorPerformance": true,
   "PerformanceDbName": "asapo_receivers",
   "DatabaseServer": "asapo-mongodb:{{ .Values.ownServices.mongodb.port }}",
   "DiscoveryServer": "asapo-discovery:{{ .Values.ownServices.discovery.port }}",
diff --git a/deploy/asapo_helm_chart/asapo/templates/auth-secret-admin.yaml b/deploy/asapo_helm_chart/asapo/templates/auth-secret-admin.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..0a6b49b7922e3ef2c59a542498b267b14b8aebe5
--- /dev/null
+++ b/deploy/asapo_helm_chart/asapo/templates/auth-secret-admin.yaml
@@ -0,0 +1,7 @@
+apiVersion: v1
+kind: Secret
+metadata:
+  name: auth-secret-admin
+type: Opaque
+data:
+  auth_secret_admin.key: {{ .Values.common.authSecretAdmin | b64enc | quote }}
diff --git a/deploy/asapo_helm_chart/asapo/templates/authorizer-deployment.yaml b/deploy/asapo_helm_chart/asapo/templates/authorizer-deployment.yaml
index e923be6acd41da528beaf088b8852125d3b8cf55..100e114d5a5980a0c42ef64807e75e8d8fb2a48c 100644
--- a/deploy/asapo_helm_chart/asapo/templates/authorizer-deployment.yaml
+++ b/deploy/asapo_helm_chart/asapo/templates/authorizer-deployment.yaml
@@ -16,6 +16,7 @@ spec:
       annotations:
         checksum/config: {{ .Files.Get "configs/asapo-authorizer.json" | sha256sum  }}
         checksum/secret: {{ include (print $.Template.BasePath "/auth-secret.yaml") . | sha256sum }}
+        checksum/secret-admin: {{ include (print $.Template.BasePath "/auth-secret-admin.yaml") . | sha256sum }}
         checksum/fluentd-config: {{ .Files.Get "configs/asapo-fluentd.conf" | sha256sum  }}
     spec:
       volumes:
@@ -26,6 +27,8 @@ spec:
                   name: asapo-authorizer-config
               - secret:
                   name: auth-secret
+              - secret:
+                  name: auth-secret-admin
         - name: shared-volume-offline
           persistentVolumeClaim:
             claimName: asapo-offline-pv
diff --git a/deploy/asapo_helm_chart/asapo/values.yaml b/deploy/asapo_helm_chart/asapo/values.yaml
index cfb5e3657d3b5627d0b562d2649f5397da687709..beccb7c2193ae10139e8325d3aa9ac175901bea3 100644
--- a/deploy/asapo_helm_chart/asapo/values.yaml
+++ b/deploy/asapo_helm_chart/asapo/values.yaml
@@ -55,6 +55,7 @@ ownServices:
 
 common:
   authSecret: "12ljzgneasfd"
+  authSecretAdmin: "12ljzgneasf123d"
   offlineDir: "/test_offline"
   onlineDir: "/test_online"
   asapoVersionTag: "develop"
diff --git a/deploy/asapo_services/asap3.tfvars b/deploy/asapo_services/asap3.tfvars
index 972dafffdeb937973bdf418d21d7b038514f6236..c3c5434b8828865ead3f552ff91240392cedabe8 100644
--- a/deploy/asapo_services/asap3.tfvars
+++ b/deploy/asapo_services/asap3.tfvars
@@ -1,6 +1,10 @@
 elk_logs = true
 
-asapo_image_tag = "develop"
+asapo_imagename_suffix = ""
+asapo_image_tag = ""
+
+influxdb_version="1.8.4"
+
 
 service_dir="/gpfs/asapo/shared/service_dir"
 online_dir="/beamline"
diff --git a/deploy/asapo_services/scripts/asapo-brokers.nmd.tpl b/deploy/asapo_services/scripts/asapo-brokers.nmd.tpl
index e96dc1a954aacfdc6ecef2a325926544e171b151..14f5eaf5baf9587059c4b644d3dbfea8a29eb137 100644
--- a/deploy/asapo_services/scripts/asapo-brokers.nmd.tpl
+++ b/deploy/asapo_services/scripts/asapo-brokers.nmd.tpl
@@ -30,7 +30,7 @@ job "asapo-brokers" {
 	    security_opt = ["no-new-privileges"]
 	    userns_mode = "host"
         image = "yakser/asapo-broker${image_suffix}"
-	    force_pull = true
+	    force_pull = ${force_pull_images}
         volumes = ["local/config.json:/var/lib/broker/config.json"]
         %{ if ! nomad_logs  }
           logging {
@@ -67,6 +67,10 @@ job "asapo-brokers" {
         }
       }
 
+      meta {
+        perf_monitor = "${perf_monitor}"
+      }
+
       template {
          source        = "${scripts_dir}/broker.json.tpl"
          destination   = "local/config.json"
diff --git a/deploy/asapo_services/scripts/asapo-fts.nmd.tpl b/deploy/asapo_services/scripts/asapo-fts.nmd.tpl
index 055d567c0da050d0898ca58f72164b7b30065048..5f4d98278ccb419e54ce9465895e47ea22c92bbb 100644
--- a/deploy/asapo_services/scripts/asapo-fts.nmd.tpl
+++ b/deploy/asapo_services/scripts/asapo-fts.nmd.tpl
@@ -30,7 +30,7 @@ job "asapo-file-transfer" {
 	    security_opt = ["no-new-privileges"]
 	    userns_mode = "host"
         image = "yakser/asapo-file-transfer${image_suffix}"
-	    force_pull = true
+	    force_pull = ${force_pull_images}
         volumes = ["local/config.json:/var/lib/file_transfer/config.json",
                            "${offline_dir}:${offline_dir}",
                            "${online_dir}:${online_dir}"
diff --git a/deploy/asapo_services/scripts/asapo-perfmetrics.nmd.tpl b/deploy/asapo_services/scripts/asapo-perfmetrics.nmd.tpl
index 6033581d872d53ae108a892f0754c5bff98b901a..cd717baff86ea82bd6bf5b54719d827caa3aacfa 100644
--- a/deploy/asapo_services/scripts/asapo-perfmetrics.nmd.tpl
+++ b/deploy/asapo_services/scripts/asapo-perfmetrics.nmd.tpl
@@ -14,7 +14,7 @@ job "asapo-perfmetrics" {
 #  }
 
   group "perfmetrics" {
-    count = 1
+    count = "%{ if perf_monitor }1%{ else }0%{ endif }"
     restart {
       attempts = 2
       interval = "3m"
@@ -30,11 +30,13 @@ job "asapo-perfmetrics" {
 	    security_opt = ["no-new-privileges"]
 	    userns_mode = "host"
         image = "influxdb:${influxdb_version}"
-        volumes = ["/${service_dir}/influxdb:/var/lib/influxdb"]
+        volumes = ["/${service_dir}/influxdb2:/var/lib/influxdb2"]
       }
 
       env {
         PRE_CREATE_DB="asapo_receivers;asapo_brokers"
+        INFLUXDB_BIND_ADDRESS="127.0.0.1:$${NOMAD_PORT_influxdb_rpc}"
+        INFLUXDB_HTTP_BIND_ADDRESS=":$${NOMAD_PORT_influxdb}"
       }
 
       resources {
@@ -43,6 +45,9 @@ job "asapo-perfmetrics" {
           port "influxdb" {
           static = "${influxdb_port}"
           }
+          port "influxdb_rpc" {
+          static = "${influxdb_rpc_port}"
+          }
         }
       }
 
@@ -72,6 +77,7 @@ job "asapo-perfmetrics" {
       env {
         GF_SERVER_DOMAIN = "$${attr.unique.hostname}"
         GF_SERVER_ROOT_URL = "%(protocol)s://%(domain)s/performance/"
+        GF_SERVER_HTTP_PORT = "${grafana_port}"
       }
 
       config {
diff --git a/deploy/asapo_services/scripts/asapo-receivers.nmd.tpl b/deploy/asapo_services/scripts/asapo-receivers.nmd.tpl
index 075100884089b185c4695d7f7c063763fda1776c..2096021f5cc99da4edcfbecafaf5c4a2343855d3 100644
--- a/deploy/asapo_services/scripts/asapo-receivers.nmd.tpl
+++ b/deploy/asapo_services/scripts/asapo-receivers.nmd.tpl
@@ -35,7 +35,7 @@ job "asapo-receivers" {
 	    userns_mode = "host"
 	    privileged = true
         image = "yakser/asapo-receiver${image_suffix}"
-	    force_pull = true
+	    force_pull = ${force_pull_images}
         volumes = ["local/config.json:/var/lib/receiver/config.json",
                    "${offline_dir}:${offline_dir}",
                    "${online_dir}:${online_dir}"]
@@ -84,6 +84,7 @@ job "asapo-receivers" {
         receiver_dataserver_nthreads = "${receiver_dataserver_nthreads}"
         receiver_receive_to_disk_threshold = "${receiver_receive_to_disk_threshold}"
         receiver_network_modes = "${receiver_network_modes}"
+        perf_monitor = "${perf_monitor}"
       }
 
       template {
diff --git a/deploy/asapo_services/scripts/asapo-services.nmd.tpl b/deploy/asapo_services/scripts/asapo-services.nmd.tpl
index dae2350c53835c4a4aed16d239a90fb81a8838f9..9cb6298aa08ab6edf9b2f3897c21ead5a7dd9514 100644
--- a/deploy/asapo_services/scripts/asapo-services.nmd.tpl
+++ b/deploy/asapo_services/scripts/asapo-services.nmd.tpl
@@ -19,7 +19,7 @@ job "asapo-services" {
 	    security_opt = ["no-new-privileges"]
 	    userns_mode = "host"
         image = "yakser/asapo-authorizer${image_suffix}"
-	force_pull = true
+	    force_pull = ${force_pull_images}
         volumes = ["local/config.json:/var/lib/authorizer/config.json",
                            "${offline_dir}:${offline_dir}",
                            "${online_dir}:${online_dir}"]
@@ -78,6 +78,11 @@ job "asapo-services" {
         destination   = "local/secret.key"
         change_mode   = "restart"
       }
+      template {
+        source        = "${scripts_dir}/auth_secret_admin.key"
+        destination   = "local/secret_admin.key"
+        change_mode   = "restart"
+      }
    }
   } #authorizer
   group "asapo-discovery" {
@@ -91,7 +96,7 @@ job "asapo-services" {
 	    security_opt = ["no-new-privileges"]
 	    userns_mode = "host"
         image = "yakser/asapo-discovery${image_suffix}"
-	    force_pull = true
+	    force_pull = ${force_pull_images}
         volumes = ["local/config.json:/var/lib/discovery/config.json"]
         %{ if ! nomad_logs  }
         logging {
@@ -120,7 +125,7 @@ job "asapo-services" {
         check {
           name     = "alive"
           type     = "http"
-          path     = "/asapo-receiver"
+          path     = "/health"
           interval = "10s"
           timeout  = "2s"
           initial_status =   "passing"
diff --git a/deploy/asapo_services/scripts/asapo.auto.tfvars.in b/deploy/asapo_services/scripts/asapo.auto.tfvars.in
index 2212ce1f349579fc874591fbf5884a5db746269b..764b672753af79dcad8aebf46aa1205e6672a9f9 100644
--- a/deploy/asapo_services/scripts/asapo.auto.tfvars.in
+++ b/deploy/asapo_services/scripts/asapo.auto.tfvars.in
@@ -6,10 +6,12 @@ nginx_version = "1.14"
 elasticsearch_version = "7.3.2"
 kibana_version = "7.3.2"
 mongo_version = "4.0.0"
-grafana_version="latest"
-influxdb_version = "latest"
-elk_logs = true
+grafana_version="7.5.0"
+influxdb_version = "2.0.4"
+elk_logs = false
+perf_monitor = false
 nomad_logs = false
+force_pull_images = false
 
 job_scripts_dir = "/var/run/asapo"
 
@@ -32,8 +34,9 @@ authorizer_total_memory_size = 256
 discovery_total_memory_size = 256
 
 
-grafana_port = 3000
+grafana_port = 3301
 influxdb_port = 8086
+influxdb_rpc_port = 8088
 mongo_port = 27017
 fluentd_port = 9880
 fluentd_port_stream = 24224
diff --git a/deploy/asapo_services/scripts/auth_secret_admin.key b/deploy/asapo_services/scripts/auth_secret_admin.key
new file mode 100644
index 0000000000000000000000000000000000000000..295004c5898aa11fe557d7178e073eef2a8606ec
--- /dev/null
+++ b/deploy/asapo_services/scripts/auth_secret_admin.key
@@ -0,0 +1 @@
+sfdskln123_w
\ No newline at end of file
diff --git a/deploy/asapo_services/scripts/authorizer.json.tpl b/deploy/asapo_services/scripts/authorizer.json.tpl
index e73af4ae5dc891ba81098ee47a31a09bb7481f1d..6e63929204521df21479cdce3c86a07db09c5f77 100644
--- a/deploy/asapo_services/scripts/authorizer.json.tpl
+++ b/deploy/asapo_services/scripts/authorizer.json.tpl
@@ -1,12 +1,13 @@
 {
   "Port": {{ env "NOMAD_PORT_authorizer" }},
   "LogLevel":"debug",
-  "AlwaysAllowedBeamtimes":[{"beamtimeId":"asapo_test","beamline":"test","core-path":"{{ env "NOMAD_META_offline_dir" }}/test_facility/gpfs/test/2019/data/asapo_test"},
-  {"beamtimeId":"asapo_test1","beamline":"test1","core-path":"{{ env "NOMAD_META_offline_dir" }}/test_facility/gpfs/test1/2019/data/asapo_test1"},
-  {"beamtimeId":"asapo_test2","beamline":"test2","core-path":"{{ env "NOMAD_META_offline_dir" }}/test_facility/gpfs/test2/2019/data/asapo_test2"}],
+  "AlwaysAllowedBeamtimes":[{"beamtimeId":"asapo_test","beamline":"test","core-path":"{{ env "NOMAD_META_offline_dir" }}/test_facility/gpfs/test/2019/data/asapo_test", "beamline-path":"{{ env "NOMAD_META_online_dir" }}/test/current"},
+  {"beamtimeId":"asapo_test1","beamline":"test1","core-path":"{{ env "NOMAD_META_offline_dir" }}/test_facility/gpfs/test1/2019/data/asapo_test1", "beamline-path":"{{ env "NOMAD_META_online_dir" }}/test1/current"},
+  {"beamtimeId":"asapo_test2","beamline":"test2","core-path":"{{ env "NOMAD_META_offline_dir" }}/test_facility/gpfs/test2/2019/data/asapo_test2", "beamline-path":"{{ env "NOMAD_META_online_dir" }}/test2/current"}],
   "RootBeamtimesFolder":"{{ env "NOMAD_META_offline_dir" }}",
   "CurrentBeamlinesFolder":"{{ env "NOMAD_META_online_dir" }}",
-  "SecretFile":"/local/secret.key",
+  "UserSecretFile":"/local/secret.key",
+  "AdminSecretFile":"/local/secret_admin.key",
   "TokenDurationMin":600,
   "Ldap":
     {
diff --git a/deploy/asapo_services/scripts/broker.json.tpl b/deploy/asapo_services/scripts/broker.json.tpl
index 9b0f75f1e72f49db9ae54c8e3e85250466b58ed2..493c986e427c76ff9c226d931055ad6f5e398563 100644
--- a/deploy/asapo_services/scripts/broker.json.tpl
+++ b/deploy/asapo_services/scripts/broker.json.tpl
@@ -1,10 +1,11 @@
 {
   "DatabaseServer":"auto",
   "DiscoveryServer": "localhost:8400/asapo-discovery",
+  "AuthorizationServer": "localhost:8400/asapo-authorizer",
   "PerformanceDbServer":"localhost:8400/influxdb",
+  "MonitorPerformance": {{ env "NOMAD_META_perf_monitor" }},
   "CheckResendInterval":10,
   "PerformanceDbName": "asapo_brokers",
   "Port":{{ env "NOMAD_PORT_broker" }},
-  "LogLevel":"info",
-  "SecretFile":"/local/secret.key"
+  "LogLevel":"info"
 }
diff --git a/deploy/asapo_services/scripts/receiver.json.tpl b/deploy/asapo_services/scripts/receiver.json.tpl
index c4f0d33b20cf255c005650b9d849e9ebd2562f3f..e5214deed638118fac712fe6dced5bf56ebaf790 100644
--- a/deploy/asapo_services/scripts/receiver.json.tpl
+++ b/deploy/asapo_services/scripts/receiver.json.tpl
@@ -1,5 +1,6 @@
 {
   "PerformanceDbServer":"localhost:8400/influxdb",
+  "MonitorPerformance": {{ env "NOMAD_META_perf_monitor" }},
   "PerformanceDbName": "asapo_receivers",
   "DatabaseServer":"auto",
   "DiscoveryServer": "localhost:8400/asapo-discovery",
diff --git a/deploy/asapo_services/scripts/resources.tf b/deploy/asapo_services/scripts/resources.tf
index 1539150f6436c3920ebe58520fb05637882b205a..869b4664f4cde62fbd8495ea5aa42c00ed8b3378 100644
--- a/deploy/asapo_services/scripts/resources.tf
+++ b/deploy/asapo_services/scripts/resources.tf
@@ -1,37 +1,37 @@
 resource "nomad_job" "asapo-nginx" {
-  jobspec = "${data.template_file.nginx.rendered}"
+  jobspec = data.template_file.nginx.rendered
 }
 
 resource "nomad_job" "asapo-mongo" {
-  jobspec = "${data.template_file.asapo_mongo.rendered}"
+  jobspec = data.template_file.asapo_mongo.rendered
 }
 
 resource "nomad_job" "asapo-perfmetrics" {
-  jobspec = "${data.template_file.asapo_perfmetrics.rendered}"
+  jobspec = data.template_file.asapo_perfmetrics.rendered
 }
 
 resource "nomad_job" "asapo-logging" {
-  jobspec = "${data.template_file.asapo_logging.rendered}"
+  jobspec = data.template_file.asapo_logging.rendered
   depends_on = [null_resource.nginx]
 }
 
 resource "nomad_job" "asapo-services" {
-  jobspec = "${data.template_file.asapo_services.rendered}"
+  jobspec = data.template_file.asapo_services.rendered
   depends_on = [null_resource.nginx,null_resource.mongo,null_resource.influxdb,null_resource.fluentd,null_resource.elasticsearch]
 }
 
 resource "nomad_job" "asapo-receivers" {
-  jobspec = "${data.template_file.asapo_receivers.rendered}"
+  jobspec = data.template_file.asapo_receivers.rendered
   depends_on = [nomad_job.asapo-services,null_resource.asapo-authorizer,null_resource.asapo-discovery]
 }
 
 resource "nomad_job" "asapo-brokers" {
-  jobspec = "${data.template_file.asapo_brokers.rendered}"
+  jobspec = data.template_file.asapo_brokers.rendered
   depends_on = [nomad_job.asapo-services,null_resource.asapo-authorizer,null_resource.asapo-discovery]
 }
 
 resource "nomad_job" "asapo-fts" {
-  jobspec = "${data.template_file.asapo_fts.rendered}"
+  jobspec = data.template_file.asapo_fts.rendered
   depends_on = [nomad_job.asapo-services,null_resource.asapo-authorizer,null_resource.asapo-discovery]
 }
 
diff --git a/deploy/asapo_services/scripts/resources_services.tf b/deploy/asapo_services/scripts/resources_services.tf
index fa84a79b18e64e5a4a1431b9c8589f08330d2e02..e3cff85810efb48af3bc90347c5e20010204df6b 100644
--- a/deploy/asapo_services/scripts/resources_services.tf
+++ b/deploy/asapo_services/scripts/resources_services.tf
@@ -8,7 +8,7 @@ resource "null_resource" "nginx" {
 
 resource "null_resource" "influxdb" {
   provisioner "local-exec" {
-    command = "asapo-wait-service influxdb"
+    command = "asapo-wait-service influxdb ${var.perf_monitor}"
   }
   depends_on = [nomad_job.asapo-perfmetrics]
 }
diff --git a/deploy/asapo_services/scripts/templates.tf b/deploy/asapo_services/scripts/templates.tf
index 90f6b71ec4bc3f5cb5c6e66e376e57783aaa50a3..e5dae73a5b2e39f2550df7de744e1142ed60d962 100644
--- a/deploy/asapo_services/scripts/templates.tf
+++ b/deploy/asapo_services/scripts/templates.tf
@@ -29,6 +29,7 @@ data "template_file" "asapo_services" {
     authorizer_port = "${var.authorizer_port}"
     discovery_port = "${var.discovery_port}"
     asapo_user = "${var.asapo_user}"
+    force_pull_images = "${var.force_pull_images}"
   }
 }
 
@@ -47,6 +48,8 @@ data "template_file" "asapo_receivers" {
     receiver_network_modes = "${var.receiver_network_modes}"
     asapo_user = "${var.asapo_user}"
     n_receivers = "${var.n_receivers}"
+    force_pull_images = "${var.force_pull_images}"
+    perf_monitor = "${var.perf_monitor}"
   }
 }
 
@@ -58,6 +61,8 @@ data "template_file" "asapo_brokers" {
     nomad_logs = "${var.nomad_logs}"
     asapo_user = "${var.asapo_user}"
     n_brokers = "${var.n_brokers}"
+    force_pull_images = "${var.force_pull_images}"
+    perf_monitor = "${var.perf_monitor}"
   }
 }
 
@@ -72,6 +77,7 @@ data "template_file" "asapo_fts" {
     nomad_logs = "${var.nomad_logs}"
     asapo_user = "${var.asapo_user}"
     n_fts = "${var.n_fts}"
+    force_pull_images = "${var.force_pull_images}"
   }
 }
 
@@ -86,6 +92,8 @@ data "template_file" "asapo_perfmetrics" {
     influxdb_total_memory_size = "${var.influxdb_total_memory_size}"
     influxdb_port = "${var.influxdb_port}"
     asapo_user = "${var.asapo_user}"
+    influxdb_rpc_port = "${var.influxdb_rpc_port}"
+    perf_monitor = "${var.perf_monitor}"
     }
 }
 
diff --git a/deploy/asapo_services/scripts/vars.tf b/deploy/asapo_services/scripts/vars.tf
index 5d69d474089cd565a3bb5650c78a18e6a21eaef7..2e95cf15a863cff1d0565cf5f36d9e4cb0c5bcd3 100644
--- a/deploy/asapo_services/scripts/vars.tf
+++ b/deploy/asapo_services/scripts/vars.tf
@@ -1,4 +1,7 @@
 variable "elk_logs" {}
+variable "perf_monitor" {}
+
+variable "force_pull_images" {}
 
 variable "asapo_user" {}
 
@@ -60,6 +63,8 @@ variable "grafana_port" {}
 
 variable "influxdb_port" {}
 
+variable "influxdb_rpc_port" {}
+
 variable "mongo_port" {}
 
 variable "fluentd_port" {}
diff --git a/deploy/nomad_consul_docker/nomad.hcl.tpl b/deploy/nomad_consul_docker/nomad.hcl.tpl
index 1d67eb3f695986732be629468f657b7c75b58d39..2a5716ee38a99f7c301b25544705a0f8f14cc7d4 100644
--- a/deploy/nomad_consul_docker/nomad.hcl.tpl
+++ b/deploy/nomad_consul_docker/nomad.hcl.tpl
@@ -27,7 +27,9 @@ client {
 plugin "docker" {
   config {
     endpoint = "$docker_endpoint"
-
+    gc {
+        image = false
+    }
     tls {
       cert = "/etc/nomad/cert.pem"
       key  = "/etc/nomad/key.pem"
diff --git a/deploy/nomad_consul_docker/orchestr_config.py b/deploy/nomad_consul_docker/orchestr_config.py
index 5e91651d3bd413a2359790284df9da1b6023b904..52f5da0e6d6fc01be04c96a3d73320170919e1ba 100644
--- a/deploy/nomad_consul_docker/orchestr_config.py
+++ b/deploy/nomad_consul_docker/orchestr_config.py
@@ -59,8 +59,6 @@ def process_file(file_in,file_out):
     filein = open(file_in)
     src = Template(filein.read())
     d = set_parameters()
-    print d
-
     with open(file_out, "w") as out:
         out.write(src.substitute(d))
 
diff --git a/deploy/nomad_consul_docker/scripts/provider.tf b/deploy/nomad_consul_docker/scripts/provider.tf
index 0df2412ca44f14d2a2f03f3c3b0d41d33a9e6cb3..922c3af25a15d5f0668ef0aef8bb1e64ef9238dd 100644
--- a/deploy/nomad_consul_docker/scripts/provider.tf
+++ b/deploy/nomad_consul_docker/scripts/provider.tf
@@ -1,5 +1,5 @@
 provider "nomad" {
   address = "http://localhost:4646"
-  secret_id = "${chomp(file("/var/nomad/token"))}"
+  secret_id = chomp(file("/var/nomad/token"))
 }
 
diff --git a/discovery/src/asapo_discovery/common/consts.go b/discovery/src/asapo_discovery/common/consts.go
index 9789e656f1b7cadca21b370582f815a6e23453bc..e43ea3b1e52bae972c34cb40e658b3d1cc99a98b 100644
--- a/discovery/src/asapo_discovery/common/consts.go
+++ b/discovery/src/asapo_discovery/common/consts.go
@@ -6,3 +6,4 @@ const  (
 	NameBrokerService = "asapo-broker"
 	NameReceiverService = "asapo-receiver"
 )
+
diff --git a/discovery/src/asapo_discovery/go.mod b/discovery/src/asapo_discovery/go.mod
index 8661775101cee70211a111c2fb3bcf48bfb8f7c8..d1fbc93e8aa6557c91054b95b2925088f5a36359 100644
--- a/discovery/src/asapo_discovery/go.mod
+++ b/discovery/src/asapo_discovery/go.mod
@@ -1,16 +1,14 @@
 module asapo_discovery
 
-go 1.14
+go 1.16
 
 replace asapo_common v0.0.0 => ../../../common/go/src/asapo_common
 
 require (
 	asapo_common v0.0.0
-	github.com/gorilla/mux v1.7.4 // indirect
+	github.com/gorilla/mux v1.8.0
 	github.com/hashicorp/consul/api v1.4.0
-	github.com/sirupsen/logrus v1.5.0 // indirect
-	github.com/stretchr/testify v1.4.0
-	k8s.io/api v0.17.0
+	github.com/stretchr/testify v1.7.0
 	k8s.io/apimachinery v0.17.0
 	k8s.io/client-go v0.17.0
 )
diff --git a/discovery/src/asapo_discovery/go.sum b/discovery/src/asapo_discovery/go.sum
index 859f1d959eb69a677e2ca214af6b005bd2a89e43..5b43fcb6be3e5b43431604d61ffe18ac5821eb4a 100644
--- a/discovery/src/asapo_discovery/go.sum
+++ b/discovery/src/asapo_discovery/go.sum
@@ -22,12 +22,10 @@ github.com/davecgh/go-spew v0.0.0-20151105211317-5215b55f46b2/go.mod h1:J7Y8YcW2
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/dgrijalva/jwt-go v1.0.2 h1:KPldsxuKGsS2FPWsNeg9ZO18aCrGKujPoWXn2yo+KQM=
 github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
 github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
 github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM=
 github.com/elazarl/goproxy v0.0.0-20170405201442-c4fc26588b6e/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
-github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
 github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
 github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
 github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
@@ -42,8 +40,6 @@ github.com/go-openapi/spec v0.0.0-20160808142527-6aced65f8501/go.mod h1:J8+jY1nA
 github.com/go-openapi/swag v0.0.0-20160704191624-1d0bd113de87/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I=
 github.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d h1:3PaI8p3seN09VjbTYC/QWlUZdZ1qS1zGjy7LH2Wt07I=
 github.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
-github.com/gogo/protobuf v1.3.1 h1:DqDEcV5aeaTmdFBePNpYsp3FlcVH/2ISVVM9Qf8PSls=
-github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
 github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
 github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
 github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
@@ -53,30 +49,29 @@ github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5y
 github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs=
 github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
 github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
+github.com/google/btree v1.0.0 h1:0udJVsspx3VBr5FwtLhQQtuAsVc79tTq0ocGIPAU6qo=
 github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
 github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
+github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY=
 github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
 github.com/google/gofuzz v0.0.0-20161122191042-44d81051d367/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI=
 github.com/google/gofuzz v1.0.0 h1:A8PeW59pxE9IoFRqBp37U+mSNaQoZ46F1f0f863XSXw=
 github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
-github.com/google/gofuzz v1.1.0 h1:Hsa8mG0dQ46ij8Sl2AYJDUv1oA9/d6Vk+3LG99Oe02g=
-github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
 github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
 github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
 github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
 github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
 github.com/googleapis/gnostic v0.0.0-20170729233727-0c5108395e2d h1:7XGaL1e6bYS1yIonGp9761ExpPPV1ui0SAC59Yube9k=
 github.com/googleapis/gnostic v0.0.0-20170729233727-0c5108395e2d/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY=
-github.com/googleapis/gnostic v0.1.0 h1:rVsPeBmXbYv4If/cumu1AzZPwV58q433hvONV1UEZoI=
-github.com/googleapis/gnostic v0.1.0/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY=
 github.com/gophercloud/gophercloud v0.1.0/go.mod h1:vxM41WHh5uqHVBMZHzuwNOHh8XEoIEcSTewFxm1c5g8=
-github.com/gorilla/mux v1.7.4 h1:VuZ8uybHlWmqV03+zRzdwKL4tUnIp1MAQtp1mIFE1bc=
-github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
+github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
+github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
 github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA=
-github.com/hashicorp/consul v1.7.2 h1:pDEnRiUE8jOUlxIqzo8Jw3Zcsz6KSpygk2BjkrsASsk=
 github.com/hashicorp/consul/api v1.4.0 h1:jfESivXnO5uLdH650JU/6AnjRoHrLhULq0FnC3Kp9EY=
 github.com/hashicorp/consul/api v1.4.0/go.mod h1:xc8u05kyMa3Wjr9eEAsIAo3dg8+LywT5E/Cl7cNS5nU=
+github.com/hashicorp/consul/sdk v0.4.0 h1:zBtCfKJZcJDBvSCkQJch4ulp59m1rATFLKwNo/LYY30=
 github.com/hashicorp/consul/sdk v0.4.0/go.mod h1:fY08Y9z5SvJqevyZNy6WWPXiG3KwBPAvlcdx16zZ0fM=
+github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=
 github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
 github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM=
 github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
@@ -84,13 +79,17 @@ github.com/hashicorp/go-hclog v0.12.0 h1:d4QkX8FRTYaKaCZBoXYY8zJX2BXjWxurN/GA2tk
 github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=
 github.com/hashicorp/go-immutable-radix v1.0.0 h1:AKDB1HM5PWEA7i4nhcpwOrO2byshxBjXVn/J/3+z5/0=
 github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
+github.com/hashicorp/go-msgpack v0.5.3 h1:zKjpN5BK/P5lMYrLmBHdBULWbJ0XpYR+7NGzqkZzoD4=
 github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
+github.com/hashicorp/go-multierror v1.0.0 h1:iVjPR7a6H0tWELX5NxNe7bYopibicUzc7uPribsnS6o=
 github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
 github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc=
 github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
+github.com/hashicorp/go-sockaddr v1.0.0 h1:GeH6tui99pF4NJgfnhp+L6+FfobzVW3Ah46sLo0ICXs=
 github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU=
 github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4=
 github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
+github.com/hashicorp/go-uuid v1.0.1 h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE=
 github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
 github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90=
 github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
@@ -98,26 +97,27 @@ github.com/hashicorp/golang-lru v0.5.1 h1:0hERBMJE1eitiLkihrMvRVBYAkpHzc/J3QdDN+
 github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
 github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=
 github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ=
+github.com/hashicorp/memberlist v0.1.3 h1:EmmoJme1matNzb+hMpDuR/0sbJSUisxyqBGG676r31M=
 github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
 github.com/hashicorp/serf v0.8.2 h1:YZ7UKsJv+hKjqGVUUbtE3HNj79Eln2oQ75tniF6iPt0=
 github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
 github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
 github.com/imdario/mergo v0.3.5 h1:JboBksRwiiAJWvIYJVo46AfV+IAIKZpfrSzVKj42R4Q=
 github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
-github.com/imdario/mergo v0.3.9 h1:UauaLniWCFHWd+Jp9oCEkTBj8VO/9DKg3PV3VCNMDIg=
-github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
 github.com/json-iterator/go v0.0.0-20180612202835-f2b4162afba3/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
-github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
 github.com/json-iterator/go v1.1.8 h1:QiWkFLKq0T7mpzwOTu6BzNDbfTE8OLrYhVKYMLF46Ok=
 github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
 github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
 github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00=
 github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
-github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
 github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pretty v0.2.0 h1:s5hAObm+yFO5uHYt5dYjxi2rXrsnmRpJx4OYvIWUaQs=
 github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
 github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/magefile/mage v1.10.0 h1:3HiXzCUY12kh9bIuyXShaVe529fJfyqoVM42o/uom2g=
+github.com/magefile/mage v1.10.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
 github.com/mailru/easyjson v0.0.0-20160728113105-d5b7844b561a/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
 github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
 github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA=
@@ -128,10 +128,12 @@ github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcME
 github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE=
 github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
 github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
+github.com/miekg/dns v1.0.14 h1:9jZdLNd/P4+SfEJ0TNyxYpsK8N4GtfylBLqtbYN1sbA=
 github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
 github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
 github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
 github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
+github.com/mitchellh/go-testing-interface v1.0.0 h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0=
 github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
 github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg=
 github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY=
@@ -150,20 +152,22 @@ github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+
 github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
 github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
 github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
-github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
 github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA=
 github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
+github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c h1:Lgl0gzECD8GnQ5QCWA8o6BtfL6mDH5rQgM4/fX3avOs=
 github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
 github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU=
+github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
 github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
 github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
+github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I=
 github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
-github.com/sirupsen/logrus v1.5.0 h1:1N5EYkVAPEywqZRJd7cwnRtCb6xJx7NH3T3WUTF980Q=
-github.com/sirupsen/logrus v1.5.0/go.mod h1:+F7Ogzej0PZc/94MaYx/nvG9jOFMD2osvC3s+Squfpo=
+github.com/sirupsen/logrus v1.8.0 h1:nfhvjKcUMhBMVqbKHJlk5RPrrfYr/NMo3692g0dwfWU=
+github.com/sirupsen/logrus v1.8.0/go.mod h1:4GuYW9TZmE769R5STWrRakJc4UqQ3+QQ95fyz7ENv1A=
 github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=
 github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
 github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
@@ -173,12 +177,12 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+
 github.com/stretchr/testify v0.0.0-20151208002404-e3a8ff8ce365/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
 github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
-github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
 github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
 go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
 golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
 golang.org/x/crypto v0.0.0-20190211182817-74369b46fc67/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
-golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2 h1:VklqNMn3ovrHsnt90PveolxSbWFaJdECFbxSq0Mqo2M=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586 h1:7KByu05hhLed2MO29w7p1XfZvZ13m8mub3shuVftRs0=
 golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
@@ -202,8 +206,6 @@ golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAG
 golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
 golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45 h1:SVwTIAaPC2U/AvvLNZ2a7OVsmBpC8L5BlwK1whH3hm0=
 golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d h1:TzXSXBo42m9gQenoE3b9BGiEpg5IG2JkU5FkPIawgtw=
-golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
 golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -217,10 +219,8 @@ golang.org/x/sys v0.0.0-20190209173611-3b5209105503/go.mod h1:STP8DvDyc/dI5b8T5h
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9 h1:1/DFK4b7JH8DmkqhUk48onnSfrPzImPoVxuomtbT2nk=
@@ -233,8 +233,6 @@ golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
 golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20190308202827-9d24e82272b4 h1:SvFZT6jyqRaOeXpc5h/JSfZenJ2O330aBsf7JfSUXmQ=
 golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
-golang.org/x/time v0.0.0-20191024005414-555d28b269f0 h1:/5xXl8Y5W96D+TtHSlonuFqGHIWVuyCkGJLwGh9JJFs=
-golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@@ -244,6 +242,7 @@ golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3
 google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
 google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
 google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/appengine v1.5.0 h1:KxkO13IPW4Lslp2bz+KHP2E3gtFlrIGNThxkZQ3g+4c=
 google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
 google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
 google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
@@ -251,6 +250,7 @@ google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRn
 google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
 gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
 gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=
@@ -261,37 +261,24 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
 gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
 honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
 k8s.io/api v0.17.0 h1:H9d/lw+VkZKEVIUc8F3wgiQ+FUXTTr21M87jXLU7yqM=
 k8s.io/api v0.17.0/go.mod h1:npsyOePkeP0CPwyGfXDHxvypiYMJxBWAMpQxCaJ4ZxI=
-k8s.io/api v0.18.0 h1:lwYk8Vt7rsVTwjRU6pzEsa9YNhThbmbocQlKvNBB4EQ=
-k8s.io/api v0.18.0/go.mod h1:q2HRQkfDzHMBZL9l/y9rH63PkQl4vae0xRT+8prbrK8=
 k8s.io/apimachinery v0.17.0 h1:xRBnuie9rXcPxUkDizUsGvPf1cnlZCFu210op7J7LJo=
 k8s.io/apimachinery v0.17.0/go.mod h1:b9qmWdKlLuU9EBh+06BtLcSf/Mu89rWL33naRxs1uZg=
-k8s.io/apimachinery v0.18.0 h1:fuPfYpk3cs1Okp/515pAf0dNhL66+8zk8RLbSX+EgAE=
-k8s.io/apimachinery v0.18.0/go.mod h1:9SnR/e11v5IbyPCGbvJViimtJ0SwHG4nfZFjU77ftcA=
 k8s.io/client-go v0.17.0 h1:8QOGvUGdqDMFrm9sD6IUFl256BcffynGoe80sxgTEDg=
 k8s.io/client-go v0.17.0/go.mod h1:TYgR6EUHs6k45hb6KWjVD6jFZvJV4gHDikv/It0xz+k=
-k8s.io/client-go v1.5.1 h1:XaX/lo2/u3/pmFau8HN+sB5C/b4dc4Dmm2eXjBH4p1E=
-k8s.io/client-go v11.0.0+incompatible h1:LBbX2+lOwY9flffWlJM7f1Ct8V2SRNiMRDFeiwnJo9o=
-k8s.io/client-go v11.0.0+incompatible/go.mod h1:7vJpHMYJwNQCWgzmNV+VYUl1zCObLyodBc8nIyt8L5s=
 k8s.io/gengo v0.0.0-20190128074634-0689ccc1d7d6/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0=
 k8s.io/klog v0.0.0-20181102134211-b9b56d5dfc92/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk=
 k8s.io/klog v0.3.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk=
 k8s.io/klog v1.0.0 h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8=
 k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I=
 k8s.io/kube-openapi v0.0.0-20191107075043-30be4d16710a/go.mod h1:1TqjTSzOxsLGIKfj0lK8EeCP7K1iUG65v09OM0/WG5E=
-k8s.io/kube-openapi v0.0.0-20200121204235-bf4fb3bd569c/go.mod h1:GRQhZsXIAJ1xR0C9bd8UpWHZ5plfAS9fzPjJuQ6JL3E=
 k8s.io/utils v0.0.0-20191114184206-e782cd3c129f h1:GiPwtSzdP43eI1hpPCbROQCCIgCuiMMNF8YUVLF3vJo=
 k8s.io/utils v0.0.0-20191114184206-e782cd3c129f/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew=
-k8s.io/utils v0.0.0-20200327001022-6496210b90e8 h1:6JFbaLjRyBz8K2Jvt+pcT+N3vvwMZfg8MfVENwe9aag=
-k8s.io/utils v0.0.0-20200327001022-6496210b90e8/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew=
 sigs.k8s.io/structured-merge-diff v0.0.0-20190525122527-15d366b2352e/go.mod h1:wWxsB5ozmmv/SG7nM11ayaAW51xMvak/t1r0CSlcokI=
-sigs.k8s.io/structured-merge-diff/v3 v3.0.0-20200116222232-67a7b8c61874/go.mod h1:PlARxl6Hbt/+BC80dRLi1qAmnMqwqDg62YvvVkZjemw=
-sigs.k8s.io/structured-merge-diff/v3 v3.0.0 h1:dOmIZBMfhcHS09XZkMyUgkq5trg3/jRyJYFZUiaOp8E=
-sigs.k8s.io/structured-merge-diff/v3 v3.0.0/go.mod h1:PlARxl6Hbt/+BC80dRLi1qAmnMqwqDg62YvvVkZjemw=
 sigs.k8s.io/yaml v1.1.0 h1:4A07+ZFc2wgJwo8YNlQpr1rVlgUDlxXHhPJciaPY5gs=
 sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o=
-sigs.k8s.io/yaml v1.2.0 h1:kr/MCeFWJWTwyaHoR9c8EjH9OumOmoF9YGiZd7lFm/Q=
-sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc=
diff --git a/discovery/src/asapo_discovery/protocols/hard_coded_consumer.go b/discovery/src/asapo_discovery/protocols/hard_coded_consumer.go
new file mode 100644
index 0000000000000000000000000000000000000000..15a8f7b5c89c2f31b4c029279254523aa2d0c3db
--- /dev/null
+++ b/discovery/src/asapo_discovery/protocols/hard_coded_consumer.go
@@ -0,0 +1,14 @@
+package protocols
+
+func GetSupportedConsumerProtocols() []Protocol {
+	return []Protocol{
+		Protocol{"v0.1",
+			map[string]string{
+				"Discovery": "v0.1",
+				"Authorizer": "v0.1",
+				"Broker": "v0.1",
+				"File Transfer": "v0.1",
+				"Data cache service": "v0.1",
+			}, &protocolValidatorCurrent{}},
+	}
+}
diff --git a/discovery/src/asapo_discovery/protocols/hard_coded_producer.go b/discovery/src/asapo_discovery/protocols/hard_coded_producer.go
new file mode 100644
index 0000000000000000000000000000000000000000..515e242930e9c55231cb35dbc830cf25eb2180ec
--- /dev/null
+++ b/discovery/src/asapo_discovery/protocols/hard_coded_producer.go
@@ -0,0 +1,14 @@
+package protocols
+
+func GetSupportedProducerProtocols() []Protocol {
+	return []Protocol{
+		Protocol{"v0.1",
+			map[string]string{
+				"Discovery": "v0.1",
+				"Receiver": "v0.1",
+			}, &protocolValidatorCurrent{}},
+	}
+}
+
+
+
diff --git a/discovery/src/asapo_discovery/protocols/protocol_test.go b/discovery/src/asapo_discovery/protocols/protocol_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..458ff5589920fdbbe4111689fe7fd29cec50a5fa
--- /dev/null
+++ b/discovery/src/asapo_discovery/protocols/protocol_test.go
@@ -0,0 +1,33 @@
+package protocols
+
+import (
+	"github.com/stretchr/testify/assert"
+	"testing"
+)
+
+type protocolTest struct {
+	client   string
+	protocol string
+	result   bool
+	hint     string
+	message  string
+}
+
+var protocolTests = []protocolTest{
+// consumer
+	{"consumer", "v0.1", true, "", "current protocol"},
+	{"consumer", "v0.2", false, "unknown", "unknown protocol"},
+
+
+// producer
+	{"producer", "v0.1", true, "", "current protocol"},
+	{"producer", "v0.2", false, "unknown", "unknown protocol"},
+}
+
+func TestProtocolTests(t *testing.T) {
+	for _, ct := range protocolTests {
+		hint, ok := ValidateProtocol(ct.client, ct.protocol)
+		assert.Equal(t, ct.result, ok, ct.message)
+		assert.Contains(t, hint, ct.hint, ct.message)
+	}
+}
diff --git a/discovery/src/asapo_discovery/protocols/protocols.go b/discovery/src/asapo_discovery/protocols/protocols.go
new file mode 100644
index 0000000000000000000000000000000000000000..ada29e7f0e67348e233e8eb21f40903c0c12080a
--- /dev/null
+++ b/discovery/src/asapo_discovery/protocols/protocols.go
@@ -0,0 +1,84 @@
+package protocols
+
+import "errors"
+
+type protocolValidator interface {
+	IsValid() (hint string, ok bool)
+}
+
+type protocolValidatorCurrent struct {
+}
+
+func (p *protocolValidatorCurrent) IsValid() (hint string, ok bool) {
+	return "current", true
+}
+
+type Protocol struct {
+	Version   string
+	MicroserviceAPis map[string]string
+	validator protocolValidator
+}
+
+type ProtocolInfo  struct {
+	VersionInfo      string            `json:"versionInfo"`
+	MicroservicesApi map[string]string `json:"microservicesApi"`
+}
+
+func (p *Protocol) IsValid() (hint string, ok bool) {
+	return p.validator.IsValid()
+}
+
+func (p *Protocol) GetString() string {
+	hint, _ := p.validator.IsValid()
+	if hint != "" {
+		return p.Version + " (" + hint + ")"
+	} else {
+		return p.Version
+	}
+}
+
+func getSupportedProtocols(client string) ([]Protocol, error) {
+	switch client {
+	case "consumer":
+		return GetSupportedConsumerProtocols(), nil
+	case "producer":
+		return GetSupportedProducerProtocols(), nil
+	}
+	return nil, errors.New("unknown client")
+}
+
+func FindProtocol(client string, version string) (Protocol, error) {
+	protocols, err := getSupportedProtocols(client)
+	if err != nil {
+		return Protocol{},err
+	}
+	for _, protocol := range protocols {
+		if protocol.Version == version {
+			return protocol,nil
+		}
+	}
+	return Protocol{},errors.New("unknown protocol")
+}
+
+func ValidateProtocol(client string, version string) (hint string, ok bool) {
+	protocol, err := FindProtocol(client,version)
+	if err != nil {
+		return err.Error(), false
+	}
+	return protocol.IsValid()
+}
+
+func GetSupportedProtocolsArray(client string) ([]ProtocolInfo, error) {
+	protocols,err := getSupportedProtocols(client)
+	if err!=nil  {
+		return nil,err
+	}
+	res:=make([]ProtocolInfo,0)
+	for _,protocol := range protocols {
+		var info ProtocolInfo
+		info.VersionInfo = protocol.GetString()
+		info.MicroservicesApi = protocol.MicroserviceAPis
+		res = append(res, info)
+	}
+	return res,nil
+}
\ No newline at end of file
diff --git a/discovery/src/asapo_discovery/server/get_health.go b/discovery/src/asapo_discovery/server/get_health.go
new file mode 100644
index 0000000000000000000000000000000000000000..fdc697baf435f6bd7077bad2bdd7aa25284789fb
--- /dev/null
+++ b/discovery/src/asapo_discovery/server/get_health.go
@@ -0,0 +1,9 @@
+package server
+
+import (
+	"net/http"
+)
+
+func routeGetHealth(w http.ResponseWriter, r *http.Request) {
+	w.WriteHeader(http.StatusNoContent)
+}
diff --git a/discovery/src/asapo_discovery/server/get_health_test.go b/discovery/src/asapo_discovery/server/get_health_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..4d455e086fb8e9aede4d1fb6a4c1738ac244f442
--- /dev/null
+++ b/discovery/src/asapo_discovery/server/get_health_test.go
@@ -0,0 +1,12 @@
+package server
+
+import (
+	"github.com/stretchr/testify/assert"
+	"net/http"
+	"testing"
+)
+
+func TestHealth(t *testing.T) {
+	w := doRequest("/health")
+	assert.Equal(t, http.StatusNoContent, w.Code, "ok")
+}
diff --git a/discovery/src/asapo_discovery/server/get_receivers.go b/discovery/src/asapo_discovery/server/get_receivers.go
deleted file mode 100644
index 5fed8a36aa8be5856be7bad3f700b37933ea2760..0000000000000000000000000000000000000000
--- a/discovery/src/asapo_discovery/server/get_receivers.go
+++ /dev/null
@@ -1,53 +0,0 @@
-package server
-
-import (
-	"net/http"
-	"asapo_common/logger"
-	"asapo_discovery/common"
-)
-
-func getService(service string) (answer []byte, code int) {
-	var err error
-	if (service == "asapo-receiver") {
-		answer, err = requestHandler.GetReceivers(settings.Receiver.UseIBAddress)
-	} else {
-		answer, err = requestHandler.GetSingleService(service)
-
-	}
-	log_str := "processing get "+service
-	if err != nil {
-		logger.Error(log_str + " - " + err.Error())
-		return []byte(err.Error()),http.StatusInternalServerError
-	}
-	logger.Debug(log_str + " -  got " + string(answer))
-	return answer, http.StatusOK
-}
-
-
-func routeGetReceivers(w http.ResponseWriter, r *http.Request) {
-	r.Header.Set("Content-type", "application/json")
-	answer,code := getService(common.NameReceiverService)
-	w.WriteHeader(code)
-	w.Write(answer)
-}
-
-func routeGetBroker(w http.ResponseWriter, r *http.Request) {
-	r.Header.Set("Content-type", "application/json")
-	answer,code := getService(common.NameBrokerService)
-	w.WriteHeader(code)
-	w.Write(answer)
-}
-
-func routeGetMongo(w http.ResponseWriter, r *http.Request) {
-	r.Header.Set("Content-type", "application/json")
-	answer,code := getService(common.NameMongoService)
-	w.WriteHeader(code)
-	w.Write(answer)
-}
-
-func routeGetFileTransferService(w http.ResponseWriter, r *http.Request) {
-	r.Header.Set("Content-type", "application/json")
-	answer,code := getService(common.NameFtsService)
-	w.WriteHeader(code)
-	w.Write(answer)
-}
\ No newline at end of file
diff --git a/discovery/src/asapo_discovery/server/get_version.go b/discovery/src/asapo_discovery/server/get_version.go
new file mode 100644
index 0000000000000000000000000000000000000000..3e2abebd5afd815adb2bda77982424acff6c5992
--- /dev/null
+++ b/discovery/src/asapo_discovery/server/get_version.go
@@ -0,0 +1,115 @@
+package server
+
+import (
+	"asapo_common/logger"
+	"asapo_common/utils"
+	"asapo_common/version"
+	"asapo_discovery/protocols"
+	"encoding/json"
+	"errors"
+	"net/http"
+)
+
+type versionInfo struct {
+	SoftwareVersion            string `json:"softwareVersion"`
+	ClientProtocol     protocols.ProtocolInfo `json:"clientProtocol"`
+	ClientSupported            string `json:"clientSupported"`
+	SupportedProtocols []protocols.ProtocolInfo `json:"supportedProtocols"`
+}
+
+func extractProtocol(r *http.Request) (string, error) {
+	keys := r.URL.Query()
+	protocol := keys.Get("protocol")
+	if protocol == "" {
+		return "", errors.New("cannot extract protocol from request")
+	}
+	return protocol, nil
+}
+
+func routeGetVersion(w http.ResponseWriter, r *http.Request) {
+	log_str := "processing get version"
+	logger.Debug(log_str)
+
+	if ok := checkDiscoveryApiVersion(w, r); !ok {
+		return
+	}
+	keys := r.URL.Query()
+	client := keys.Get("client")
+	protocol := keys.Get("protocol")
+	info, err := getVersionInfo(client, protocol)
+	if err != nil {
+		w.WriteHeader(http.StatusInternalServerError)
+		w.Write([]byte(err.Error()))
+		logger.Error(log_str + " - " + err.Error())
+		return
+	}
+	resp, _ := json.Marshal(&info)
+	w.Write(resp)
+}
+
+func checkDiscoveryApiVersion(w http.ResponseWriter, r *http.Request) bool {
+	_, ok := utils.PrecheckApiVersion(w, r, version.GetDiscoveryApiVersion())
+	return ok
+}
+
+func getVersionInfo(client string, ver string) (versionInfo, error) {
+	info, err := getCoreInfo(client)
+	if err != nil {
+		return versionInfo{}, err
+	}
+	if ver=="" {
+		return info, nil
+	}
+	updateClientInfo(client, ver, &info)
+	return info, nil
+}
+
+func getCoreInfo(client string) (versionInfo, error) {
+	var info versionInfo
+	info.SoftwareVersion = version.GetVersion()
+	if client=="" {
+		return info, nil
+	}
+	var err error
+	info.SupportedProtocols, err = protocols.GetSupportedProtocolsArray(client)
+	if err != nil {
+		return versionInfo{}, err
+	}
+	return info, nil
+}
+
+func updateClientInfo(client string, ver string, info *versionInfo) {
+	if client == "" {
+		return
+	}
+	pInfo,valid := getProtocolInfo(client, ver, info)
+	setSupported(valid, info)
+	if client == "consumer" {
+		info.ClientProtocol = pInfo
+	} else
+	if client == "producer" {
+		info.ClientProtocol = pInfo
+	}
+}
+
+func setSupported(valid bool, info *versionInfo) {
+	if valid {
+		info.ClientSupported = "yes"
+	} else {
+		info.ClientSupported = "no"
+	}
+}
+
+func getProtocolInfo(client string, ver string, info *versionInfo) (pInfo protocols.ProtocolInfo, valid bool) {
+	protocol, err := protocols.FindProtocol(client, ver)
+	if err != nil {
+		pInfo.VersionInfo = ver + " (" + err.Error() + ")"
+		valid = false
+	} else {
+		var hint string
+		hint, valid = protocol.IsValid()
+		pInfo.VersionInfo = ver + " (" + hint + ")"
+		pInfo.MicroservicesApi = protocol.MicroserviceAPis
+	}
+	return
+}
diff --git a/discovery/src/asapo_discovery/server/get_version_test.go b/discovery/src/asapo_discovery/server/get_version_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..5a1e99c8a159a36b9c884b51b83a8d97d11c851e
--- /dev/null
+++ b/discovery/src/asapo_discovery/server/get_version_test.go
@@ -0,0 +1,69 @@
+package server
+
+import (
+	"asapo_common/version"
+	"asapo_discovery/protocols"
+	"encoding/json"
+	"fmt"
+	"github.com/stretchr/testify/assert"
+	"net/http"
+	"testing"
+)
+
+var coreVer = version.GetVersion()
+
+var versionTests = []struct {
+	request string
+	result  versionInfo
+	code    int
+	message string
+}{
+	{"", versionInfo{
+		SoftwareVersion:        coreVer,
+		ClientProtocol: protocols.ProtocolInfo{},
+		ClientSupported:        "",
+	}, http.StatusOK, "no client"},
+	{"?client=consumer", versionInfo{
+		SoftwareVersion: coreVer,
+		ClientProtocol:     protocols.ProtocolInfo{"", nil},
+		ClientSupported:            "no",
+	}, http.StatusOK, "consumer client, no protocol"},
+
+	{"?client=consumer&protocol=v0.1", versionInfo{
+		SoftwareVersion: coreVer,
+		ClientProtocol:     protocols.ProtocolInfo{"v0.1 (current)",
+			map[string]string{"Authorizer":"v0.1", "Broker":"v0.1", "Data cache service":"v0.1", "Discovery":"v0.1", "File Transfer":"v0.1"}},
+		ClientSupported:            "yes",
+	}, http.StatusOK, "consumer client"},
+	{"?client=producer&protocol=v0.1", versionInfo{
+		SoftwareVersion:        coreVer,
+		ClientProtocol: protocols.ProtocolInfo{"v0.1 (current)",map[string]string{"Discovery":"v0.1", "Receiver":"v0.1"}},
+		ClientSupported:        "yes",
+	}, http.StatusOK, "producer client"},
+	{"?client=producer&protocol=v0.2", versionInfo{
+		SoftwareVersion:        coreVer,
+		ClientProtocol: protocols.ProtocolInfo{"v0.2 (unknown protocol)",nil},
+		ClientSupported:        "no",
+	}, http.StatusOK, "producer client unknown"},
+}
+
+func TestVersionTests(t *testing.T) {
+	for _, test := range versionTests {
+		w := doRequest("/" + version.GetDiscoveryApiVersion() + "/version" + test.request)
+		assert.Equal(t, test.code, w.Code, test.message)
+		if test.code == http.StatusOK {
+			var info versionInfo
+			json.Unmarshal(w.Body.Bytes(), &info)
+			fmt.Println(w.Body.String())
+			assert.Equal(t, test.result.ClientProtocol,info.ClientProtocol, test.message)
+			if test.message!="no client" {
+				assert.Equal(t, true,len(info.SupportedProtocols)>0, test.message)
+			}
+		}
+	}
+}
+
+func TestVersionTestsWrongApi(t *testing.T) {
+	w := doRequest("/v2.0/version")
+	assert.Equal(t, http.StatusUnsupportedMediaType, w.Code, "wrong api")
+}
diff --git a/discovery/src/asapo_discovery/server/listroutes.go b/discovery/src/asapo_discovery/server/listroutes.go
index ec6ae17371a6f688925c8a05614855a9d7f248a1..88c566d79e480a08e97a6470cb554ef6a0aa3fdc 100644
--- a/discovery/src/asapo_discovery/server/listroutes.go
+++ b/discovery/src/asapo_discovery/server/listroutes.go
@@ -9,13 +9,13 @@ var listRoutes = utils.Routes{
 	utils.Route{
 		"GetReceivers",
 		"Get",
-		"/" + common.NameReceiverService,
+		"/{apiver}/" + common.NameReceiverService,
 		routeGetReceivers,
 	},
 	utils.Route{
 		"GetBroker",
 		"Get",
-		"/asapo-broker",
+		"/{apiver}/"+common.NameBrokerService,
 		routeGetBroker,
 	},
 	utils.Route{
@@ -24,10 +24,23 @@ var listRoutes = utils.Routes{
 		"/" + common.NameMongoService,
 		routeGetMongo,
 	},
+	utils.Route{
+		"GetVersion",
+		"Get",
+		"/{apiver}/version",
+		routeGetVersion,
+	},
 	utils.Route{
 		"GetFTS",
 		"Get",
-		"/" + common.NameFtsService,
+		"/{apiver}/" + common.NameFtsService,
 		routeGetFileTransferService,
 	},
+	utils.Route{
+		"Health",
+		"Get",
+		"/health",
+		routeGetHealth,
+	},
+
 }
diff --git a/discovery/src/asapo_discovery/server/routes.go b/discovery/src/asapo_discovery/server/routes.go
new file mode 100644
index 0000000000000000000000000000000000000000..d918a8adb6db7208fe001f2bb4df96e94ea7bd06
--- /dev/null
+++ b/discovery/src/asapo_discovery/server/routes.go
@@ -0,0 +1,90 @@
+package server
+
+import (
+	"asapo_discovery/protocols"
+	"net/http"
+	"asapo_common/logger"
+	"asapo_discovery/common"
+)
+
+func getService(service string) (answer []byte, code int) {
+	var err error
+	if service == "asapo-receiver" {
+		answer, err = requestHandler.GetReceivers(settings.Receiver.UseIBAddress)
+	} else {
+		answer, err = requestHandler.GetSingleService(service)
+
+	}
+	log_str := "processing get " + service
+	if err != nil {
+		logger.Error(log_str + " - " + err.Error())
+		return []byte(err.Error()), http.StatusInternalServerError
+	}
+	logger.Debug(log_str + " -  got " + string(answer))
+	return answer, http.StatusOK
+}
+
+func validateProtocol(w http.ResponseWriter, r *http.Request, client string) bool {
+	protocol, err := extractProtocol(r)
+	log_str := "validating " + client + " protocol"
+	if err != nil {
+		w.WriteHeader(http.StatusBadRequest)
+		w.Write([]byte(err.Error()))
+		logger.Error(log_str + " - " + err.Error())
+		return false
+	}
+	if hint, ok := protocols.ValidateProtocol(client, protocol); !ok {
+		w.WriteHeader(http.StatusUnsupportedMediaType)
+		w.Write([]byte(hint))
+		logger.Error(log_str + " - " + hint)
+		return false
+	}
+	logger.Debug(log_str + " - ok")
+	return true
+}
+
+func routeGetReceivers(w http.ResponseWriter, r *http.Request) {
+	if ok := checkDiscoveryApiVersion(w, r); !ok {
+		return
+	}
+
+	if !validateProtocol(w, r, "producer") {
+		return
+	}
+	answer, code := getService(common.NameReceiverService)
+	w.WriteHeader(code)
+	w.Write(answer)
+}
+
+func routeGetBroker(w http.ResponseWriter, r *http.Request) {
+	if ok := checkDiscoveryApiVersion(w, r); !ok {
+		return
+	}
+
+	if !validateProtocol(w, r, "consumer") {
+		return
+	}
+
+	answer, code := getService(common.NameBrokerService)
+	w.WriteHeader(code)
+	w.Write(answer)
+}
+
+func routeGetMongo(w http.ResponseWriter, r *http.Request) {
+	answer, code := getService(common.NameMongoService)
+	w.WriteHeader(code)
+	w.Write(answer)
+}
+
+func routeGetFileTransferService(w http.ResponseWriter, r *http.Request) {
+	if ok := checkDiscoveryApiVersion(w, r); !ok {
+		return
+	}
+	if !validateProtocol(w, r, "consumer") {
+		return
+	}
+
+	answer, code := getService(common.NameFtsService)
+	w.WriteHeader(code)
+	w.Write(answer)
+}
diff --git a/discovery/src/asapo_discovery/server/routes_test.go b/discovery/src/asapo_discovery/server/routes_test.go
index f15fb60704580489017776c1ef40916eb669c892..e72d0c55abfba927659cf6e56f651046b504e2a0 100644
--- a/discovery/src/asapo_discovery/server/routes_test.go
+++ b/discovery/src/asapo_discovery/server/routes_test.go
@@ -1,6 +1,7 @@
 package server
 
 import (
+	"asapo_common/version"
 	"github.com/stretchr/testify/mock"
 	"github.com/stretchr/testify/suite"
 	"asapo_common/logger"
@@ -31,9 +32,9 @@ type GetServicesTestSuite struct {
 
 func (suite *GetServicesTestSuite) SetupTest() {
 	requestHandler = new(request_handler.StaticRequestHandler)
-	var s common.Settings= common.Settings{Receiver:common.ReceiverInfo{MaxConnections:10,StaticEndpoints:[]string{"ip1","ip2"}},
-	Broker:common.BrokerInfo{StaticEndpoint:"ip_broker"},Mongo:common.MongoInfo{StaticEndpoint:"ip_mongo"},
-		FileTransferService:common.FtsInfo{StaticEndpoint:"ip_fts"}}
+	var s common.Settings = common.Settings{Receiver: common.ReceiverInfo{MaxConnections: 10, StaticEndpoints: []string{"ip1", "ip2"}},
+		Broker: common.BrokerInfo{StaticEndpoint: "ip_broker"}, Mongo: common.MongoInfo{StaticEndpoint: "ip_mongo"},
+		FileTransferService: common.FtsInfo{StaticEndpoint: "ip_fts"}}
 
 	requestHandler.Init(s)
 	logger.SetMockLog()
@@ -58,27 +59,63 @@ func (suite *GetServicesTestSuite) TestWrongPath() {
 	suite.Equal(http.StatusNotFound, w.Code, "wrong path")
 }
 
-func (suite *GetServicesTestSuite) TestGetReceivers() {
-	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing get "+common.NameReceiverService)))
-
-	w := doRequest("/asapo-receiver")
-
-	suite.Equal(http.StatusOK, w.Code, "code ok")
-	suite.Equal(w.Body.String(), "{\"MaxConnections\":10,\"Uris\":[\"ip1\",\"ip2\"]}", "result")
-	assertExpectations(suite.T())
+type requestTest struct {
+request string
+code int
+message string
 }
 
+var receiverTests = []requestTest {
+	{"/" + version.GetDiscoveryApiVersion()+"/asapo-receiver",http.StatusBadRequest,"protocol missing"},
+	{"/" + version.GetDiscoveryApiVersion()+"/asapo-receiver?protocol=v0.2",http.StatusUnsupportedMediaType,"wrong protocol"},
+	{"/" + version.GetDiscoveryApiVersion()+"/asapo-receiver?protocol=v0.1",http.StatusOK,"ok"},
+}
 
-func (suite *GetServicesTestSuite) TestGetBroker() {
-	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing get "+common.NameBrokerService)))
+func (suite *GetServicesTestSuite) TestGetReceivers() {
+	for _,test:= range receiverTests {
+		if test.code == http.StatusOK {
+			logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("validating producer")))
+			logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing get "+common.NameReceiverService)))
+		} else {
+			logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("validating producer")))
+		}
+
+		w := doRequest(test.request)
+
+		suite.Equal(test.code, w.Code, test.message)
+		if test.code == http.StatusOK {
+			suite.Equal(w.Body.String(), "{\"MaxConnections\":10,\"Uris\":[\"ip1\",\"ip2\"]}", "result")
+		}
+		assertExpectations(suite.T())
+	}
 
-	w := doRequest("/asapo-broker")
+}
 
-	suite.Equal(http.StatusOK, w.Code, "code ok")
-	suite.Equal(w.Body.String(), "ip_broker", "result")
-	assertExpectations(suite.T())
+var brokerTests = []requestTest {
+	{"/" + version.GetDiscoveryApiVersion()+"/asapo-broker",http.StatusBadRequest,"protocol missing"},
+	{"/" + version.GetDiscoveryApiVersion()+"/asapo-broker?protocol=v0.2",http.StatusUnsupportedMediaType,"wrong protocol"},
+	{"/" + version.GetDiscoveryApiVersion()+"/asapo-broker?protocol=v0.1",http.StatusOK,"ok"},
+}
+func (suite *GetServicesTestSuite) TestGetBroker() {
+	for _,test:= range brokerTests {
+		if test.code == http.StatusOK {
+			logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("validating consumer")))
+			logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing get "+common.NameBrokerService)))
+		} else {
+			logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("validating consumer")))
+		}
+
+		w := doRequest(test.request)
+
+		suite.Equal(test.code, w.Code, test.message)
+		if test.code == http.StatusOK {
+			suite.Equal(w.Body.String(), "ip_broker", "result")
+		}
+		assertExpectations(suite.T())
+	}
 }
 
+
 func (suite *GetServicesTestSuite) TestGetMongo() {
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing get "+common.NameMongoService)))
 
@@ -91,10 +128,23 @@ func (suite *GetServicesTestSuite) TestGetMongo() {
 
 func (suite *GetServicesTestSuite) TestGetFts() {
 	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing get "+common.NameFtsService)))
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("validating")))
 
-	w := doRequest("/asapo-file-transfer")
+	w := doRequest("/" + version.GetDiscoveryApiVersion()+"/asapo-file-transfer?protocol=v0.1")
 
 	suite.Equal(http.StatusOK, w.Code, "code ok")
 	suite.Equal(w.Body.String(), "ip_fts", "result")
 	assertExpectations(suite.T())
 }
+
+func (suite *GetServicesTestSuite) TestGetVersions() {
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing get version")))
+
+	w := doRequest("/" + version.GetDiscoveryApiVersion() + "/version")
+
+	suite.Equal(http.StatusOK, w.Code, "code ok")
+	// we don't really check what it returns, just that the route is ok
+	suite.Contains(w.Body.String(), version.GetVersion(), "core version")
+	suite.Contains(w.Body.String(), "supportedProtocols", "protocols")
+	assertExpectations(suite.T())
+}
diff --git a/examples/consumer/getnext/check_linux.sh b/examples/consumer/getnext/check_linux.sh
index 21c2bf6842cd9a06edd94e70c5d02a826b99e4f6..2c853792cf60bc9a171c6664a2a695ca71c0791f 100644
--- a/examples/consumer/getnext/check_linux.sh
+++ b/examples/consumer/getnext/check_linux.sh
@@ -4,7 +4,7 @@ source_path=dummy
 beamtime_id=test_run
 data_source=detector
 database_name=${beamtime_id}_${data_source}
-token_test_run=K38Mqc90iRv8fC7prcFHd994mF_wfUiJnWBfIjIzieo=
+token_test_run=$BT_TEST_RUN_TOKEN
 
 set -e
 
@@ -15,12 +15,14 @@ Cleanup() {
     nomad stop nginx
     nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
     nomad stop discovery
+    nomad stop authorizer
     nomad stop broker
-	echo "db.dropDatabase()" | mongo ${database_name}
+  	echo "db.dropDatabase()" | mongo ${database_name}
 }
 
 nomad run nginx.nmd
 nomad run discovery.nmd
+nomad run authorizer.nmd
 nomad run broker.nmd
 
 for i in `seq 1 3`;
diff --git a/examples/consumer/getnext/check_windows.bat b/examples/consumer/getnext/check_windows.bat
index 62b2600c6aaa80f1b4f4e794f1fbc5e7fedc04f0..1a3db40dff18fa2808fcdf3c3957eeb40f990e58 100644
--- a/examples/consumer/getnext/check_windows.bat
+++ b/examples/consumer/getnext/check_windows.bat
@@ -5,7 +5,7 @@ SET data_source=detector
 SET database_name=%beamtime_id%_%data_source%
 
 SET mongo_exe="c:\Program Files\MongoDB\Server\4.2\bin\mongo.exe"
-set token_test_run=K38Mqc90iRv8fC7prcFHd994mF_wfUiJnWBfIjIzieo=
+set token_test_run=%BT_TEST_RUN_TOKEN%
 
 call start_services.bat
 
diff --git a/examples/consumer/getnext/getnext.cpp b/examples/consumer/getnext/getnext.cpp
index 5e90a3885be448262e12bbd099278deb62eb6f8b..d30e87e578210fb3776e34c18c1fd4d332ad321b 100644
--- a/examples/consumer/getnext/getnext.cpp
+++ b/examples/consumer/getnext/getnext.cpp
@@ -19,6 +19,7 @@ std::mutex lock;
 
 uint64_t file_size = 0;
 
+
 inline std::string ConnectionTypeToString(asapo::NetworkConnectionType type) {
     switch (type) {
     case asapo::NetworkConnectionType::kUndefined:
@@ -119,6 +120,7 @@ StartThreads(const Args& params, std::vector<int>* nfiles, std::vector<int>* err
             if (err) {
                 (*errors)[i] += ProcessError(err);
                 lock.unlock();
+                exit(EXIT_FAILURE);
                 return;
             }
         }
@@ -263,7 +265,6 @@ void TryGetStream(Args* args) {
 }
 
 int main(int argc, char* argv[]) {
-    asapo::ExitAfterPrintVersionIfNeeded("GetNext consumer Example", argc, argv);
     Args params;
     params.datasets = false;
     if (argc != 8 && argc != 9) {
diff --git a/examples/consumer/getnext_python/check_linux.sh b/examples/consumer/getnext_python/check_linux.sh
index a600692a552c9aa11c75ba0030af91dcaec41e1a..50c4b25cfa5364a5566b8440a6296649cb043497 100644
--- a/examples/consumer/getnext_python/check_linux.sh
+++ b/examples/consumer/getnext_python/check_linux.sh
@@ -4,7 +4,7 @@ source_path=dummy
 beamtime_id=test_run
 data_source=detector
 database_name=${beamtime_id}_${data_source}
-token_test_run=K38Mqc90iRv8fC7prcFHd994mF_wfUiJnWBfIjIzieo=
+token_test_run=$BT_TEST_RUN_TOKEN
 group_id=bif31l2uiddd4r0q6b40
 set -e
 
@@ -15,12 +15,14 @@ Cleanup() {
     nomad stop nginx
     nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
     nomad stop discovery
+    nomad stop authorizer
     nomad stop broker
 	echo "db.dropDatabase()" | mongo ${database_name}
 }
 
 nomad run nginx.nmd
 nomad run discovery.nmd
+nomad run authorizer.nmd
 nomad run broker.nmd
 
 for i in `seq 1 3`;
diff --git a/examples/consumer/getnext_python/check_windows.bat b/examples/consumer/getnext_python/check_windows.bat
index c546f6108686ab0f4ca55632d599f2711e222e3f..9b96ec00e6ddc2373237820eb86ee333f0f2b7b0 100644
--- a/examples/consumer/getnext_python/check_windows.bat
+++ b/examples/consumer/getnext_python/check_windows.bat
@@ -4,7 +4,7 @@ SET data_source=detector
 SET database_name=%beamtime_id%_%data_source%
 
 SET mongo_exe="c:\Program Files\MongoDB\Server\4.2\bin\mongo.exe"
-set token_test_run=K38Mqc90iRv8fC7prcFHd994mF_wfUiJnWBfIjIzieo=
+set token_test_run=%BT_TEST_RUN_TOKEN%
 set group_id=bif31l2uiddd4r0q6b40
 
 call start_services.bat
diff --git a/examples/pipeline/in_to_out/check_linux.sh b/examples/pipeline/in_to_out/check_linux.sh
index 8540d411d14f789d3902a71f2ace9a24c4c5b1d8..120bad4d0fc3a2e95e40fe9397e154f98bda7855 100644
--- a/examples/pipeline/in_to_out/check_linux.sh
+++ b/examples/pipeline/in_to_out/check_linux.sh
@@ -10,7 +10,8 @@ indatabase_name=${beamtime_id}_${data_source_in}
 outdatabase_name=${beamtime_id}_${data_source_out}
 outdatabase_name2=${beamtime_id}_${data_source_out2}
 
-token=IEfwsWa0GXky2S3MkxJSUHJT1sI8DD5teRdjBUXVRxk=
+#asapo_test read/write token
+token=$ASAPO_TEST_RW_TOKEN
 
 beamline=test
 receiver_root_folder=/tmp/asapo/receiver/files
@@ -18,8 +19,6 @@ facility=test_facility
 year=2019
 receiver_folder=${receiver_root_folder}/${facility}/gpfs/${beamline}/${year}/data/${beamtime_id}
 
-
-
 set -e
 
 trap Cleanup EXIT
diff --git a/examples/pipeline/in_to_out/check_windows.bat b/examples/pipeline/in_to_out/check_windows.bat
index 5b0f9bb514ae45e2f867a6e88debf5e3ab4c2856..57485dbe442e4ae171f7e63641a69aa5a31a4d5a 100644
--- a/examples/pipeline/in_to_out/check_windows.bat
+++ b/examples/pipeline/in_to_out/check_windows.bat
@@ -8,7 +8,7 @@ SET indatabase_name=%beamtime_id%_%data_source_in%
 SET outdatabase_name=%beamtime_id%_%data_source_out%
 SET outdatabase_name2=%beamtime_id%_%data_source_out2%
 
-SET token=IEfwsWa0GXky2S3MkxJSUHJT1sI8DD5teRdjBUXVRxk=
+SET token=%ASAPO_TEST_RW_TOKEN%
 
 SET beamline=test
 
diff --git a/examples/pipeline/in_to_out/in_to_out.cpp b/examples/pipeline/in_to_out/in_to_out.cpp
index 801d9f32a0653776f4a2093a2a3d545bcad19dc3..e96985a02266e9a7e5fa3bde748e0ba6361e3478 100644
--- a/examples/pipeline/in_to_out/in_to_out.cpp
+++ b/examples/pipeline/in_to_out/in_to_out.cpp
@@ -206,7 +206,6 @@ std::unique_ptr<asapo::Producer> CreateProducer(const Args &args) {
 }
 
 int main(int argc, char* argv[]) {
-    asapo::ExitAfterPrintVersionIfNeeded("GetNext consumer Example", argc, argv);
     Args args;
     if (argc != 11) {
         std::cout << "Usage: " + std::string{argv[0]}
diff --git a/examples/pipeline/in_to_out_python/CMakeLists.txt b/examples/pipeline/in_to_out_python/CMakeLists.txt
index cebedb3f70ecaab5e94677e78d192db8e1a8a8a8..bdac25363e1a04cf6afa0b7b7ca85ccaaf3feed3 100644
--- a/examples/pipeline/in_to_out_python/CMakeLists.txt
+++ b/examples/pipeline/in_to_out_python/CMakeLists.txt
@@ -13,4 +13,3 @@ endif()
 file(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/in_to_out.py TEST_SCRIPT )
 
 add_script_test("${TARGET_NAME}" "${Python_EXECUTABLE} ${PYTHON_LIBS_CONSUMER} ${PYTHON_LIBS_PRODUCER} ${TEST_SCRIPT} " nomem)
-
diff --git a/examples/pipeline/in_to_out_python/check_linux.sh b/examples/pipeline/in_to_out_python/check_linux.sh
index 444c6ceeac2c5783d5611cd4490eb768b9702ad8..12f7444f8c7462af63d647efb8eb1ebed74c4ec1 100644
--- a/examples/pipeline/in_to_out_python/check_linux.sh
+++ b/examples/pipeline/in_to_out_python/check_linux.sh
@@ -12,7 +12,8 @@ nthreads=4
 indatabase_name=${beamtime_id}_${data_source_in}
 outdatabase_name=${beamtime_id}_${data_source_out}
 
-token=IEfwsWa0GXky2S3MkxJSUHJT1sI8DD5teRdjBUXVRxk=
+#asapo_test read/write token
+token=$ASAPO_TEST_RW_TOKEN
 
 beamline=test
 receiver_root_folder=/tmp/asapo/receiver/files
diff --git a/examples/pipeline/in_to_out_python/check_windows.bat b/examples/pipeline/in_to_out_python/check_windows.bat
index 3160af194c3d56c15a3c943700d2541e1bb2c896..ff761ad8262930e138399b75e106c77b25c0e89a 100644
--- a/examples/pipeline/in_to_out_python/check_windows.bat
+++ b/examples/pipeline/in_to_out_python/check_windows.bat
@@ -6,7 +6,7 @@ SET data_source_out=simulation
 SET indatabase_name=%beamtime_id%_%data_source_in%
 SET outdatabase_name=%beamtime_id%_%data_source_out%
 
-SET token=IEfwsWa0GXky2S3MkxJSUHJT1sI8DD5teRdjBUXVRxk=
+SET token=%ASAPO_TEST_RW_TOKEN%
 
 SET beamline=test
 
diff --git a/examples/producer/dummy-data-producer/dummy_data_producer.cpp b/examples/producer/dummy-data-producer/dummy_data_producer.cpp
index d983f01e2d419a5b5ca0356e746d4a64d16b54be..560cd20ba6b9f89aa52d40deae4cab08319f2666 100644
--- a/examples/producer/dummy-data-producer/dummy_data_producer.cpp
+++ b/examples/producer/dummy-data-producer/dummy_data_producer.cpp
@@ -69,7 +69,6 @@ void TryGetDataSourceAndToken(Args* args) {
 
 
 void ProcessCommandArguments(int argc, char* argv[], Args* args) {
-    asapo::ExitAfterPrintVersionIfNeeded("Dummy Data Producer", argc, argv);
     if (argc != 8 && argc != 9) {
         std::cout <<
                   "Usage: " << argv[0] <<
diff --git a/file_transfer/CMakeLists.txt b/file_transfer/CMakeLists.txt
index 9dee3c150e8b12f1fa60d089dbb29b7d02d04196..b6d2875b165ad833c414930f1f59faf9000b2ab4 100644
--- a/file_transfer/CMakeLists.txt
+++ b/file_transfer/CMakeLists.txt
@@ -1,20 +1,8 @@
 set (TARGET_NAME asapo-file-transfer)
 
-if (NOT "$ENV{GOPATH}" STREQUAL "")
-	set(GOPATH $ENV{GOPATH})
-endif()
-
-if (NOT GOPATH)
-    message (FATAL_ERROR "GOPATH not set")
-endif()
-
-message(STATUS "global gopath ${GOPATH}")
-
 IF(WIN32)
-    set (gopath "${GOPATH}\;${CMAKE_CURRENT_SOURCE_DIR}\;${CMAKE_SOURCE_DIR}/common/go")
     set (exe_name "${TARGET_NAME}.exe")
 ELSE()
-    set (gopath ${GOPATH}:${CMAKE_CURRENT_SOURCE_DIR}:${CMAKE_SOURCE_DIR}/common/go)
     set (exe_name "${TARGET_NAME}")
 ENDIF()
 
@@ -23,8 +11,8 @@ include(testing_go)
 configure_file(docker/Dockerfile . COPYONLY)
 
 add_custom_target(asapo-file-transfer ALL
-    COMMAND  ${CMAKE_COMMAND} -E env GOPATH=${gopath}
-    go build ${GO_OPTS} -o ${exe_name} asapo_file_transfer/main
+    COMMAND go build ${GO_OPTS} -o ${CMAKE_CURRENT_BINARY_DIR}/${exe_name} main/file_transfer.go
+    WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/src/asapo_file_transfer
     VERBATIM)
 define_property(TARGET PROPERTY EXENAME
         BRIEF_DOCS <executable name>
@@ -32,4 +20,4 @@ define_property(TARGET PROPERTY EXENAME
 
 set_target_properties(asapo-file-transfer PROPERTIES EXENAME ${CMAKE_CURRENT_BINARY_DIR}/${exe_name})
 
-gotest(${TARGET_NAME}  "${CMAKE_CURRENT_SOURCE_DIR}" "./...")
+gotest(${TARGET_NAME}  "${CMAKE_CURRENT_SOURCE_DIR}/src/asapo_file_transfer" "./...")
diff --git a/file_transfer/src/asapo_file_transfer/go.mod b/file_transfer/src/asapo_file_transfer/go.mod
new file mode 100644
index 0000000000000000000000000000000000000000..f7d1e31423978ab00347ff894ead26751d5a81a0
--- /dev/null
+++ b/file_transfer/src/asapo_file_transfer/go.mod
@@ -0,0 +1,13 @@
+module asapo_file_transfer
+
+go 1.16
+
+replace asapo_common v0.0.0 => ../../../common/go/src/asapo_common
+
+require (
+	asapo_common v0.0.0
+	github.com/dgrijalva/jwt-go v3.2.0+incompatible // indirect
+	github.com/gorilla/mux v1.8.0 // indirect
+	github.com/sirupsen/logrus v1.8.0 // indirect
+	github.com/stretchr/testify v1.7.0
+)
diff --git a/file_transfer/src/asapo_file_transfer/go.sum b/file_transfer/src/asapo_file_transfer/go.sum
new file mode 100644
index 0000000000000000000000000000000000000000..6f35f25f5853eb59ed8eb5b781b6410839850826
--- /dev/null
+++ b/file_transfer/src/asapo_file_transfer/go.sum
@@ -0,0 +1,24 @@
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
+github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
+github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
+github.com/magefile/mage v1.10.0 h1:3HiXzCUY12kh9bIuyXShaVe529fJfyqoVM42o/uom2g=
+github.com/magefile/mage v1.10.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/sirupsen/logrus v1.8.0 h1:nfhvjKcUMhBMVqbKHJlk5RPrrfYr/NMo3692g0dwfWU=
+github.com/sirupsen/logrus v1.8.0/go.mod h1:4GuYW9TZmE769R5STWrRakJc4UqQ3+QQ95fyz7ENv1A=
+github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/file_transfer/src/asapo_file_transfer/server/listroutes.go b/file_transfer/src/asapo_file_transfer/server/listroutes.go
index 03fba92d86e41b3418eba91878c4253cb5ffb2f0..54188582ae387e1c61118539ad835eebba1ed36d 100644
--- a/file_transfer/src/asapo_file_transfer/server/listroutes.go
+++ b/file_transfer/src/asapo_file_transfer/server/listroutes.go
@@ -8,7 +8,7 @@ var listRoutes = utils.Routes{
 	utils.Route{
 		"Transfer File",
 		"POST",
-		"/transfer",
+		"/{apiver}/transfer",
 		routeFileTransfer,
 	},
 	utils.Route{
diff --git a/file_transfer/src/asapo_file_transfer/server/transfer.go b/file_transfer/src/asapo_file_transfer/server/transfer.go
index 9d7b76a0f5b201fabd9f30bec26dff10803ea7bd..8e6817007b75dc25ab7b589814c13a0788ea42f6 100644
--- a/file_transfer/src/asapo_file_transfer/server/transfer.go
+++ b/file_transfer/src/asapo_file_transfer/server/transfer.go
@@ -2,7 +2,9 @@ package server
 
 import (
 	log "asapo_common/logger"
+	"asapo_common/structs"
 	"asapo_common/utils"
+	"asapo_common/version"
 	"encoding/json"
 	"errors"
 	"net/http"
@@ -24,8 +26,8 @@ func Exists(name string) bool {
 
 
 func checkClaim(r *http.Request,request* fileTransferRequest) (int,error) {
-	var extraClaim utils.FolderTokenTokenExtraClaim
-	if err := utils.JobClaimFromContext(r, &extraClaim); err != nil {
+	var extraClaim structs.FolderTokenTokenExtraClaim
+	if err := utils.JobClaimFromContext(r, nil, &extraClaim); err != nil {
 		return http.StatusInternalServerError,err
 	}
 	if extraClaim.RootFolder!=request.Folder {
@@ -87,7 +89,17 @@ func serveFileSize(w http.ResponseWriter, r *http.Request, fullName string) {
 	w.Write(b)
 }
 
+
+func checkFtsApiVersion(w http.ResponseWriter, r *http.Request) bool {
+	_, ok := utils.PrecheckApiVersion(w, r, version.GetFtsApiVersion())
+	return ok
+}
+
 func routeFileTransfer(w http.ResponseWriter, r *http.Request) {
+	if ok := checkFtsApiVersion(w, r); !ok {
+		return
+	}
+
 	fullName, status,err := checkRequest(r);
 	if err != nil {
 		utils.WriteServerError(w,err,status)
diff --git a/file_transfer/src/asapo_file_transfer/server/transfer_test.go b/file_transfer/src/asapo_file_transfer/server/transfer_test.go
index 68be2f18dfa761419445c73bd02499a49c8184b5..378426407d26265af17b2ee6e522d023a540b207 100644
--- a/file_transfer/src/asapo_file_transfer/server/transfer_test.go
+++ b/file_transfer/src/asapo_file_transfer/server/transfer_test.go
@@ -1,6 +1,7 @@
 package server
 
 import (
+	"asapo_common/structs"
 	"asapo_common/utils"
 	"github.com/stretchr/testify/assert"
 	"net/http"
@@ -34,10 +35,10 @@ func prepareToken(folder string) string{
 	auth := utils.NewJWTAuth("key")
 
 	var claims utils.CustomClaims
-	var extraClaim utils.FolderTokenTokenExtraClaim
+	var extraClaim structs.FolderTokenTokenExtraClaim
 	extraClaim.RootFolder = folder
 	claims.ExtraClaims = &extraClaim
-	claims.Duration = time.Duration(1) * time.Minute
+	claims.SetExpiration(time.Duration(1) * time.Minute)
 	token,_ := auth.GenerateToken(&claims)
 	return token
 }
@@ -71,7 +72,7 @@ func TestTransferFile(t *testing.T) {
 
 	for _, test := range transferFileTests {
 		request :=  makeRequest(fileTransferRequest{test.folder,test.fname})
-		w := doPostRequest("/transfer",request,test.token)
+		w := doPostRequest("/v0.1/transfer",request,test.token)
 		if test.status==http.StatusOK {
 			body, _ := ioutil.ReadAll(w.Body)
 			body_str:=string(body)
@@ -91,7 +92,7 @@ func TestTransferFileSize(t *testing.T) {
 
 	test:=transferFileTests[0]
 		request :=  makeRequest(fileTransferRequest{test.folder,test.fname})
-		w := doPostRequest("/transfer?sizeonly=true",request,test.token)
+		w := doPostRequest("/v0.1/transfer?sizeonly=true",request,test.token)
 		if test.status==http.StatusOK {
 			body, _ := ioutil.ReadAll(w.Body)
 			body_str:=string(body)
@@ -100,3 +101,11 @@ func TestTransferFileSize(t *testing.T) {
 		}
 		assert.Equal(t, test.status, w.Code, test.message)
 }
+
+
+func TestTransferWrongApiVersion(t *testing.T) {
+	request :=  makeRequest(fileTransferRequest{"folder","fname"})
+	token := prepareToken("folder")
+	w := doPostRequest("/v0.2/transfer?sizeonly=true",request,token)
+	assert.Equal(t, http.StatusUnsupportedMediaType, w.Code, "wrong api version")
+}
diff --git a/producer/api/cpp/CMakeLists.txt b/producer/api/cpp/CMakeLists.txt
index 896c05265d55a5b9c9cea5fbfc75fc877d8716d0..24b5722350e4392b898e3d4b350055184c8b8c4b 100644
--- a/producer/api/cpp/CMakeLists.txt
+++ b/producer/api/cpp/CMakeLists.txt
@@ -14,7 +14,7 @@ set(SOURCE_FILES
 # Library
 ################################
 add_library(${TARGET_NAME} STATIC ${SOURCE_FILES} $<TARGET_OBJECTS:system_io> $<TARGET_OBJECTS:logger> $<TARGET_OBJECTS:json_parser>
-        $<TARGET_OBJECTS:curl_http_client> $<TARGET_OBJECTS:request_pool> $<TARGET_OBJECTS:data_structs>)
+        $<TARGET_OBJECTS:curl_http_client> $<TARGET_OBJECTS:request_pool> $<TARGET_OBJECTS:data_structs> $<TARGET_OBJECTS:version>)
 target_include_directories(${TARGET_NAME} PUBLIC include ${ASAPO_CXX_COMMON_INCLUDE_DIR})
 target_link_libraries(${TARGET_NAME} ${CURL_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
 
diff --git a/producer/api/cpp/include/asapo/asapo_producer.h b/producer/api/cpp/include/asapo/asapo_producer.h
index 1e5ea176cd474cfb25e98e0523ea16ee0ac0418b..152abe230de7e0fe2f7604c55d0f7f3885fccb76 100644
--- a/producer/api/cpp/include/asapo/asapo_producer.h
+++ b/producer/api/cpp/include/asapo/asapo_producer.h
@@ -2,7 +2,6 @@
 #define ASAPO_ASAPO_PRODUCER_H
 
 #include "asapo/common/io_error.h"
-#include "asapo/common/version.h"
 
 #include "asapo/producer/producer.h"
 #include "asapo/producer/producer_error.h"
diff --git a/producer/api/cpp/include/asapo/producer/producer.h b/producer/api/cpp/include/asapo/producer/producer.h
index c8de7d637ff86f09f2f976e52f792e23a54cc816..a2fde18ebeeb2f3ae0a02784bf6f5ccf39382bc3 100644
--- a/producer/api/cpp/include/asapo/producer/producer.h
+++ b/producer/api/cpp/include/asapo/producer/producer.h
@@ -23,6 +23,15 @@ class Producer {
 
     virtual ~Producer() = default;
 
+  //! Return client and server version information
+  /*!
+    \param client_info - receives the client version string (may be nullptr)
+    \param server_info - receives the server version string (may be nullptr)
+    \param supported - set to true if the client is supported by the server
+    \return nullptr if the command was successful, otherwise error.
+  */
+  virtual Error GetVersionInfo(std::string* client_info,std::string* server_info, bool* supported) const = 0;
+
     //! Get stream information from receiver
     /*!
       \param stream - stream to send messages to
diff --git a/producer/api/cpp/include/asapo/producer/producer_error.h b/producer/api/cpp/include/asapo/producer/producer_error.h
index 2bcc86593fe5cb37f0cab5145a1ddce1a5dc5a66..6f8e66ea926a60bbba909809893e350171c2637e 100644
--- a/producer/api/cpp/include/asapo/producer/producer_error.h
+++ b/producer/api/cpp/include/asapo/producer/producer_error.h
@@ -2,6 +2,7 @@
 #define ASAPO_PRODUCER_ERROR_H
 
 #include "asapo/common/error.h"
+#include "asapo/common/data_structs.h"
 
 namespace asapo {
 
@@ -12,11 +13,18 @@ enum class ProducerErrorType {
     kWrongInput,
     kServerWarning,
     kReAuthorizationNeeded,
+    kUnsupportedClient,
     kTimeout
 };
 
 using ProducerErrorTemplate = ServiceErrorTemplate<ProducerErrorType, ErrorType::kProducerError>;
 
+class OriginalData : public CustomErrorData {
+ public:
+  MessageData data;
+};
+
+
 namespace ProducerErrorTemplates {
 
 auto const kServerWarning = ProducerErrorTemplate {
@@ -47,6 +55,12 @@ auto const kReAuthorizationNeeded = ProducerErrorTemplate {
     "reauthorization needed", ProducerErrorType::kReAuthorizationNeeded
 };
 
+auto const kUnsupportedClient = ProducerErrorTemplate {
+    "cannot connect to asapo", ProducerErrorType::kUnsupportedClient
+};
+
+
+
 
 
 };
diff --git a/producer/api/cpp/src/producer_impl.cpp b/producer/api/cpp/src/producer_impl.cpp
index 0ed76a1b699e4a3335eba9812ed8019b7306c802..2a4d38a5b11699300300d77e93db4992ae84ce23 100644
--- a/producer/api/cpp/src/producer_impl.cpp
+++ b/producer/api/cpp/src/producer_impl.cpp
@@ -1,27 +1,24 @@
 #include <iostream>
-#include <iostream>
 #include <cstring>
 #include <future>
 
 #include "producer_impl.h"
 #include "producer_logger.h"
-#include "asapo/io/io_factory.h"
 #include "asapo/producer/producer_error.h"
 #include "producer_request_handler_factory.h"
 #include "producer_request.h"
 #include "asapo/common/data_structs.h"
-
+#include "asapo/request/request_pool_error.h"
+#include "asapo/http_client/http_client.h"
+#include "asapo/common/internal/version.h"
 
 namespace  asapo {
 
 const size_t ProducerImpl::kDiscoveryServiceUpdateFrequencyMs = 10000; // 10s
-const std::string ProducerImpl::kFinishStreamKeyword = "asapo_finish_stream";
-const std::string ProducerImpl::kNoNextStreamKeyword = "asapo_no_next";
-
 
 ProducerImpl::ProducerImpl(std::string endpoint, uint8_t n_processing_threads, uint64_t timeout_ms,
                            asapo::RequestHandlerType type):
-    log__{GetDefaultProducerLogger()}, timeout_ms_{timeout_ms} {
+    log__{GetDefaultProducerLogger()},httpclient__{DefaultHttpClient()}, timeout_ms_{timeout_ms},endpoint_{endpoint} {
     switch (type) {
     case RequestHandlerType::kTcp:
         discovery_service_.reset(new ReceiverDiscoveryService{endpoint, ProducerImpl::kDiscoveryServiceUpdateFrequencyMs});
@@ -96,6 +93,48 @@ Error CheckProducerRequest(const MessageHeader& message_header, uint64_t ingest_
     return CheckIngestMode(ingest_mode);
 }
 
+Error HandleErrorFromPool(Error original_error,bool manage_data_memory) {
+    if (original_error == nullptr) {
+        return nullptr;
+    }
+    Error producer_error = ProducerErrorTemplates::kRequestPoolIsFull.Generate(original_error->Explain());
+    auto err_data = static_cast<OriginalRequest*>(original_error->GetCustomData());
+    if (!err_data) {
+        return producer_error;
+    }
+    auto producer_request = static_cast<ProducerRequest*>(err_data->request.get());
+    if (!producer_request) {
+        return producer_error;
+    }
+    MessageData original_data = std::move(producer_request->data);
+    if (original_data == nullptr) {
+        return producer_error;
+    }
+    if (!manage_data_memory) {
+        original_data.release();
+    } else {
+        OriginalData* original = new asapo::OriginalData{};
+        original->data = std::move(original_data);
+        producer_error->SetCustomData(std::unique_ptr<asapo::CustomErrorData>{original});
+    }
+    return producer_error;
+}
+
+Error HandleInputError(Error original_error,MessageData data, bool manage_data_memory) {
+    if (data == nullptr) {
+        return original_error;
+    }
+    if (!manage_data_memory) {
+        data.release();
+        return original_error;
+    }
+
+    OriginalData* original = new asapo::OriginalData{};
+    original->data = std::move(data);
+    original_error->SetCustomData(std::unique_ptr<asapo::CustomErrorData>{original});
+    return original_error;
+}
+
 Error ProducerImpl::Send(const MessageHeader& message_header,
                          std::string stream,
                          MessageData data,
@@ -105,19 +144,17 @@ Error ProducerImpl::Send(const MessageHeader& message_header,
                          bool manage_data_memory) {
     auto err = CheckProducerRequest(message_header, ingest_mode, stream);
     if (err) {
-        if (!manage_data_memory) {
-            data.release();
-        }
         log__->Error("error checking request - " + err->Explain());
-        return err;
+        return HandleInputError(std::move(err),std::move(data),manage_data_memory);
     }
 
     auto request_header = GenerateNextSendRequest(message_header, std::move(stream), ingest_mode);
 
-    return request_pool__->AddRequest(std::unique_ptr<ProducerRequest> {new ProducerRequest{source_cred_string_, std::move(request_header),
+    err = request_pool__->AddRequest(std::unique_ptr<ProducerRequest> {new ProducerRequest{source_cred_string_, std::move(request_header),
                 std::move(data), std::move(message_header.user_metadata), std::move(full_path), callback, manage_data_memory, timeout_ms_}
     });
 
+    return HandleErrorFromPool(std::move(err),manage_data_memory);
 }
 
 bool WandTransferData(uint64_t ingest_mode) {
@@ -142,7 +179,7 @@ Error ProducerImpl::Send(const MessageHeader &message_header,
                          std::string stream,
                          RequestCallback callback) {
     if (auto err = CheckData(ingest_mode, message_header, &data)) {
-        return err;
+        return HandleInputError(std::move(err),std::move(data),true);
     }
     return Send(message_header, std::move(stream), std::move(data), "", ingest_mode, callback, true);
 
@@ -214,9 +251,10 @@ Error ProducerImpl::SendMetadata(const std::string& metadata, RequestCallback ca
     request_header.custom_data[kPosIngestMode] = asapo::IngestModeFlags::kTransferData | asapo::IngestModeFlags::kStoreInDatabase;
     MessageData data{new uint8_t[metadata.size()]};
     strncpy((char*)data.get(), metadata.c_str(), metadata.size());
-    return request_pool__->AddRequest(std::unique_ptr<ProducerRequest> {new ProducerRequest{source_cred_string_, std::move(request_header),
+    auto err = request_pool__->AddRequest(std::unique_ptr<ProducerRequest> {new ProducerRequest{source_cred_string_, std::move(request_header),
                 std::move(data), "", "", callback, true, timeout_ms_}
     });
+    return HandleErrorFromPool(std::move(err), true);
 }
 
 Error ProducerImpl::Send__(const MessageHeader &message_header,
@@ -278,7 +316,7 @@ void ActivatePromise(std::shared_ptr<std::promise<StreamInfoResult>> promise, Re
                      Error err) {
     StreamInfoResult res;
     if (err == nullptr) {
-        auto ok = res.sinfo.SetFromJson(payload.response,true);
+        auto ok = res.sinfo.SetFromJson(payload.response);
         res.err = ok ? nullptr : ProducerErrorTemplates::kInternalServerError.Generate(
                       std::string("cannot read JSON string from server response: ") + payload.response).release();
     } else {
@@ -354,5 +392,32 @@ void ProducerImpl::SetRequestsQueueLimits(uint64_t size, uint64_t volume) {
     request_pool__->SetLimits(RequestPoolLimits{size,volume});
 }
 
+Error ProducerImpl::GetVersionInfo(std::string* client_info, std::string* server_info, bool* supported) const {
+    if (client_info == nullptr && server_info == nullptr && supported == nullptr) {
+        return ProducerErrorTemplates::kWrongInput.Generate("missing parameters");
+    }
+    if (client_info != nullptr) {
+        *client_info =
+            "software version: " + std::string(kVersion) + ", producer protocol: " + kProducerProtocol.GetVersion();
+    }
+
+    if (server_info != nullptr || supported != nullptr) {
+        return GetServerVersionInfo(server_info, supported);
+    }
+    return nullptr;
+}
+
+Error ProducerImpl::GetServerVersionInfo(std::string* server_info,
+                                         bool* supported) const {
+    auto endpoint = endpoint_ +"/asapo-discovery/"+kProducerProtocol.GetDiscoveryVersion()+
+        "/version?client=producer&protocol="+kProducerProtocol.GetVersion();
+    HttpCode  code;
+    Error err;
+    auto response = httpclient__->Get(endpoint, &code, &err);
+    if (err) {
+        return err;
+    }
+    return ExtractVersionFromResponse(response,"producer",server_info,supported);
+}
 
 }
\ No newline at end of file
diff --git a/producer/api/cpp/src/producer_impl.h b/producer/api/cpp/src/producer_impl.h
index 9108a20dad3bdbbc6f622c268a0a74e930dbe66f..53fda3df0a07508efce6dc7f7ef891af2601bf4a 100644
--- a/producer/api/cpp/src/producer_impl.h
+++ b/producer/api/cpp/src/producer_impl.h
@@ -24,14 +24,15 @@ class ProducerImpl : public Producer {
   std::unique_ptr<RequestHandlerFactory> request_handler_factory_;
  public:
   static const size_t kDiscoveryServiceUpdateFrequencyMs;
-  static const std::string kFinishStreamKeyword;
-  static const std::string kNoNextStreamKeyword;
 
   explicit ProducerImpl(std::string endpoint, uint8_t n_processing_threads, uint64_t timeout_ms,
                         asapo::RequestHandlerType type);
   ProducerImpl(const ProducerImpl &) = delete;
   ProducerImpl &operator=(const ProducerImpl &) = delete;
 
+
+  Error GetVersionInfo(std::string* client_info,std::string* server_info, bool* supported) const override;
+
   StreamInfo GetStreamInfo(std::string stream, uint64_t timeout_ms, Error* err) const override;
   StreamInfo GetLastStream(uint64_t timeout_ms, Error* err) const override;
 
@@ -58,6 +59,7 @@ class ProducerImpl : public Producer {
                                   RequestCallback callback) override;
 
   AbstractLogger* log__;
+  std::unique_ptr<HttpClient> httpclient__;
   std::unique_ptr<RequestPool> request_pool__;
 
   Error SetCredentials(SourceCredentials source_cred) override;
@@ -76,6 +78,9 @@ class ProducerImpl : public Producer {
                                                uint64_t ingest_mode);
   std::string source_cred_string_;
   uint64_t timeout_ms_;
+  std::string endpoint_;
+  Error GetServerVersionInfo(std::string* server_info,
+                             bool* supported) const;
 };
 
 struct StreamInfoResult {
diff --git a/producer/api/cpp/src/producer_request.cpp b/producer/api/cpp/src/producer_request.cpp
index 73a64c2a61052aae9d9cc68f4c717b77591e99ef..7d41d0d44c1311674214c1d72bf72e0955d5f044 100644
--- a/producer/api/cpp/src/producer_request.cpp
+++ b/producer/api/cpp/src/producer_request.cpp
@@ -1,5 +1,6 @@
 #include <asapo/asapo_producer.h>
 #include "producer_request.h"
+#include "asapo/common/internal/version.h"
 
 namespace asapo {
 
@@ -24,6 +25,13 @@ ProducerRequest::ProducerRequest(std::string source_credentials,
     original_filepath{std::move(original_filepath)},
     callback{callback},
     manage_data_memory{manage_data_memory} {
+
+    if (kProducerProtocol.GetReceiverVersion().size()<kMaxVersionSize) {
+        strcpy(header.api_version, kProducerProtocol.GetReceiverVersion().c_str());
+    } else {
+        strcpy(header.api_version, "v0.0");
+    }
+
 }
 
 bool ProducerRequest::NeedSend() const {
diff --git a/producer/api/cpp/src/receiver_discovery_service.cpp b/producer/api/cpp/src/receiver_discovery_service.cpp
index 69b6ace5338ae8ec7d07114c85feb9b99a1c060a..b1130b7ec6043a9635c2336c57de719967e48eda 100644
--- a/producer/api/cpp/src/receiver_discovery_service.cpp
+++ b/producer/api/cpp/src/receiver_discovery_service.cpp
@@ -1,15 +1,17 @@
 #include "receiver_discovery_service.h"
 
-#include "producer_logger.h"
-#include "asapo/json_parser/json_parser.h"
-
 #include <iostream>
 #include <algorithm>
 #include <numeric>
 
+#include "producer_logger.h"
+#include "asapo/json_parser/json_parser.h"
+#include "asapo/common/internal/version.h"
+
 namespace  asapo {
 
-const std::string ReceiverDiscoveryService::kServiceEndpointSuffix = "/asapo-discovery/asapo-receiver";
+const std::string ReceiverDiscoveryService::kServiceEndpointSuffix = "/asapo-discovery/"+kProducerProtocol.GetDiscoveryVersion()
+    +"/asapo-receiver?protocol="+kConsumerProtocol.GetVersion();
 
 ReceiverDiscoveryService::ReceiverDiscoveryService(std::string endpoint, uint64_t update_frequency_ms): httpclient__{DefaultHttpClient()},
     log__{GetDefaultProducerLogger()},
diff --git a/producer/api/cpp/src/request_handler_tcp.cpp b/producer/api/cpp/src/request_handler_tcp.cpp
index 157c1212e6b24089d7ed8ace19d7de3f2fc26da5..85895cfd06c37dcd4257ad88c2c6bbf1d5be75e4 100644
--- a/producer/api/cpp/src/request_handler_tcp.cpp
+++ b/producer/api/cpp/src/request_handler_tcp.cpp
@@ -7,27 +7,35 @@
 namespace asapo {
 
 RequestHandlerTcp::RequestHandlerTcp(ReceiverDiscoveryService* discovery_service, uint64_t thread_id,
-                                     uint64_t* shared_counter):
-    io__{GenerateDefaultIO()}, log__{GetDefaultProducerLogger()}, discovery_service__{discovery_service}, thread_id_{thread_id},
+                                     uint64_t* shared_counter) :
+    io__{GenerateDefaultIO()},
+    log__{GetDefaultProducerLogger()},
+    discovery_service__{discovery_service},
+    thread_id_{thread_id},
     ncurrent_connections_{shared_counter} {
 }
 
-Error RequestHandlerTcp::Authorize(const std::string& source_credentials) {
-    GenericRequestHeader header{kOpcodeAuthorize, 0, 0, 0, source_credentials.c_str()};
+Error RequestHandlerTcp::Authorize(const std::string &source_credentials) {
+    GenericRequestHeader header{kOpcodeAuthorize, 0, 0, source_credentials.size(), ""};
     Error err;
     io__->Send(sd_, &header, sizeof(header), &err);
-    if(err) {
+    if (err) {
         return err;
     }
+
+    io__->Send(sd_, (void*) source_credentials.c_str(), (size_t) header.meta_size, &err);
+    if (err) {
+        return err;
+    }
+
     return ReceiveResponse(header, nullptr);
 }
 
-
-Error RequestHandlerTcp::ConnectToReceiver(const std::string& source_credentials, const std::string& receiver_address) {
+Error RequestHandlerTcp::ConnectToReceiver(const std::string &source_credentials, const std::string &receiver_address) {
     Error err;
 
     sd_ = io__->CreateAndConnectIPTCPSocket(receiver_address, &err);
-    if(err != nullptr) {
+    if (err != nullptr) {
         log__->Debug("cannot connect to receiver at " + receiver_address + " - " + err->Explain());
         return err;
     }
@@ -49,11 +57,10 @@ Error RequestHandlerTcp::ConnectToReceiver(const std::string& source_credentials
 Error RequestHandlerTcp::SendRequestContent(const ProducerRequest* request) {
     Error io_error;
     io__->Send(sd_, &(request->header), sizeof(request->header), &io_error);
-    if(io_error) {
+    if (io_error) {
         return io_error;
     }
 
-
     if (request->NeedSendMetaData()) {
         io__->Send(sd_, (void*) request->metadata.c_str(), (size_t) request->header.meta_size, &io_error);
         if (io_error) {
@@ -63,9 +70,9 @@ Error RequestHandlerTcp::SendRequestContent(const ProducerRequest* request) {
 
     if (request->NeedSend()) {
         if (request->DataFromFile()) {
-            io_error = io__->SendFile(sd_,  request->original_filepath, (size_t)request->header.data_size);
+            io_error = io__->SendFile(sd_, request->original_filepath, (size_t) request->header.data_size);
         } else {
-            io__->Send(sd_, (void*) request->data.get(), (size_t)request->header.data_size, &io_error);
+            io__->Send(sd_, (void*) request->data.get(), (size_t) request->header.data_size, &io_error);
         }
         if (io_error) {
             return io_error;
@@ -75,65 +82,69 @@ Error RequestHandlerTcp::SendRequestContent(const ProducerRequest* request) {
     return nullptr;
 }
 
-Error RequestHandlerTcp::ReceiveResponse(const GenericRequestHeader& request_header, std::string* response) {
+Error RequestHandlerTcp::ReceiveResponse(const GenericRequestHeader &request_header, std::string* response) {
     Error err;
     SendResponse sendDataResponse;
     io__->Receive(sd_, &sendDataResponse, sizeof(sendDataResponse), &err);
-    if(err != nullptr) {
+    if (err != nullptr) {
         return err;
     }
 
     switch (sendDataResponse.error_code) {
-    case kNetAuthorizationError : {
-        auto res_err = ProducerErrorTemplates::kWrongInput.Generate();
-        res_err->Append(sendDataResponse.message);
-        return res_err;
-    }
-    case kNetErrorWrongRequest : {
-        auto res_err = ProducerErrorTemplates::kWrongInput.Generate();
-        res_err->Append(sendDataResponse.message);
-        return res_err;
-    }
-    case kNetErrorWarning: {
-        auto res_err = ProducerErrorTemplates::kServerWarning.Generate();
-        res_err->Append(sendDataResponse.message);
-        return res_err;
-    }
-    case kNetErrorReauthorize: {
-        auto res_err = ProducerErrorTemplates::kReAuthorizationNeeded.Generate();
-        return res_err;
-    }
-    case kNetErrorNoError :
-        if (response) {
-            *response = sendDataResponse.message;
+        case kNetAuthorizationError : {
+            auto res_err = ProducerErrorTemplates::kWrongInput.Generate();
+            res_err->Append(sendDataResponse.message);
+            return res_err;
+        }
+        case kNetErrorNotSupported : {
+            auto res_err = ProducerErrorTemplates::kUnsupportedClient.Generate();
+            res_err->Append(sendDataResponse.message);
+            return res_err;
+        }
+        case kNetErrorWrongRequest : {
+            auto res_err = ProducerErrorTemplates::kWrongInput.Generate();
+            res_err->Append(sendDataResponse.message);
+            return res_err;
+        }
+        case kNetErrorWarning: {
+            auto res_err = ProducerErrorTemplates::kServerWarning.Generate();
+            res_err->Append(sendDataResponse.message);
+            return res_err;
         }
-        return nullptr;
-    default:
-        auto res_err = ProducerErrorTemplates::kInternalServerError.Generate();
-        res_err->Append(sendDataResponse.message);
-        return res_err;
+        case kNetErrorReauthorize: {
+            auto res_err = ProducerErrorTemplates::kReAuthorizationNeeded.Generate();
+            return res_err;
+        }
+        case kNetErrorNoError :
+            if (response) {
+                *response = sendDataResponse.message;
+            }
+            return nullptr;
+        default:auto res_err = ProducerErrorTemplates::kInternalServerError.Generate();
+            res_err->Append(sendDataResponse.message);
+            return res_err;
     }
 }
 
 Error RequestHandlerTcp::TrySendToReceiver(const ProducerRequest* request, std::string* response) {
     auto err = SendRequestContent(request);
-    if (err)  {
+    if (err) {
         return err;
     }
 
     err = ReceiveResponse(request->header, response);
-    if (err == nullptr || err == ProducerErrorTemplates::kServerWarning)  {
+    if (err == nullptr || err == ProducerErrorTemplates::kServerWarning) {
         log__->Debug("successfully sent data, opcode: " + std::to_string(request->header.op_code) +
-                     ", id: " + std::to_string(request->header.data_id) + " to " + connected_receiver_uri_);
-        if (err == ProducerErrorTemplates::kServerWarning ) {
-            log__->Warning("warning from server for id " + std::to_string(request->header.data_id) + ": " + err->Explain());
+            ", id: " + std::to_string(request->header.data_id) + " to " + connected_receiver_uri_);
+        if (err == ProducerErrorTemplates::kServerWarning) {
+            log__->Warning(
+                "warning from server for id " + std::to_string(request->header.data_id) + ": " + err->Explain());
         }
     }
 
     return err;
 }
 
-
 void RequestHandlerTcp::UpdateIfNewConnection() {
     if (Connected())
         return;
@@ -152,17 +163,15 @@ bool RequestHandlerTcp::UpdateReceiversList() {
 }
 
 bool RequestHandlerTcp::TimeToUpdateReceiverList() {
-    uint64_t elapsed_ms = std::chrono::duration_cast<std::chrono::milliseconds>( system_clock::now() -
-                          last_receivers_uri_update_).count();
+    uint64_t elapsed_ms = std::chrono::duration_cast<std::chrono::milliseconds>(system_clock::now() -
+        last_receivers_uri_update_).count();
     return elapsed_ms > discovery_service__->UpdateFrequency();
 }
 
-
 bool RequestHandlerTcp::Disconnected() {
     return !Connected();
 }
 
-
 bool RequestHandlerTcp::NeedRebalance() {
     if (Disconnected())
         return false;
@@ -186,23 +195,24 @@ void RequestHandlerTcp::Disconnect() {
     connected_receiver_uri_.clear();
 }
 
-bool RequestHandlerTcp::ServerError(const Error& err) {
+bool RequestHandlerTcp::ServerError(const Error &err) {
     return err != nullptr && (err != ProducerErrorTemplates::kWrongInput &&
-                              err != ProducerErrorTemplates::kLocalIOError &&
-                              err != ProducerErrorTemplates::kServerWarning
-                             );
+        err != ProducerErrorTemplates::kLocalIOError &&
+        err != ProducerErrorTemplates::kUnsupportedClient &&
+        err != ProducerErrorTemplates::kServerWarning
+    );
 }
 
-bool RequestHandlerTcp::ProcessErrorFromReceiver(const Error& error,
+bool RequestHandlerTcp::ProcessErrorFromReceiver(const Error &error,
                                                  const ProducerRequest* request,
-                                                 const std::string& receiver_uri) {
+                                                 const std::string &receiver_uri) {
     bool is_server_error = ServerError(error);
 
     if (error && error != ProducerErrorTemplates::kServerWarning) {
         Disconnect();
         std::string log_str = "cannot send data, opcode: " + std::to_string(request->header.op_code) +
-                              ", id: " + std::to_string(request->header.data_id) + " to " + receiver_uri + ": " +
-                              error->Explain();
+            ", id: " + std::to_string(request->header.data_id) + " to " + receiver_uri + ": " +
+            error->Explain();
         if (is_server_error) {
             log__->Warning(log_str + ", will try again");
         } else {
@@ -213,24 +223,27 @@ bool RequestHandlerTcp::ProcessErrorFromReceiver(const Error& error,
     return is_server_error;
 }
 
-
 void RequestHandlerTcp::ProcessRequestCallback(Error err, ProducerRequest* request, std::string response, bool* retry) {
     if (request->callback) {
-        request->callback(RequestCallbackPayload{request->header, std::move(request->data),std::move(response)}, std::move(err));
+        request->callback(RequestCallbackPayload{request->header, std::move(request->data), std::move(response)},
+                          std::move(err));
     }
     *retry = false;
 }
 
+bool ImmediateCallbackAfterError(const Error& err) {
+    return err == ProducerErrorTemplates::kWrongInput || err == ProducerErrorTemplates::kUnsupportedClient;
+}
 
 bool RequestHandlerTcp::SendToOneOfTheReceivers(ProducerRequest* request, bool* retry) {
     for (auto receiver_uri : receivers_list_) {
         if (Disconnected()) {
             auto err = ConnectToReceiver(request->source_credentials, receiver_uri);
-            if (err == ProducerErrorTemplates::kWrongInput) {
+            if (ImmediateCallbackAfterError(err)) {
                 ProcessRequestCallback(std::move(err), request, "", retry);
                 return false;
             } else {
-                if (err != nullptr ) {
+                if (err != nullptr) {
                     continue;
                 }
             }
@@ -239,7 +252,7 @@ bool RequestHandlerTcp::SendToOneOfTheReceivers(ProducerRequest* request, bool*
         std::string response;
         auto err = TrySendToReceiver(request, &response);
         bool server_error_can_retry = ProcessErrorFromReceiver(err, request, receiver_uri);
-        if (server_error_can_retry)  {
+        if (server_error_can_retry) {
             continue;
         }
 
@@ -247,13 +260,13 @@ bool RequestHandlerTcp::SendToOneOfTheReceivers(ProducerRequest* request, bool*
         ProcessRequestCallback(std::move(err), request, response, retry);
         return success;
     }
-    log__->Warning((receivers_list_.empty()?std::string("receiver list empty, "):"")+"put back to the queue, request opcode: " + std::to_string(request->header.op_code) +
-                   ", id: " + std::to_string(request->header.data_id));
+    log__->Warning((receivers_list_.empty() ? std::string("receiver list empty, ") : "")
+                       + "put back to the queue, request opcode: " + std::to_string(request->header.op_code) +
+        ", id: " + std::to_string(request->header.data_id));
     *retry = true;
     return false;
 }
 
-
 bool RequestHandlerTcp::ProcessRequestUnlocked(GenericRequest* request, bool* retry) {
     auto producer_request = static_cast<ProducerRequest*>(request);
 
@@ -297,13 +310,14 @@ void RequestHandlerTcp::TearDownProcessingRequestLocked(bool request_processed_s
 void RequestHandlerTcp::ProcessRequestTimeout(GenericRequest* request) {
     auto producer_request = static_cast<ProducerRequest*>(request);
     auto err_string = "request id:" + std::to_string(request->header.data_id) + ", opcode: " + std::to_string(
-                          request->header.op_code) + " for " + request->header.stream +
-                      " stream";
+        request->header.op_code) + " for " + request->header.stream +
+        " stream";
     log__->Error("timeout " + err_string);
 
     auto err = ProducerErrorTemplates::kTimeout.Generate(err_string);
     if (producer_request->callback) {
-        producer_request->callback(RequestCallbackPayload{request->header, std::move(producer_request->data),""}, std::move(err));
+        producer_request->callback(RequestCallbackPayload{request->header, std::move(producer_request->data), ""},
+                                   std::move(err));
     }
 
 }
diff --git a/producer/api/cpp/unittests/test_producer_impl.cpp b/producer/api/cpp/unittests/test_producer_impl.cpp
index e9ea117ee49a9dbadcf1dff210de7655688d2c6d..5219352f164def64bf0b0d1c819af0feba464c3d 100644
--- a/producer/api/cpp/unittests/test_producer_impl.cpp
+++ b/producer/api/cpp/unittests/test_producer_impl.cpp
@@ -1,5 +1,3 @@
-#pragma clang diagnostic push
-#pragma ide diagnostic ignored "InfiniteRecursion"
 #include <gtest/gtest.h>
 #include <gmock/gmock.h>
 
@@ -10,6 +8,8 @@
 #include "asapo/producer/producer_error.h"
 
 #include "../src/request_handler_tcp.h"
+#include "asapo/request/request_pool_error.h"
+#include "asapo/unittests/MockHttpClient.h"
 
 #include "mocking.h"
 
@@ -25,10 +25,13 @@ using ::testing::Ne;
 using ::testing::Mock;
 using ::testing::InSequence;
 using ::testing::HasSubstr;
+using testing::SetArgPointee;
 
 
 using asapo::RequestPool;
 using asapo::ProducerRequest;
+using asapo::MockHttpClient;
+
 
 MATCHER_P10(M_CheckSendRequest, op_code, source_credentials, metadata, file_id, file_size, message, stream,
             ingest_mode,
@@ -55,6 +58,8 @@ TEST(ProducerImpl, Constructor) {
     asapo::ProducerImpl producer{"", 4, 3600000, asapo::RequestHandlerType::kTcp};
     ASSERT_THAT(dynamic_cast<asapo::AbstractLogger*>(producer.log__), Ne(nullptr));
     ASSERT_THAT(dynamic_cast<asapo::RequestPool*>(producer.request_pool__.get()), Ne(nullptr));
+    ASSERT_THAT(dynamic_cast<const asapo::HttpClient*>(producer.httpclient__.get()), Ne(nullptr));
+
 }
 
 class ProducerImplTests : public testing::Test {
@@ -63,7 +68,8 @@ class ProducerImplTests : public testing::Test {
   asapo::ProducerRequestHandlerFactory factory{&service};
   testing::NiceMock<asapo::MockLogger> mock_logger;
   testing::NiceMock<MockRequestPull> mock_pull{&factory, &mock_logger};
-  asapo::ProducerImpl producer{"", 1, 3600000, asapo::RequestHandlerType::kTcp};
+  std::string expected_server_uri = "test:8400";
+  asapo::ProducerImpl producer{expected_server_uri, 1, 3600000, asapo::RequestHandlerType::kTcp};
   uint64_t expected_size = 100;
   uint64_t expected_id = 10;
   uint64_t expected_dataset_id = 100;
@@ -87,9 +93,15 @@ class ProducerImplTests : public testing::Test {
   std::string expected_fullpath = "filename";
   bool expected_managed_memory = true;
   bool expected_unmanaged_memory = false;
+
+  MockHttpClient* mock_http_client;
+
   void SetUp() override {
       producer.log__ = &mock_logger;
       producer.request_pool__ = std::unique_ptr<RequestPool>{&mock_pull};
+      mock_http_client = new MockHttpClient;
+      producer.httpclient__.reset(mock_http_client);
+
   }
   void TearDown() override {
       producer.request_pool__.release();
@@ -98,17 +110,22 @@ class ProducerImplTests : public testing::Test {
 
 TEST_F(ProducerImplTests, SendReturnsError) {
     EXPECT_CALL(mock_pull, AddRequest_t(_, false)).WillOnce(Return(
-        asapo::ProducerErrorTemplates::kRequestPoolIsFull.Generate().release()));
+        asapo::IOErrorTemplates::kNoSpaceLeft.Generate().release()));
     asapo::MessageHeader message_header{1, 1, "test"};
     auto err = producer.Send(message_header, nullptr, expected_ingest_mode, "default", nullptr);
     ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kRequestPoolIsFull));
 }
 
 TEST_F(ProducerImplTests, ErrorIfFileNameTooLong) {
+    asapo::MessageData data = asapo::MessageData{new uint8_t[100]};
+    data[34]=12;
     std::string long_string(asapo::kMaxMessageSize + 100, 'a');
     asapo::MessageHeader message_header{1, 1, long_string};
-    auto err = producer.Send(message_header, nullptr, expected_ingest_mode, "default", nullptr);
+    auto err = producer.Send(message_header, std::move(data), expected_ingest_mode, "default", nullptr);
     ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kWrongInput));
+    auto err_data = static_cast<asapo::OriginalData*>(err->GetCustomData());
+    ASSERT_THAT(err_data, Ne(nullptr));
+    ASSERT_THAT(err_data->data[34], Eq(12));
 }
 
 TEST_F(ProducerImplTests, ErrorIfStreamEmpty) {
@@ -137,6 +154,8 @@ TEST_F(ProducerImplTests, ErrorIfZeroDataSize) {
     asapo::MessageHeader message_header{1, 0, expected_fullpath};
     auto err = producer.Send(message_header, std::move(data), asapo::kDefaultIngestMode, "default", nullptr);
     ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kWrongInput));
+    auto err_data = static_cast<asapo::OriginalData*>(err->GetCustomData());
+    ASSERT_THAT(err_data, Ne(nullptr));
 }
 
 TEST_F(ProducerImplTests, ErrorIfNoData) {
@@ -202,7 +221,7 @@ TEST_F(ProducerImplTests, OKSendingStreamFinish) {
                                                                next_stream_meta.c_str(),
                                                                expected_id + 1,
                                                                0,
-                                                               asapo::ProducerImpl::kFinishStreamKeyword.c_str(),
+                                                               asapo::kFinishStreamKeyword.c_str(),
                                                                expected_stream,
                                                                asapo::IngestModeFlags::kTransferMetaDataOnly,
                                                                0,
@@ -226,7 +245,7 @@ TEST_F(ProducerImplTests, OKSendingStreamFinishWithNoNextStream) {
     producer.SetCredentials(expected_credentials);
 
     std::string
-        next_stream_meta = std::string("{\"next_stream\":") + "\"" + asapo::ProducerImpl::kNoNextStreamKeyword
+        next_stream_meta = std::string("{\"next_stream\":") + "\"" + asapo::kNoNextStreamKeyword
         + "\"}";
 
     EXPECT_CALL(mock_pull, AddRequest_t(M_CheckSendRequest(asapo::kOpcodeTransferData,
@@ -234,7 +253,7 @@ TEST_F(ProducerImplTests, OKSendingStreamFinishWithNoNextStream) {
                                                                next_stream_meta.c_str(),
                                                                expected_id + 1,
                                                                0,
-                                                               asapo::ProducerImpl::kFinishStreamKeyword.c_str(),
+                                                               asapo::kFinishStreamKeyword.c_str(),
                                                                expected_stream,
                                                                asapo::IngestModeFlags::kTransferMetaDataOnly,
                                                                0,
@@ -472,6 +491,50 @@ TEST_F(ProducerImplTests, GetLastStreamMakesCorerctRequest) {
     ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kTimeout));
 }
 
+
+TEST_F(ProducerImplTests, ReturnDataIfCanotAddToQueue) {
+    producer.SetCredentials(expected_credentials);
+
+    asapo::MessageData data = asapo::MessageData{new uint8_t[100]};
+    data[40] = 10;
+    asapo::OriginalRequest* original_request = new asapo::OriginalRequest{};
+
+    auto request = std::unique_ptr<ProducerRequest> {new ProducerRequest{"", asapo::GenericRequestHeader{},std::move(data), "", "", nullptr, true, 0}};
+    original_request->request = std::move(request);
+    auto pool_err = asapo::IOErrorTemplates::kNoSpaceLeft.Generate();
+    pool_err->SetCustomData(std::unique_ptr<asapo::CustomErrorData>{original_request});
+
+
+    EXPECT_CALL(mock_pull, AddRequest_t(_,_)).WillOnce(Return(
+        std::move(pool_err).release()));
+
+    asapo::MessageHeader message_header{expected_id, 0, expected_name};
+    auto err = producer.Send(message_header, std::move(data), expected_ingest_mode, expected_stream, nullptr);
+
+    auto err_data = static_cast<asapo::OriginalData*>(err->GetCustomData());
+    ASSERT_THAT(err_data, Ne(nullptr));
+
+    asapo::MessageData original_data_in_err = std::move(err_data->data);
+    ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kRequestPoolIsFull));
+    ASSERT_THAT(original_data_in_err, Ne(nullptr));
+    ASSERT_THAT(original_data_in_err[40], Eq(10));
+
 }
 
-#pragma clang diagnostic pop
\ No newline at end of file
+TEST_F(ProducerImplTests, GetVersionInfoWithServer) {
+
+    std::string result = R"({"softwareVersion":"20.03.1, build 7a9294ad","clientSupported":"no", "clientProtocol":{"versionInfo":"v0.2"}})";
+
+    EXPECT_CALL(*mock_http_client, Get_t(HasSubstr(expected_server_uri + "/asapo-discovery/v0.1/version?client=producer&protocol=v0.1"), _,_)).WillOnce(DoAll(
+        SetArgPointee<1>(asapo::HttpCode::OK),
+        SetArgPointee<2>(nullptr),
+        Return(result)));
+
+    std::string client_info,server_info;
+    auto err = producer.GetVersionInfo(&client_info,&server_info,nullptr);
+    ASSERT_THAT(err, Eq(nullptr));
+    ASSERT_THAT(server_info, HasSubstr("20.03.1"));
+    ASSERT_THAT(server_info, HasSubstr("v0.2"));
+}
+
+}
diff --git a/producer/api/cpp/unittests/test_producer_request.cpp b/producer/api/cpp/unittests/test_producer_request.cpp
index eb087cfa3b3a4dec67b92d1626c7b0bb58d47ce8..b879fb32833c7dd8bdb911adddd7db9f007a9fc5 100644
--- a/producer/api/cpp/unittests/test_producer_request.cpp
+++ b/producer/api/cpp/unittests/test_producer_request.cpp
@@ -40,6 +40,7 @@ TEST(ProducerRequest, Constructor) {
     uint64_t expected_file_size = 1337;
     uint64_t expected_meta_size = 137;
     std::string expected_meta = "meta";
+    std::string expected_api_version = "v0.1";
     asapo::Opcode expected_op_code = asapo::kOpcodeTransferData;
 
     asapo::GenericRequestHeader header{expected_op_code, expected_file_id, expected_file_size,
@@ -53,7 +54,7 @@ TEST(ProducerRequest, Constructor) {
     ASSERT_THAT(request.header.data_id, Eq(expected_file_id));
     ASSERT_THAT(request.header.op_code, Eq(expected_op_code));
     ASSERT_THAT(request.header.meta_size, Eq(expected_meta_size));
-
+    ASSERT_THAT(request.header.api_version, testing::StrEq(expected_api_version));
 }
 
 
diff --git a/producer/api/cpp/unittests/test_receiver_discovery_service.cpp b/producer/api/cpp/unittests/test_receiver_discovery_service.cpp
index 7000da7d4c01930c3906a9460405e7623140d766..014be20c89dd3d247a2e3d7134c9c43fb5131792 100644
--- a/producer/api/cpp/unittests/test_receiver_discovery_service.cpp
+++ b/producer/api/cpp/unittests/test_receiver_discovery_service.cpp
@@ -48,7 +48,7 @@ class ReceiversStatusTests : public Test {
     NiceMock<asapo::MockLogger> mock_logger;
     NiceMock<MockHttpClient>* mock_http_client;
 
-    std::string expected_endpoint{"endpoint/asapo-discovery/asapo-receiver"};
+    std::string expected_endpoint{"endpoint/asapo-discovery/v0.1/asapo-receiver?protocol=v0.1"};
     ReceiverDiscoveryService status{"endpoint", 20};
 
     void SetUp() override {
diff --git a/producer/api/cpp/unittests/test_request_handler_tcp.cpp b/producer/api/cpp/unittests/test_request_handler_tcp.cpp
index 25a88248bb4b2b3ed107704870670260d9a1699e..1d7b812cd1e88df4915b65a83639774ef8ececb8 100644
--- a/producer/api/cpp/unittests/test_request_handler_tcp.cpp
+++ b/producer/api/cpp/unittests/test_request_handler_tcp.cpp
@@ -120,7 +120,7 @@ class RequestHandlerTcpTests : public testing::Test {
   bool retry;
   Sequence seq_receive[2];
   void ExpectFailConnect(bool only_once = false);
-  void ExpectFailAuthorize(bool only_once = false);
+  void ExpectFailAuthorize(asapo::NetworkErrorCode error_code);
   void ExpectOKAuthorize(bool only_once = false);
   void ExpectFailSendHeader(bool only_once = false);
   void ExpectFailSend(uint64_t expected_size, bool only_once);
@@ -191,59 +191,68 @@ void RequestHandlerTcpTests::ExpectFailConnect(bool only_once) {
 
 }
 
-void RequestHandlerTcpTests::ExpectFailAuthorize(bool only_once) {
-    int i = 0;
-    for (auto expected_sd : expected_sds) {
-        EXPECT_CALL(mock_io,
-                    Send_t(expected_sd, M_CheckSendRequest(asapo::kOpcodeAuthorize, 0, 0, expected_beamtime_id,
-                                                               ""),
-                           sizeof(asapo::GenericRequestHeader), _))
-            .WillOnce(
-                DoAll(
-                    testing::SetArgPointee<3>(nullptr),
-                    Return(sizeof(asapo::GenericRequestHeader))
-                ));
-
-        EXPECT_CALL(mock_io, Receive_t(expected_sd, _, sizeof(asapo::SendResponse), _))
-            .InSequence(seq_receive[i])
-            .WillOnce(
-                DoAll(
-                    testing::SetArgPointee<3>(nullptr),
-                    A_WriteSendResponse(asapo::kNetAuthorizationError, expected_auth_message),
-                    testing::ReturnArg<2>()
-                ));
-        EXPECT_CALL(mock_io, CloseSocket_t(expected_sd, _));
-        if (only_once) {
-            EXPECT_CALL(mock_logger, Debug(AllOf(
-                HasSubstr("disconnected"),
-                HasSubstr(receivers_list[i])
-                                           )
+void RequestHandlerTcpTests::ExpectFailAuthorize(asapo::NetworkErrorCode error_code) {
+    auto expected_sd = expected_sds[0];
+    EXPECT_CALL(mock_io,
+                Send_t(expected_sd, M_CheckSendRequest(asapo::kOpcodeAuthorize, 0, 0, "",
+                                                       ""),
+                       sizeof(asapo::GenericRequestHeader), _))
+        .WillOnce(
+            DoAll(
+                testing::SetArgPointee<3>(nullptr),
+                Return(sizeof(asapo::GenericRequestHeader))
+            ));
+    EXPECT_CALL(mock_io,
+                Send_t(expected_sd, _, strlen(expected_beamtime_id), _))
+        .WillOnce(
+            DoAll(
+                testing::SetArgPointee<3>(nullptr),
+                Return(strlen(expected_beamtime_id))
             ));
 
-            EXPECT_CALL(mock_logger, Error(AllOf(
-                HasSubstr("authorization"),
-                HasSubstr(expected_auth_message),
-                HasSubstr(receivers_list[i])
-                                           )
+    EXPECT_CALL(mock_io, Receive_t(expected_sd, _, sizeof(asapo::SendResponse), _))
+        .InSequence(seq_receive[0])
+        .WillOnce(
+            DoAll(
+                testing::SetArgPointee<3>(nullptr),
+                A_WriteSendResponse(error_code, expected_auth_message),
+                testing::ReturnArg<2>()
             ));
-        }
-        if (only_once) break;
-        i++;
-    }
+    EXPECT_CALL(mock_io, CloseSocket_t(expected_sd, _));
+    EXPECT_CALL(mock_logger, Debug(AllOf(
+        HasSubstr("disconnected"),
+        HasSubstr(receivers_list[0])
+                                   )
+    ));
+
+    EXPECT_CALL(mock_logger, Error(AllOf(
+        HasSubstr("authorization"),
+        HasSubstr(expected_auth_message),
+        HasSubstr(receivers_list[0])
+                                   )
+    ));
 }
 
+
 void RequestHandlerTcpTests::ExpectOKAuthorize(bool only_once) {
     int i = 0;
     for (auto expected_sd : expected_sds) {
         EXPECT_CALL(mock_io,
-                    Send_t(expected_sd, M_CheckSendRequest(asapo::kOpcodeAuthorize, 0, 0, expected_beamtime_id,
-                                                               ""),
+                    Send_t(expected_sd, M_CheckSendRequest(asapo::kOpcodeAuthorize, 0, 0, "",
+                                                           ""),
                            sizeof(asapo::GenericRequestHeader), _))
             .WillOnce(
                 DoAll(
                     testing::SetArgPointee<3>(nullptr),
                     Return(sizeof(asapo::GenericRequestHeader))
                 ));
+        EXPECT_CALL(mock_io,
+                    Send_t(expected_sd, _, strlen(expected_beamtime_id), _))
+            .WillOnce(
+                DoAll(
+                    testing::SetArgPointee<3>(nullptr),
+                    Return(strlen(expected_beamtime_id))
+                ));
 
         EXPECT_CALL(mock_io, Receive_t(expected_sd, _, sizeof(asapo::SendResponse), _))
             .InSequence(seq_receive[i])
@@ -270,10 +279,10 @@ void RequestHandlerTcpTests::ExpectFailSendHeader(bool only_once) {
     int i = 0;
     for (auto expected_sd : expected_sds) {
         EXPECT_CALL(mock_io, Send_t(expected_sd, M_CheckSendRequest(expected_op_code,
-                                                                        expected_file_id,
-                                                                        expected_file_size,
-                                                                        expected_file_name,
-                                                                        expected_stream),
+                                                                    expected_file_id,
+                                                                    expected_file_size,
+                                                                    expected_file_name,
+                                                                    expected_stream),
                                     sizeof(asapo::GenericRequestHeader), _))
             .WillOnce(
                 DoAll(
@@ -442,10 +451,10 @@ void RequestHandlerTcpTests::ExpectOKSendFile(bool only_once) {
 void RequestHandlerTcpTests::ExpectOKSendHeader(bool only_once, asapo::Opcode opcode) {
     for (auto expected_sd : expected_sds) {
         EXPECT_CALL(mock_io, Send_t(expected_sd, M_CheckSendRequest(opcode,
-                                                                        expected_file_id,
-                                                                        expected_file_size,
-                                                                        expected_file_name,
-                                                                        expected_stream),
+                                                                    expected_file_id,
+                                                                    expected_file_size,
+                                                                    expected_file_name,
+                                                                    expected_stream),
                                     sizeof(asapo::GenericRequestHeader), _))
             .WillOnce(
                 DoAll(
@@ -581,7 +590,7 @@ TEST_F(RequestHandlerTcpTests, TriesConnectWhenNotConnected) {
 
 TEST_F(RequestHandlerTcpTests, FailsWhenCannotAuthorize) {
     ExpectOKConnect(true);
-    ExpectFailAuthorize(true);
+    ExpectFailAuthorize(asapo::kNetAuthorizationError);
 
     request_handler.PrepareProcessingRequestLocked();
     auto success = request_handler.ProcessRequestUnlocked(&request, &retry);
@@ -595,6 +604,21 @@ TEST_F(RequestHandlerTcpTests, FailsWhenCannotAuthorize) {
 
 }
 
+TEST_F(RequestHandlerTcpTests, FailsWhenUnsupportedClient) {
+    ExpectOKConnect(true);
+    ExpectFailAuthorize(asapo::kNetErrorNotSupported);
+
+    request_handler.PrepareProcessingRequestLocked();
+    auto success = request_handler.ProcessRequestUnlocked(&request, &retry);
+    request_handler.TearDownProcessingRequestLocked(success);
+
+    ASSERT_THAT(n_connections, Eq(0));
+    ASSERT_THAT(callback_err, Eq(asapo::ProducerErrorTemplates::kUnsupportedClient));
+    ASSERT_THAT(callback_called, Eq(true));
+    ASSERT_THAT(success, Eq(false));
+    ASSERT_THAT(retry, Eq(false));
+}
+
 TEST_F(RequestHandlerTcpTests, DoesNotTryConnectWhenConnected) {
     DoSingleSend();
 
@@ -754,6 +778,12 @@ TEST_F(RequestHandlerTcpTests, ImmediatelyCallBackErrorIfAuthorizationFailure) {
     AssertImmediatelyCallBack(asapo::kNetAuthorizationError, asapo::ProducerErrorTemplates::kWrongInput);
 }
 
+
+TEST_F(RequestHandlerTcpTests, ImmediatelyCallBackErrorIfNotSupportedFailure) {
+    AssertImmediatelyCallBack(asapo::kNetErrorNotSupported, asapo::ProducerErrorTemplates::kUnsupportedClient);
+}
+
+
 TEST_F(RequestHandlerTcpTests, ImmediatelyCallBackErrorIfWrongMetadata) {
     AssertImmediatelyCallBack(asapo::kNetErrorWrongRequest, asapo::ProducerErrorTemplates::kWrongInput);
 }
diff --git a/producer/api/python/asapo_producer.pxd b/producer/api/python/asapo_producer.pxd
index c387fe773f2a4f277bcef12ce96dee4b4fa8dcd6..cd627b6203aa0e9322625f893d7203dfd392c6a1 100644
--- a/producer/api/python/asapo_producer.pxd
+++ b/producer/api/python/asapo_producer.pxd
@@ -21,16 +21,15 @@ cdef extern from "asapo/asapo_producer.h" namespace "asapo":
   ErrorTemplateInterface kWrongInput "asapo::ProducerErrorTemplates::kWrongInput"
   ErrorTemplateInterface kLocalIOError "asapo::ProducerErrorTemplates::kLocalIOError"
   ErrorTemplateInterface kServerWarning "asapo::ProducerErrorTemplates::kServerWarning"
-
-
+  ErrorTemplateInterface kRequestPoolIsFull "asapo::ProducerErrorTemplates::kRequestPoolIsFull"
+  ErrorTemplateInterface kUnsupportedClient "asapo::ProducerErrorTemplates::kUnsupportedClient"
 
 cdef extern from "asapo/asapo_producer.h" namespace "asapo":
   cppclass MessageData:
     uint8_t[] release()
     uint8_t[] get()
   cppclass StreamInfo:
-    string Json(bool add_last_id)
-    bool SetFromJson(string json_str, bool read_last_id)
+    string Json()
 
 cdef extern from "asapo/asapo_producer.h" namespace "asapo":
   cppclass RequestHandlerType:
@@ -100,10 +99,13 @@ cdef extern from "asapo/asapo_producer.h" namespace "asapo" nogil:
         void StopThreads__()
         void SetLogLevel(LogLevel level)
         uint64_t  GetRequestsQueueSize()
+        uint64_t  GetRequestsQueueVolumeMb()
+        void SetRequestsQueueLimits(uint64_t size, uint64_t volume)
         Error WaitRequestsFinished(uint64_t timeout_ms)
         Error SendStreamFinishedFlag(string stream, uint64_t last_id, string next_stream, RequestCallback callback)
         StreamInfo GetStreamInfo(string stream, uint64_t timeout_ms, Error* err)
         StreamInfo GetLastStream(uint64_t timeout_ms, Error* err)
+        Error GetVersionInfo(string* client_info,string* server_info, bool* supported)
 
 
 cdef extern from "asapo/asapo_producer.h" namespace "asapo":
diff --git a/producer/api/python/asapo_producer.pyx.in b/producer/api/python/asapo_producer.pyx.in
index 67bb53dc169f1dd38806a6860dd2b8e85feb7ba5..5391c8ce8ea41e3eada8ad182b8e4cc755d3feb1 100644
--- a/producer/api/python/asapo_producer.pyx.in
+++ b/producer/api/python/asapo_producer.pyx.in
@@ -51,6 +51,11 @@ class AsapoTimeOutError(AsapoProducerError):
 class AsapoServerWarning(AsapoProducerError):
   pass
 
+class AsapoRequestsPoolIsFull(AsapoProducerError):
+  pass
+
+class AsapoUnsupportedClientError(AsapoProducerError):
+  pass
 
 cdef python_exception_from_error(Error& err):
     error_string =  _str(err.get().Explain())
@@ -62,6 +67,10 @@ cdef python_exception_from_error(Error& err):
             return AsapoLocalIOError(error_string)
     elif err == kServerWarning:
             return AsapoServerWarning(error_string)
+    elif err == kRequestPoolIsFull:
+            return AsapoRequestsPoolIsFull(error_string)
+    elif err == kUnsupportedClient:
+            return AsapoUnsupportedClientError(error_string)
     else:
         return AsapoProducerError(error_string)
 
@@ -106,7 +115,21 @@ cdef class PyProducer:
             print("wrong loglevel mode: "+ level)
             return
          self.c_producer.get().SetLogLevel(log_level)
-
+    def get_version_info(self, from_server = True):
+        cdef string client_info,server_info
+        cdef bool supported
+        cdef string* p_server_info =  &server_info if from_server else <string*>NULL
+        cdef bool* p_supported =  &supported if from_server else <bool*>NULL
+        cdef Error err
+        with nogil:
+                err =  self.c_producer.get().GetVersionInfo(&client_info,p_server_info,p_supported)
+        if err:
+            throw_exception(err)
+
+        if from_server:
+            return {'client': _str(client_info), 'server': _str(server_info), 'supported': supported}
+        else:
+            return {'client': _str(client_info)}
     def __send_np_array(self, id, exposed_path,data, user_meta=None,dataset=None,stream="default",ingest_mode = DEFAULT_INGEST_MODE,callback=None):
         cdef MessageHeader message_header = self.create_message_header(id,exposed_path,user_meta,dataset,ingest_mode)
         if data is None:
@@ -216,7 +239,7 @@ cdef class PyProducer:
             info = self.c_producer.get().GetStreamInfo(b_stream,timeout_ms,&err)
         if err:
             throw_exception(err)
-        return json.loads(_str(info.Json(True)))
+        return json.loads(_str(info.Json()))
 
     def last_stream(self, uint64_t timeout_ms = 1000):
         """
@@ -233,7 +256,7 @@ cdef class PyProducer:
             info = self.c_producer.get().GetLastStream(timeout_ms,&err)
         if err:
             throw_exception(err)
-        return json.loads(_str(info.Json(True)))
+        return json.loads(_str(info.Json()))
     def send_file(self, uint64_t id, local_path, exposed_path, user_meta=None, dataset=None, ingest_mode = DEFAULT_INGEST_MODE, stream = "default", callback=None):
         """
          :param id: unique data id
@@ -271,6 +294,10 @@ cdef class PyProducer:
         return
     def get_requests_queue_size(self):
         return self.c_producer.get().GetRequestsQueueSize()
+    def get_requests_queue_volume_mb(self):
+        return self.c_producer.get().GetRequestsQueueVolumeMb()
+    def set_requests_queue_limits(self,uint64_t size = 0, uint64_t volume_mb = 0):
+        return self.c_producer.get().SetRequestsQueueLimits(size,volume_mb)
     def wait_requests_finished(self,timeout_ms):
         """
          :param timeout_ms: timeout in milliseconds
diff --git a/producer/event_monitor_producer/src/main_eventmon.cpp b/producer/event_monitor_producer/src/main_eventmon.cpp
index d30e479a8bf6e8af518b818ee0aa4cc8accaf040..72446fa522c48bbdd9724fedc639985874afe1ae 100644
--- a/producer/event_monitor_producer/src/main_eventmon.cpp
+++ b/producer/event_monitor_producer/src/main_eventmon.cpp
@@ -15,7 +15,7 @@
 #include "asapo/preprocessor/definitions.h"
 
 #include "asapo/io/io_factory.h"
-#include "asapo/common/version.h"
+#include "asapo/common/internal/version.h"
 
 using asapo::Producer;
 using asapo::EventMonConfigFactory;
diff --git a/receiver/src/main.cpp b/receiver/src/main.cpp
index 2f119b2005f4e68a7983b4be247874df7217e735..e00c94fbd97b8e9047bb34d52c49c18cf9b92546 100644
--- a/receiver/src/main.cpp
+++ b/receiver/src/main.cpp
@@ -6,7 +6,7 @@
 #include "receiver_config.h"
 
 #include "receiver_data_server/receiver_data_server_logger.h"
-#include "asapo/common/version.h"
+#include "asapo/common/internal/version.h"
 
 #include "receiver_data_server/receiver_data_server.h"
 #include "receiver_data_server/net_server/rds_tcp_server.h"
diff --git a/receiver/src/receiver_config.cpp b/receiver/src/receiver_config.cpp
index 82383d3d2b32461fe8c418e71c31438e87ef7495..78052168bdddcce8578809b13d266fb6f4cd9818 100644
--- a/receiver/src/receiver_config.cpp
+++ b/receiver/src/receiver_config.cpp
@@ -19,6 +19,7 @@ Error ReceiverConfigFactory::SetConfig(std::string file_name) {
     Error err;
 
     (err = parser.GetString("PerformanceDbServer", &config.performance_db_uri)) ||
+    (err = parser.GetBool("MonitorPerformance", &config.monitor_performance)) ||
     (err = parser.GetUInt64("ListenPort", &config.listen_port)) ||
     (err = parser.GetUInt64("ReceiveToDiskThresholdMB", &config.receive_to_disk_threshold_mb)) ||
     (err = parser.Embedded("DataServer").GetUInt64("ListenPort", &config.dataserver.listen_port)) ||
diff --git a/receiver/src/receiver_config.h b/receiver/src/receiver_config.h
index 74316f7e6449af6fc9dface08d59876329065774..c8128362a7e1d51075f54d327aab620dc35f6fb5 100644
--- a/receiver/src/receiver_config.h
+++ b/receiver/src/receiver_config.h
@@ -11,6 +11,7 @@ namespace asapo {
 struct ReceiverConfig {
     std::string performance_db_uri;
     std::string performance_db_name;
+    bool monitor_performance = false;
     std::string database_uri;
     uint64_t listen_port = 0;
     std::string authorization_server;
diff --git a/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.cpp b/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.cpp
index 82165357d48d7fdb3985b28eed9cdb9ca00d7d1e..0f8f387faf1d2bd20490a4e6b409612b1c81e81d 100644
--- a/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.cpp
+++ b/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.cpp
@@ -1,7 +1,7 @@
 #include "receiver_data_server_request_handler.h"
 
 #include "../receiver_data_server_error.h"
-
+#include "asapo/common/internal/version.h"
 namespace asapo {
 
 ReceiverDataServerRequestHandler::ReceiverDataServerRequestHandler(RdsNetServer* server,
@@ -11,8 +11,19 @@ ReceiverDataServerRequestHandler::ReceiverDataServerRequestHandler(RdsNetServer*
 }
 
 
-bool ReceiverDataServerRequestHandler::CheckRequest(const ReceiverDataServerRequest* request) {
-    return  request->header.op_code == kOpcodeGetBufferData;
+bool ReceiverDataServerRequestHandler::CheckRequest(const ReceiverDataServerRequest* request,NetworkErrorCode* code) {
+    if (request->header.op_code != kOpcodeGetBufferData) {
+        *code = kNetErrorWrongRequest;
+        return false;
+    }
+    int verClient = VersionToNumber(request->header.api_version);
+    int verService = VersionToNumber(GetRdsApiVersion());
+    if (verClient > verService) {
+        *code = kNetErrorNotSupported;
+        return false;
+    }
+
+    return true;
 }
 
 Error ReceiverDataServerRequestHandler::SendResponse(const ReceiverDataServerRequest* request, NetworkErrorCode code) {
@@ -45,8 +56,9 @@ CacheMeta* ReceiverDataServerRequestHandler::GetSlotAndLock(const ReceiverDataSe
 bool ReceiverDataServerRequestHandler::ProcessRequestUnlocked(GenericRequest* request, bool* retry) {
     *retry = false;
     auto receiver_request = dynamic_cast<ReceiverDataServerRequest*>(request);
-    if (!CheckRequest(receiver_request)) {
-        HandleInvalidRequest(receiver_request);
+    NetworkErrorCode code;
+    if (!CheckRequest(receiver_request,&code)) {
+        HandleInvalidRequest(receiver_request,code);
         return true;
     }
 
@@ -78,10 +90,18 @@ void ReceiverDataServerRequestHandler::ProcessRequestTimeout(GenericRequest* /*r
 // do nothing
 }
 
-void ReceiverDataServerRequestHandler::HandleInvalidRequest(const ReceiverDataServerRequest* receiver_request) {
-    SendResponse(receiver_request, kNetErrorWrongRequest);
+void ReceiverDataServerRequestHandler::HandleInvalidRequest(const ReceiverDataServerRequest* receiver_request,NetworkErrorCode code) {
+    SendResponse(receiver_request, code);
     server_->HandleAfterError(receiver_request->source_id);
-    log__->Error("wrong request, code:" + std::to_string(receiver_request->header.op_code));
+    switch (code) {
+        case NetworkErrorCode::kNetErrorWrongRequest:
+            log__->Error("wrong request, code:" + std::to_string(receiver_request->header.op_code));
+            break;
+        case NetworkErrorCode::kNetErrorNotSupported:
+            log__->Error("unsupported client, version: " + std::string(receiver_request->header.api_version));
+            break;
+        default: break;
+    }
 }
 
 void ReceiverDataServerRequestHandler::HandleValidRequest(const ReceiverDataServerRequest* receiver_request,
diff --git a/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.h b/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.h
index 10c6633bd6963a18413bba8698a240bf89dcd7d0..18fc5937a793c9358376d24ec0480496ce93e88a 100644
--- a/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.h
+++ b/receiver/src/receiver_data_server/request_handler/receiver_data_server_request_handler.h
@@ -24,12 +24,12 @@ class ReceiverDataServerRequestHandler: public RequestHandler {
   private:
     RdsNetServer* server_;
     DataCache* data_cache_;
-    bool CheckRequest(const ReceiverDataServerRequest* request);
+    bool CheckRequest(const ReceiverDataServerRequest* request,NetworkErrorCode* code);
     Error SendResponse(const ReceiverDataServerRequest* request, NetworkErrorCode code);
     Error SendResponseAndSlotData(const ReceiverDataServerRequest* request, const CacheMeta* meta);
     CacheMeta* GetSlotAndLock(const ReceiverDataServerRequest* request);
 
-    void HandleInvalidRequest(const ReceiverDataServerRequest* receiver_request);
+    void HandleInvalidRequest(const ReceiverDataServerRequest* receiver_request,NetworkErrorCode code);
 
     void HandleValidRequest(const ReceiverDataServerRequest* receiver_request, const CacheMeta* meta);
 };
diff --git a/receiver/src/receiver_error.h b/receiver/src/receiver_error.h
index 3e30c4fd28a08c7a4fd8768cfae7ee89c20c3dd0..210c116cadfa8d9b1f64f2894cc014ec7355bbd2 100644
--- a/receiver/src/receiver_error.h
+++ b/receiver/src/receiver_error.h
@@ -11,7 +11,8 @@ enum class ReceiverErrorType {
     kAuthorizationFailure,
     kInternalServerError,
     kReAuthorizationFailure,
-    kWarningDuplicatedRequest
+    kWarningDuplicatedRequest,
+    kUnsupportedClient
 };
 
 using ReceiverErrorTemplate = ServiceErrorTemplate<ReceiverErrorType, ErrorType::kReceiverError>;
@@ -41,6 +42,13 @@ auto const kAuthorizationFailure = ReceiverErrorTemplate {
     "authorization failure", ReceiverErrorType::kAuthorizationFailure
 };
 
+auto const kUnsupportedClient = ReceiverErrorTemplate {
+    "client version not supported", ReceiverErrorType::kUnsupportedClient
+};
+
+
+
+
 auto const kReAuthorizationFailure = ReceiverErrorTemplate {
     "reauthorization for auto beamtime failed", ReceiverErrorType::kReAuthorizationFailure
 };
diff --git a/receiver/src/request.cpp b/receiver/src/request.cpp
index 5bf9c1ab2cbdd38f9c0dd8339e0a86912bc163b6..7db51dfd02f8aa91f097ecab8bfdb8747f9e3add 100644
--- a/receiver/src/request.cpp
+++ b/receiver/src/request.cpp
@@ -89,6 +89,11 @@ std::string Request::GetStream() const {
     return request_header_.stream;
 }
 
+std::string Request::GetApiVersion() const {
+    return request_header_.api_version;
+}
+
+
 const std::string& Request::GetOriginUri() const {
     return origin_uri_;
 }
diff --git a/receiver/src/request.h b/receiver/src/request.h
index f24ad10fcc1927e69e80a2edf91a64efbc73cc26..141430e7748b09ee46c1c596d3efa2c992e59759 100644
--- a/receiver/src/request.h
+++ b/receiver/src/request.h
@@ -44,6 +44,7 @@ class Request {
     VIRTUAL uint64_t GetDataID() const;
     VIRTUAL std::string GetFileName() const;
     VIRTUAL std::string GetStream() const;
+    VIRTUAL std::string GetApiVersion() const;
     VIRTUAL void* GetData() const;
     VIRTUAL Opcode GetOpCode() const;
     VIRTUAL const char* GetMessage() const;
diff --git a/receiver/src/request_handler/request_factory.cpp b/receiver/src/request_handler/request_factory.cpp
index fdacdd94a3c4eed52152fddc0489786f85b76818..de3f74cec40946b86836c391a28e85001bef8320 100644
--- a/receiver/src/request_handler/request_factory.cpp
+++ b/receiver/src/request_handler/request_factory.cpp
@@ -47,7 +47,9 @@ Error RequestFactory::AddReceiveDirectToFileHandler(std::unique_ptr<Request> &re
 
 Error RequestFactory::AddHandlersToRequest(std::unique_ptr<Request> &request,
                                            const GenericRequestHeader &request_header) const {
-    request->AddHandler(&request_handler_authorize_);
+    if (request_header.op_code != Opcode::kOpcodeAuthorize) {
+        request->AddHandler(&request_handler_authorize_);
+    }
 
     switch (request_header.op_code) {
         case Opcode::kOpcodeTransferData:
@@ -73,7 +75,8 @@ Error RequestFactory::AddHandlersToRequest(std::unique_ptr<Request> &request,
             break;
         }
         case Opcode::kOpcodeAuthorize: {
-            // do nothing
+            request->AddHandler(&request_handler_receive_metadata_);
+            request->AddHandler(&request_handler_authorize_);
             break;
         }
         case Opcode::kOpcodeStreamInfo: {
diff --git a/receiver/src/request_handler/request_handler_authorize.cpp b/receiver/src/request_handler/request_handler_authorize.cpp
index f13fc1ae28a41cedcd44ba348544e4c1e6c9d4fa..314b351a46acf31fc5ef08312a4ee989523fc2a8 100644
--- a/receiver/src/request_handler/request_handler_authorize.cpp
+++ b/receiver/src/request_handler/request_handler_authorize.cpp
@@ -4,6 +4,7 @@
 #include "../request.h"
 
 #include "asapo/json_parser/json_parser.h"
+#include "asapo/common/internal/version.h"
 
 using std::chrono::system_clock;
 
@@ -26,6 +27,15 @@ Error RequestHandlerAuthorize::ErrorFromAuthorizationServerResponse(const Error&
     }
 }
 
+Error CheckAccessType(const std::vector<std::string>& access_types) {
+    if(std::find(access_types.begin(), access_types.end(), "write") != access_types.end()) {
+        return nullptr;
+    } else {
+        return asapo::ReceiverErrorTemplates::kAuthorizationFailure.Generate("wrong access types");
+    }
+}
+
+
 Error RequestHandlerAuthorize::Authorize(Request* request, const char* source_credentials) const {
     HttpCode code;
     Error err;
@@ -42,6 +52,7 @@ Error RequestHandlerAuthorize::Authorize(Request* request, const char* source_cr
     }
 
     std::string stype;
+    std::vector<std::string> access_types;
 
     JsonStringParser parser{response};
     (err = parser.GetString("beamtimeId", &beamtime_id_)) ||
@@ -49,21 +60,41 @@ Error RequestHandlerAuthorize::Authorize(Request* request, const char* source_cr
     (err = parser.GetString("core-path", &offline_path_)) ||
     (err = parser.GetString("beamline-path", &online_path_)) ||
     (err = parser.GetString("source-type", &stype)) ||
+    (err = parser.GetArrayString("access-types", &access_types)) ||
     (err = GetSourceTypeFromString(stype, &source_type_)) ||
     (err = parser.GetString("beamline", &beamline_));
     if (err) {
         return ErrorFromAuthorizationServerResponse(err, code);
-    } else {
-        log__->Debug(std::string("authorized connection from ") + request->GetOriginUri() +"source type: "+stype+ " beamline: " +
-                     beamline_ + ", beamtime id: " + beamtime_id_ + ", data soucre: " + data_source_);
     }
 
+    err = CheckAccessType(access_types);
+    if (err) {
+        log__->Error("failure authorizing at " + GetReceiverConfig()->authorization_server + " request: " + request_string +
+            " - " +
+            err->Explain());
+        return err;
+    }
+
+    log__->Debug(std::string("authorized connection from ") + request->GetOriginUri() +"source type: "+stype+ " beamline: " +
+                     beamline_ + ", beamtime id: " + beamtime_id_ + ", data soucre: " + data_source_);
+
     last_updated_ = system_clock::now();
     cached_source_credentials_ = source_credentials;
 
     return nullptr;
 }
 
+Error RequestHandlerAuthorize::CheckVersion(const std::string& version_from_client) const {
+    int verClient = VersionToNumber(version_from_client);
+    int verService = VersionToNumber(GetReceiverApiVersion());
+    if (verClient > verService) {
+        auto err_string = "client version: "+version_from_client + ", server version: "+GetReceiverApiVersion();
+        log__->Error("failure serving client - unsupported version,  " + err_string);
+        return asapo::ReceiverErrorTemplates::kUnsupportedClient.Generate(err_string);
+    }
+    return nullptr;
+}
+
 Error RequestHandlerAuthorize::ProcessAuthorizationRequest(Request* request) const {
     if (!cached_source_credentials_.empty()) {
         Error auth_error = asapo::ReceiverErrorTemplates::kAuthorizationFailure.Generate();
@@ -73,7 +104,13 @@ Error RequestHandlerAuthorize::ProcessAuthorizationRequest(Request* request) con
         return auth_error;
     }
 
-    return Authorize(request, request->GetMessage());
+    auto err = CheckVersion(request->GetApiVersion());
+    if (err) {
+        log__->Error("failure authorizing at client: " + err->Explain());
+        return err;
+    }
+
+    return Authorize(request, request->GetMetaData().c_str());
 }
 
 Error RequestHandlerAuthorize::ProcessReAuthorization(Request* request) const {
diff --git a/receiver/src/request_handler/request_handler_authorize.h b/receiver/src/request_handler/request_handler_authorize.h
index 1798ea8fb31de1e8d0e99459bcf447b2763085ea..3e5e44c31fb985c58aceeb5c50c65a88ad01b89d 100644
--- a/receiver/src/request_handler/request_handler_authorize.h
+++ b/receiver/src/request_handler/request_handler_authorize.h
@@ -35,6 +35,8 @@ class RequestHandlerAuthorize final: public ReceiverRequestHandler {
     Error ProcessReAuthorization(Request* request) const;
     bool NeedReauthorize() const;
     std::string GetRequestString(const Request* request, const char* source_credentials) const;
+    Error CheckVersion(const std::string& version_from_client) const;
+
 };
 
 }
diff --git a/receiver/src/request_handler/request_handler_db_last_stream.cpp b/receiver/src/request_handler/request_handler_db_last_stream.cpp
index ed5978b6e7f62a7241369b356824813795453ece..7e31468f565b0306b0334cc18420b43ba797f6d5 100644
--- a/receiver/src/request_handler/request_handler_db_last_stream.cpp
+++ b/receiver/src/request_handler/request_handler_db_last_stream.cpp
@@ -20,7 +20,7 @@ Error RequestHandlerDbLastStream::ProcessRequest(Request* request) const {
     if (!err) {
         log__->Debug(std::string{"get last stream "} + " in " +
             db_name_ + " at " + GetReceiverConfig()->database_uri);
-        request->SetResponseMessage(info.Json(true), ResponseMessageType::kInfo);
+        request->SetResponseMessage(info.Json(), ResponseMessageType::kInfo);
     }
     return err;
 }
diff --git a/receiver/src/request_handler/request_handler_db_stream_info.cpp b/receiver/src/request_handler/request_handler_db_stream_info.cpp
index 20221ba8c3babb5466046cde16baee96f145bfe0..65d194ccfa1f570fa51341d58e6e3b799a50528c 100644
--- a/receiver/src/request_handler/request_handler_db_stream_info.cpp
+++ b/receiver/src/request_handler/request_handler_db_stream_info.cpp
@@ -21,7 +21,7 @@ Error RequestHandlerDbStreamInfo::ProcessRequest(Request* request) const {
         log__->Debug(std::string{"get stream info from "} + col_name + " in " +
                      db_name_ + " at " + GetReceiverConfig()->database_uri);
         info.name = request->GetStream();
-        request->SetResponseMessage(info.Json(true), ResponseMessageType::kInfo);
+        request->SetResponseMessage(info.Json(), ResponseMessageType::kInfo);
     }
     return err;
 }
diff --git a/receiver/src/request_handler/requests_dispatcher.cpp b/receiver/src/request_handler/requests_dispatcher.cpp
index 7debacf5013ee5c1fdc16dba815f52ad6a7f864e..2487f2d70ce0d86c4a325e7a7ae0236277a14152 100644
--- a/receiver/src/request_handler/requests_dispatcher.cpp
+++ b/receiver/src/request_handler/requests_dispatcher.cpp
@@ -18,6 +18,8 @@ NetworkErrorCode GetNetworkCodeFromError(const Error& err) {
     if (err) {
         if (err == ReceiverErrorTemplates::kAuthorizationFailure) {
             return NetworkErrorCode::kNetAuthorizationError;
+        } else if (err == ReceiverErrorTemplates::kUnsupportedClient) {
+            return NetworkErrorCode::kNetErrorNotSupported;
         } else if (err == ReceiverErrorTemplates::kReAuthorizationFailure) {
             return NetworkErrorCode::kNetErrorReauthorize;
         } else if (err == DBErrorTemplates::kJsonParseError || err == ReceiverErrorTemplates::kBadRequest) {
diff --git a/receiver/src/statistics/statistics.cpp b/receiver/src/statistics/statistics.cpp
index b35da64b1c6007ee9fda78bc1b55339a81db4bba..4549f1bc43f8d45376f55220f86efb2bed5532b2 100644
--- a/receiver/src/statistics/statistics.cpp
+++ b/receiver/src/statistics/statistics.cpp
@@ -1,7 +1,7 @@
 #include "statistics.h"
 #include "statistics_sender_influx_db.h"
 #include "statistics_sender_fluentd.h"
-
+#include "../receiver_config.h"
 #include <algorithm>
 
 using std::chrono::system_clock;
@@ -9,6 +9,9 @@ using std::chrono::system_clock;
 namespace asapo {
 
 void Statistics::SendIfNeeded(bool send_always) noexcept {
+    if (!GetReceiverConfig()->monitor_performance) {
+        return;
+    }
     if (send_always || GetTotalElapsedMs() > write_interval_) {
         std::lock_guard<std::mutex> lock{mutex_};
         Send();
diff --git a/receiver/unittests/mock_receiver_config.cpp b/receiver/unittests/mock_receiver_config.cpp
index 916f026baa7d64b274d65ee810ca8fe2f230c25b..17e236cbdd665969a80682f1e62e50e49e987430 100644
--- a/receiver/unittests/mock_receiver_config.cpp
+++ b/receiver/unittests/mock_receiver_config.cpp
@@ -44,6 +44,7 @@ Error SetReceiverConfig (const ReceiverConfig& config, std::string error_field)
     auto config_string = std::string("{") + Key("PerformanceDbServer",
                                                 error_field) + "\"" + config.performance_db_uri + "\"";
     config_string += "," + Key("PerformanceDbName", error_field) + "\"" + config.performance_db_name + "\"";
+    config_string += "," + Key("MonitorPerformance", error_field) + (config.monitor_performance?"true":"false");
     config_string += "," + Key("DatabaseServer", error_field) + "\"" + config.database_uri + "\"";
     config_string += "," + Key("DiscoveryServer", error_field) + "\"" + config.discovery_server + "\"";
     config_string += "," + Key("ListenPort", error_field) + std::to_string(config.listen_port);
diff --git a/receiver/unittests/receiver_data_server/request_handler/test_request_handler.cpp b/receiver/unittests/receiver_data_server/request_handler/test_request_handler.cpp
index eedc2f99fd39f534183c4b9256eb16782c6d3791..e5c495b00a1d7b9c8663f7fd48be97487c95aadc 100644
--- a/receiver/unittests/receiver_data_server/request_handler/test_request_handler.cpp
+++ b/receiver/unittests/receiver_data_server/request_handler/test_request_handler.cpp
@@ -122,6 +122,18 @@ TEST_F(RequestHandlerTests, ProcessRequest_WrongOpCode) {
     ASSERT_THAT(success, Eq(true));
 }
 
+TEST_F(RequestHandlerTests, ProcessRequest_WrongClientVersion) {
+    strcpy(request.header.api_version,"v0.2");
+    MockSendResponse(asapo::kNetErrorNotSupported, false);
+    EXPECT_CALL(mock_net, HandleAfterError_t(expected_source_id));
+
+    EXPECT_CALL(mock_logger, Error(HasSubstr("unsupported client")));
+
+    auto success = handler.ProcessRequestUnlocked(&request, &retry);
+
+    ASSERT_THAT(success, Eq(true));
+}
+
 TEST_F(RequestHandlerTests, ProcessRequest_ReturnsNoDataWhenCacheNotUsed) {
     MockSendResponse(asapo::kNetErrorNoData, true);
 
diff --git a/receiver/unittests/receiver_mocking.h b/receiver/unittests/receiver_mocking.h
index 200aeed190a32fbf9b8b958b87723928e11be734..8711a6e70ce6a0bcc26173113891188d3313e197 100644
--- a/receiver/unittests/receiver_mocking.h
+++ b/receiver/unittests/receiver_mocking.h
@@ -66,6 +66,7 @@ class MockRequest: public Request {
 
     MOCK_CONST_METHOD0(GetFileName, std::string());
     MOCK_CONST_METHOD0(GetStream, std::string());
+    MOCK_CONST_METHOD0(GetApiVersion, std::string());
     MOCK_CONST_METHOD0(GetDataSize, uint64_t());
     MOCK_CONST_METHOD0(GetDataID, uint64_t());
     MOCK_CONST_METHOD0(GetSlotId, uint64_t());
diff --git a/receiver/unittests/request_handler/test_request_factory.cpp b/receiver/unittests/request_handler/test_request_factory.cpp
index 5f224d3d01135bacd0906518eccd10ba51c7aa72..a6c1d02b6c6479928f8b625d96fdcd1ba73a331b 100644
--- a/receiver/unittests/request_handler/test_request_factory.cpp
+++ b/receiver/unittests/request_handler/test_request_factory.cpp
@@ -121,7 +121,8 @@ TEST_F(FactoryTests, ReturnsDataRequestForAuthorizationCode) {
 
     ASSERT_THAT(err, Eq(nullptr));
     ASSERT_THAT(dynamic_cast<asapo::Request*>(request.get()), Ne(nullptr));
-    ASSERT_THAT(dynamic_cast<const asapo::RequestHandlerAuthorize*>(request->GetListHandlers()[0]), Ne(nullptr));
+    ASSERT_THAT(dynamic_cast<const asapo::RequestHandlerReceiveMetaData*>(request->GetListHandlers()[0]), Ne(nullptr));
+    ASSERT_THAT(dynamic_cast<const asapo::RequestHandlerAuthorize*>(request->GetListHandlers()[1]), Ne(nullptr));
 }
 
 TEST_F(FactoryTests, DoNotAddDiskAndDbWriterIfNotWantedInRequest) {
diff --git a/receiver/unittests/request_handler/test_request_handler_authorizer.cpp b/receiver/unittests/request_handler/test_request_handler_authorizer.cpp
index c1e5a97410fb279f029ee9a3447c67ba4abce3a2..e34fa8423a744a1a93a38e8152fac7fa1cd6dae7 100644
--- a/receiver/unittests/request_handler/test_request_handler_authorizer.cpp
+++ b/receiver/unittests/request_handler/test_request_handler_authorizer.cpp
@@ -72,8 +72,11 @@ class AuthorizerHandlerTests : public Test {
     std::string expected_authorization_server = "authorizer_host";
     std::string expect_request_string;
     std::string expected_source_credentials;
+    std::string expected_api_version = "v0.1";
+
     asapo::SourceType expected_source_type = asapo::SourceType::kProcessed;
     std::string expected_source_type_str = "processed";
+    std::string expected_access_type_str = "[\"write\"]";
     void MockRequestData();
     void SetUp() override {
         GenericRequestHeader request_header;
@@ -115,7 +118,8 @@ class AuthorizerHandlerTests : public Test {
                              "\",\"beamline-path\":" + "\"" + expected_beamline_path +
                              "\",\"core-path\":" + "\"" + expected_core_path +
                              "\",\"source-type\":" + "\"" + expected_source_type_str +
-                             "\",\"beamline\":" + "\"" + expected_beamline + "\"}")
+                             "\",\"beamline\":" + "\"" + expected_beamline +
+                             "\",\"access-types\":" + expected_access_type_str + "}")
                      ));
             if (code != HttpCode::OK) {
                 EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("failure authorizing"),
@@ -126,13 +130,15 @@ class AuthorizerHandlerTests : public Test {
                                                      HasSubstr(expected_data_source),
                                                      HasSubstr(expected_producer_uri),
                                                      HasSubstr(expected_authorization_server))));
-            } else {
+            } else if (expected_access_type_str=="[\"write\"]") {
                 EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("authorized"),
                                                      HasSubstr(expected_beamtime_id),
                                                      HasSubstr(expected_beamline),
                                                      HasSubstr(expected_source_type_str),
                                                      HasSubstr(expected_data_source),
                                                      HasSubstr(expected_producer_uri))));
+            } else {
+                EXPECT_CALL(mock_logger, Error(HasSubstr("wrong")));
             }
         }
 
@@ -142,10 +148,15 @@ class AuthorizerHandlerTests : public Test {
         EXPECT_CALL(*mock_request, GetOpCode())
         .WillOnce(Return(asapo::kOpcodeAuthorize))
         ;
-        EXPECT_CALL(*mock_request, GetMessage())
-        .WillOnce(Return(expected_source_credentials.c_str()))
+        EXPECT_CALL(*mock_request, GetMetaData())
+        .WillOnce(ReturnRef(expected_source_credentials))
         ;
 
+        EXPECT_CALL(*mock_request, GetApiVersion())
+            .WillOnce(Return(expected_api_version))
+            ;
+
+
         MockAuthRequest(error, code);
         return handler.ProcessRequest(mock_request.get());
     }
@@ -206,6 +217,14 @@ TEST_F(AuthorizerHandlerTests, AuthorizeOk) {
     ASSERT_THAT(err, Eq(nullptr));
 }
 
+
+TEST_F(AuthorizerHandlerTests, AuthorizeFailsOnWrongAccessType) {
+    expected_access_type_str = "[\"read\"]";
+    auto err = MockFirstAuthorization(false);
+
+    ASSERT_THAT(err, Eq(asapo::ReceiverErrorTemplates::kAuthorizationFailure));
+}
+
 TEST_F(AuthorizerHandlerTests, ErrorOnSecondAuthorize) {
     MockFirstAuthorization(false);
     EXPECT_CALL(*mock_request, GetOpCode())
@@ -255,6 +274,20 @@ TEST_F(AuthorizerHandlerTests, RequestAuthorizeReturnsDifferentBeamtimeId) {
 }
 
 
+TEST_F(AuthorizerHandlerTests, RequestFromUnsupportedClient) {
+    EXPECT_CALL(*mock_request, GetOpCode())
+        .WillOnce(Return(asapo::kOpcodeAuthorize))
+        ;
+    EXPECT_CALL(*mock_request, GetApiVersion())
+        .WillOnce(Return("v0.2"))
+        ;
+
+    auto err = handler.ProcessRequest(mock_request.get());
+    ASSERT_THAT(err, Eq(asapo::ReceiverErrorTemplates::kUnsupportedClient));
+}
+
+
+
 
 TEST_F(AuthorizerHandlerTests, DataTransferRequestAuthorizeUsesCachedValue) {
     config.authorization_interval_ms = 10000;
diff --git a/receiver/unittests/request_handler/test_request_handler_db_last_stream.cpp b/receiver/unittests/request_handler/test_request_handler_db_last_stream.cpp
index 2e6762cb541f123ce9b529e28b23bbf648792934..9eb2310cacfa5da4097b788d42f609c19b944e25 100644
--- a/receiver/unittests/request_handler/test_request_handler_db_last_stream.cpp
+++ b/receiver/unittests/request_handler/test_request_handler_db_last_stream.cpp
@@ -63,7 +63,7 @@ class DbMetaLastStreamTests : public Test {
     ReceiverConfig config;
     std::string expected_beamtime_id = "beamtime_id";
     std::string expected_data_source = "source";
-    std::string info_str = R"({"lastId":10,"name":"stream","timestampCreated":1000000,"timestampLast":2000000})";
+    std::string info_str = R"({"lastId":10,"name":"stream","timestampCreated":1000000,"timestampLast":2000000,"finished":false,"nextStream":""})";
     asapo::StreamInfo expected_stream_info;
     void SetUp() override {
         GenericRequestHeader request_header;
diff --git a/receiver/unittests/request_handler/test_request_handler_db_stream_info.cpp b/receiver/unittests/request_handler/test_request_handler_db_stream_info.cpp
index 1d1d96d3d2cf47a84928f603fd5660af329b4804..a2828c31eafab387f1f373bc8ffa35e531a75cfa 100644
--- a/receiver/unittests/request_handler/test_request_handler_db_stream_info.cpp
+++ b/receiver/unittests/request_handler/test_request_handler_db_stream_info.cpp
@@ -64,7 +64,7 @@ class DbMetaStreamInfoTests : public Test {
     ReceiverConfig config;
     std::string expected_beamtime_id = "beamtime_id";
     std::string expected_data_source = "source";
-    std::string info_str = R"({"lastId":10,"name":"stream","timestampCreated":1000000,"timestampLast":2000000})";
+    std::string info_str = R"({"lastId":10,"name":"stream","timestampCreated":1000000,"timestampLast":2000000,"finished":false,"nextStream":""})";
     asapo::StreamInfo expected_stream_info;
     void SetUp() override {
         GenericRequestHeader request_header;
diff --git a/receiver/unittests/request_handler/test_requests_dispatcher.cpp b/receiver/unittests/request_handler/test_requests_dispatcher.cpp
index e5d6daaf0354bcd3a9a40f0cd5de60674bf06152..b03fc381186651da8185d8722c8f77c233547b08 100644
--- a/receiver/unittests/request_handler/test_requests_dispatcher.cpp
+++ b/receiver/unittests/request_handler/test_requests_dispatcher.cpp
@@ -294,6 +294,17 @@ TEST_F(RequestsDispatcherTests, ProcessRequestReturnsAuthorizationFailure) {
     ASSERT_THAT(std::string(response.message), HasSubstr("authorization"));
 }
 
+TEST_F(RequestsDispatcherTests, ProcessRequestReturnsUnsupportedClientFailure) {
+    MockHandleRequest(1, asapo::ReceiverErrorTemplates::kUnsupportedClient.Generate());
+    MockSendResponse(&response, false);
+
+    auto err = dispatcher->ProcessRequest(request);
+
+    ASSERT_THAT(err, Eq(asapo::ReceiverErrorTemplates::kUnsupportedClient));
+    ASSERT_THAT(response.error_code, Eq(asapo::kNetErrorNotSupported));
+    ASSERT_THAT(std::string(response.message), HasSubstr("supported"));
+}
+
 TEST_F(RequestsDispatcherTests, ProcessRequestReturnsReAuthorizationFailure) {
     MockHandleRequest(2, asapo::ReceiverErrorTemplates::kReAuthorizationFailure.Generate());
     MockSendResponse(&response, false);
diff --git a/receiver/unittests/statistics/test_receiver_statistics.cpp b/receiver/unittests/statistics/test_receiver_statistics.cpp
index de92258447f6d8788ef8fca98f07391cf63a5bfd..b64fc20b0834011bdaacb4ec6bd4712fe930e331 100644
--- a/receiver/unittests/statistics/test_receiver_statistics.cpp
+++ b/receiver/unittests/statistics/test_receiver_statistics.cpp
@@ -120,15 +120,11 @@ TEST_F(ReceiverStatisticTests, TimerForAll) {
 
     auto stat = ExtractStat();
 
+    ASSERT_THAT(stat.extra_entities[StatisticEntity::kDatabase].second, Gt(0));
 
-    ASSERT_THAT(stat.extra_entities[StatisticEntity::kDatabase].second, Ge(0.15));
-    ASSERT_THAT(stat.extra_entities[StatisticEntity::kDatabase].second, Le(0.25));
+    ASSERT_THAT(stat.extra_entities[StatisticEntity::kNetwork].second, Gt(0));
 
-    ASSERT_THAT(stat.extra_entities[StatisticEntity::kNetwork].second, Ge(0.25));
-    ASSERT_THAT(stat.extra_entities[StatisticEntity::kNetwork].second, Le(0.40));
-
-    ASSERT_THAT(stat.extra_entities[StatisticEntity::kDisk].second, Ge(0.3));
-    ASSERT_THAT(stat.extra_entities[StatisticEntity::kDisk].second, Le(0.45));
+    ASSERT_THAT(stat.extra_entities[StatisticEntity::kDisk].second, Gt(0));
 }
 
 }
diff --git a/receiver/unittests/statistics/test_statistics.cpp b/receiver/unittests/statistics/test_statistics.cpp
index 4f871e4118a19888ce2acb345e7e5edf7860966d..d31c2282f79c87b6c5d5ce4800888a0495443f69 100644
--- a/receiver/unittests/statistics/test_statistics.cpp
+++ b/receiver/unittests/statistics/test_statistics.cpp
@@ -7,6 +7,9 @@
 #include "../../src/statistics/statistics_sender_influx_db.h"
 #include "../../src/statistics/statistics_sender_fluentd.h"
 #include "../receiver_mocking.h"
+#include "../../src/receiver_config.h"
+#include "../../src/receiver_config_factory.h"
+#include "../mock_receiver_config.h"
 
 using ::testing::Test;
 using ::testing::Gt;
@@ -41,6 +44,9 @@ class StatisticTests : public Test {
     Statistics statistics{0};
     MockStatisticsSender mock_statistics_sender;
     void SetUp() override {
+        asapo::ReceiverConfig test_config;
+        test_config.monitor_performance = true;
+        asapo::SetReceiverConfig(test_config, "none");
         statistics.statistics_sender_list__.clear();
         statistics.statistics_sender_list__.emplace_back(&mock_statistics_sender);
     }
@@ -156,6 +162,16 @@ TEST_F(StatisticTests, SendStaticsDoesCallsSender) {
 }
 
 
+TEST_F(StatisticTests, DoNotSendStatistics) {
+    asapo::ReceiverConfig test_config;
+    test_config.monitor_performance = false;
+    asapo::SetReceiverConfig(test_config, "none");
+
+    EXPECT_CALL(mock_statistics_sender, SendStatistics_t(_)).Times(0);
+
+    statistics.SendIfNeeded(true);
+}
+
 TEST_F(StatisticTests, StatisticsSend) {
     statistics.IncreaseRequestCounter();
 
diff --git a/receiver/unittests/test_config.cpp b/receiver/unittests/test_config.cpp
index e654b439b2a44e231fd8405f034cd467830958d5..06c7c2ddf1f26860b168618ce6f48c4bfdb3d7c1 100644
--- a/receiver/unittests/test_config.cpp
+++ b/receiver/unittests/test_config.cpp
@@ -48,6 +48,7 @@ class ConfigTests : public Test {
         test_config.tag = "receiver1";
         test_config.performance_db_name = "db_test";
         test_config.performance_db_uri = "localhost:8086";
+        test_config.monitor_performance = true;
         test_config.database_uri = "localhost:27017";
         test_config.log_level = asapo::LogLevel::Error;
         test_config.authorization_interval_ms = 10000;
@@ -84,6 +85,7 @@ TEST_F(ConfigTests, ReadSettings) {
     ASSERT_THAT(config->log_level, Eq(asapo::LogLevel::Error));
     ASSERT_THAT(config->tag, Eq("receiver1"));
     ASSERT_THAT(config->use_datacache, Eq(false));
+    ASSERT_THAT(config->monitor_performance, Eq(true));
     ASSERT_THAT(config->datacache_reserved_share, Eq(10));
     ASSERT_THAT(config->datacache_size_gb, Eq(2));
     ASSERT_THAT(config->discovery_server, Eq("discovery"));
@@ -104,7 +106,7 @@ TEST_F(ConfigTests, ErrorReadSettings) {
     std::vector<std::string>fields {"PerformanceDbServer", "ListenPort", "DataServer", "ListenPort",
                                     "DataCache", "Use", "SizeGB", "ReservedShare", "DatabaseServer", "Tag",
                                     "AuthorizationServer", "AuthorizationInterval", "PerformanceDbName", "LogLevel",
-                                    "NThreads", "DiscoveryServer", "AdvertiseURI", "NetworkMode",
+                                    "NThreads", "DiscoveryServer", "AdvertiseURI", "NetworkMode","MonitorPerformance",
                                     "ReceiveToDiskThresholdMB"};
     for (const auto& field : fields) {
         auto err = asapo::SetReceiverConfig(test_config, field);
diff --git a/receiver/unittests/test_request.cpp b/receiver/unittests/test_request.cpp
index cea29e89dc78ee98dc004c9452cb33ac1ca40e96..00d724c03d6b1522b7fecb9133f19a363f95c565 100644
--- a/receiver/unittests/test_request.cpp
+++ b/receiver/unittests/test_request.cpp
@@ -83,6 +83,7 @@ class RequestTests : public Test {
     uint64_t expected_metadata_size = expected_metadata.size();
     asapo::Opcode expected_op_code = asapo::kOpcodeTransferData;
     char expected_request_message[asapo::kMaxMessageSize] = "test_message";
+    std::string expected_api_version = "v0.1";
     std::unique_ptr<Request> request;
     NiceMock<MockIO> mock_io;
     NiceMock<MockStatistics> mock_statistics;
@@ -96,6 +97,7 @@ class RequestTests : public Test {
         generic_request_header.op_code = expected_op_code;
         generic_request_header.custom_data[asapo::kPosIngestMode] = asapo::kDefaultIngestMode;
         strcpy(generic_request_header.message, expected_request_message);
+        strcpy(generic_request_header.api_version, expected_api_version.c_str());
         request.reset(new Request{generic_request_header, expected_socket_id, expected_origin_uri, nullptr, nullptr});
         request->io__ = std::unique_ptr<asapo::IO> {&mock_io};
         ON_CALL(mock_io, Receive_t(expected_socket_id, _, data_size_, _)).WillByDefault(
@@ -157,6 +159,11 @@ TEST_F(RequestTests, GetRequestMessage) {
     ASSERT_THAT(message, testing::StrEq(expected_request_message));
 }
 
+TEST_F(RequestTests, GetApiVersion) {
+    auto ver = request->GetApiVersion();
+    ASSERT_THAT(ver, testing::Eq(expected_api_version));
+}
+
 
 TEST_F(RequestTests, GetOriginUri) {
     auto uri = request->GetOriginUri();
diff --git a/tests/automatic/authorizer/check_authorize/CMakeLists.txt b/tests/automatic/authorizer/check_authorize/CMakeLists.txt
index ef88646421bc885f00e27bb0069094613c3cfc71..9847c8707a00b9cb64cf8241a3600e8b96e46772 100644
--- a/tests/automatic/authorizer/check_authorize/CMakeLists.txt
+++ b/tests/automatic/authorizer/check_authorize/CMakeLists.txt
@@ -15,10 +15,10 @@ if (WIN32)
 endif()
 
 configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/auth_secret.key auth_secret.key COPYONLY)
+configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/auth_secret_admin.key auth_secret_admin.key COPYONLY)
 
 configure_file(beamtime-metadata-11111111.json beamtime-metadata-11111111.json COPYONLY)
 
-
 configure_file(${CMAKE_CURRENT_SOURCE_DIR}/settings.json.in settings.json @ONLY)
 add_script_test("${TARGET_NAME}-authorize" "$<TARGET_PROPERTY:${TARGET_NAME},EXENAME>" nomem
         )
diff --git a/tests/automatic/authorizer/check_authorize/check_linux.sh b/tests/automatic/authorizer/check_authorize/check_linux.sh
index 0471f37aece486d24c9823772231f960dc787fb4..5eb25bcc32c255a757fe846b25b9c34676cf79fd 100644
--- a/tests/automatic/authorizer/check_authorize/check_linux.sh
+++ b/tests/automatic/authorizer/check_authorize/check_linux.sh
@@ -19,28 +19,44 @@ mkdir -p asap3/petra3/gpfs/p00/2019/data/11000015
 mkdir -p beamline/p07/current
 cp beamtime-metadata* beamline/p07/current/
 
+
+#tokens
+AdminToken=$ASAPO_CREATE_TOKEN
+echo admin $AdminToken
+
+curl -v --silent -H "Authorization: Bearer $AdminToken" --data '{"Subject": {"beamtimeId":"12345678"},"DaysValid":123,"AccessType":["read"]}' 127.0.0.1:5007/admin/issue --stderr -  | tee /dev/stderr | grep "bt_12345678"
+curl -v --silent -H "Authorization: Bearer blabla" --data '{"Subject": {"beamtimeId":"12345678"},"DaysValid":123,"AccessType":["read"]}' 127.0.0.1:5007/admin/issue --stderr -  | tee /dev/stderr | grep "token does not match"
+
 curl -v --silent --data '{"SourceCredentials":"processed%c20180508-000-COM20181%%detector%","OriginHost":"127.0.0.1:5555"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep c20180508-000-COM20181
 curl -v --silent --data '{"SourceCredentials":"processed%c20180508-000-COM20181%%detector%","OriginHost":"127.0.0.1:5555"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p00
 curl -v --silent --data '{"SourceCredentials":"processed%c20180508-000-COM20181%%detector%","OriginHost":"127.0.0.1:5555"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep detector
 
-token=onm80KQF8s6d2p_laW0S5IYanUUsLcnB3QO-6QQ1M90= #token for c20180508-000-COM20181
-curl -v --silent --data '{"SourceCredentials":"processed%c20180508-000-COM20181%%detector%onm80KQF8s6d2p_laW0S5IYanUUsLcnB3QO-6QQ1M90=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep detector
-curl -v --silent --data '{"SourceCredentials":"processed%c20180508-000-COM20181%auto%detector%onm80KQF8s6d2p_laW0S5IYanUUsLcnB3QO-6QQ1M90=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p00
+token=$C20180508_000_COM20181_TOKEN
+
+curl -v --silent --data "{\"SourceCredentials\":\"processed%c20180508-000-COM20181%%detector%$token\",\"OriginHost\":\"bla\"}" 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep detector
+curl -v --silent --data "{\"SourceCredentials\":\"processed%c20180508-000-COM20181%auto%detector%$token\",\"OriginHost\":\"bla\"}" 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p00
 curl -v --silent --data '{"SourceCredentials":"processed%c20180508-000-COM20181%%detector%bla","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 401
 
-token=dccMd3NT89i32Whz7yD4VQhmEJy6Kxc35wsBbWJLXp0= #token for 11000015
+token=$BT11000015_TOKEN
 #beamtine not online
-curl -v --silent --data '{"SourceCredentials":"raw%11000015%%detector%dccMd3NT89i32Whz7yD4VQhmEJy6Kxc35wsBbWJLXp0=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 401
+curl -v --silent --data "{\"SourceCredentials\":\"raw%11000015%%detector%$token\",\"OriginHost\":\"bla\"}" 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 401
+
+token=$BT11000016_TOKEN
+curl -v --silent --data "{\"SourceCredentials\":\"raw%11000016%%detector%${token}\",\"OriginHost\":\"bla\"}" 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 401
+
 
-token=Jaas_xTpkB0Zy5dFwjs4kCrY7yXMfbnW8Ca1aYhyKBs= #token for 11000016
-curl -v --silent --data '{"SourceCredentials":"raw%11000016%%detector%Jaas_xTpkB0Zy5dFwjs4kCrY7yXMfbnW8Ca1aYhyKBs=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 401
+token=$BLP07_TOKEN
 
+curl -v --silent --data "{\"SourceCredentials\":\"processed%auto%p07%detector%$token\",\"OriginHost\":\"bla\"}" 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 11111111
+curl -v --silent --data "{\"SourceCredentials\":\"raw%auto%p07%detector%$token\",\"OriginHost\":\"127.0.0.1:5007\"}" 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 11111111
+curl -v --silent --data "{\"SourceCredentials\":\"raw%auto%p07%detector%$token\",\"OriginHost\":\"127.0.0.1:5007\"}" 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p07
+curl -v --silent --data "{\"SourceCredentials\":\"raw%auto%p07%detector%$token\",\"OriginHost\":\"127.0.0.1:5007\"}" 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep /asap3/petra3/gpfs/p07/2020/data/11111111
 
-token=-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4= # for bl_p07
-curl -v --silent --data '{"SourceCredentials":"processed%auto%p07%detector%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=","OriginHost":"bla"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 11111111
-curl -v --silent --data '{"SourceCredentials":"raw%auto%p07%detector%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=","OriginHost":"127.0.0.1:5007"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep 11111111
-curl -v --silent --data '{"SourceCredentials":"raw%auto%p07%detector%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=","OriginHost":"127.0.0.1:5007"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep p07
-curl -v --silent --data '{"SourceCredentials":"raw%auto%p07%detector%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=","OriginHost":"127.0.0.1:5007"}' 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep /asap3/petra3/gpfs/p07/2020/data/11111111
+#read access
+curl -v --silent --data "{\"SourceCredentials\":\"processed%auto%p07%detector%$token\",\"OriginHost\":\"bla\"}" 127.0.0.1:5007/authorize --stderr - | tee /dev/stderr  | grep read
 
+#write access
+token=$BLP07_W_TOKEN
+curl -v --silent --data "{\"SourceCredentials\":\"processed%auto%p07%detector%$token\",\"OriginHost\":\"bla\"}" 127.0.0.1:5007/authorize --stderr -  | tee /dev/stderr  | grep write
 
 rm -rf asap3 beamline
\ No newline at end of file
diff --git a/tests/automatic/authorizer/check_authorize/check_windows.bat b/tests/automatic/authorizer/check_authorize/check_windows.bat
index c90cce0d6993493cea730a1cd5226ef6cc021018..866756d98356dcad5f39421c6882d98947ea3bd8 100644
--- a/tests/automatic/authorizer/check_authorize/check_windows.bat
+++ b/tests/automatic/authorizer/check_authorize/check_windows.bat
@@ -15,7 +15,8 @@ C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"processed%%c20180
 
 C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"raw%%c20180508-000-COM20181%%%%detector%%wrong\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr 401  || goto :error
 
-C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"raw%%auto%%p07%%detector%%-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr 11111111  || goto :error
+set token=%BLP07_TOKEN%
+C:\Curl\curl.exe -v  --silent --data "{\"SourceCredentials\":\"raw%%auto%%p07%%detector%%%token%\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr 11111111  || goto :error
 
 goto :clean
 
diff --git a/tests/automatic/authorizer/check_authorize/settings.json.in b/tests/automatic/authorizer/check_authorize/settings.json.in
index 51d5bfc3df247a96a4382d942bdbd372bb2dd497..13950ebea0ad1bc146c59042780fd02020c150a5 100644
--- a/tests/automatic/authorizer/check_authorize/settings.json.in
+++ b/tests/automatic/authorizer/check_authorize/settings.json.in
@@ -3,7 +3,8 @@
   "LogLevel":"debug",
   "RootBeamtimesFolder":"@ASAP3_FOLDER@",
   "CurrentBeamlinesFolder":"@CURRENT_BEAMLINES_FOLDER@",
-  "SecretFile":"auth_secret.key",
+  "UserSecretFile":"auth_secret.key",
+  "AdminSecretFile":"auth_secret_admin.key",
   "Ldap":
     {
         "Uri" : "ldap://localhost:389",
diff --git a/tests/automatic/broker/check_monitoring/CMakeLists.txt b/tests/automatic/broker/check_monitoring/CMakeLists.txt
index e5d07dee9d13103e2f4d7edfd5f8132b48407a50..05ef150fd611bf31a545ec5573b3acfc4c0db959 100644
--- a/tests/automatic/broker/check_monitoring/CMakeLists.txt
+++ b/tests/automatic/broker/check_monitoring/CMakeLists.txt
@@ -3,8 +3,8 @@ set(TARGET_NAME asapo-broker)
 ################################
 # Testing
 ################################
+prepare_asapo()
 configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/broker_settings.json settings.json COPYONLY)
-configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/auth_secret.key auth_secret.key COPYONLY)
 
 add_script_test("${TARGET_NAME}-monitoring" "$<TARGET_PROPERTY:${TARGET_NAME},EXENAME> $<TARGET_PROPERTY:asapo,EXENAME>" nomem
         )
diff --git a/tests/automatic/broker/check_monitoring/check_linux.sh b/tests/automatic/broker/check_monitoring/check_linux.sh
index caf55e77f9b566a57e8b945639adf54b45c861a1..35ab4eb7cfe57f6c7243b3fb2a9a99aee36e3191 100644
--- a/tests/automatic/broker/check_monitoring/check_linux.sh
+++ b/tests/automatic/broker/check_monitoring/check_linux.sh
@@ -11,12 +11,21 @@ Cleanup() {
 	echo cleanup
 	influx -execute "drop database ${database_name}"
 	kill -9 $brokerid
+  nomad stop nginx
+  nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
+  nomad stop authorizer
 }
 
 ! influx -execute "drop database ${database_name}"
 
 
-token=`$2 token -secret auth_secret.key data`
+nomad run nginx.nmd
+nomad run authorizer.nmd
+sleep 1
+
+
+token=$BT_DATA_TOKEN
+
 
 $1 -config settings.json &
 
@@ -24,16 +33,16 @@ sleep 0.3
 
 brokerid=`echo $!`
 
-groupid=`curl -d '' --silent 127.0.0.1:5005/creategroup`
+groupid=`curl -d '' --silent 127.0.0.1:5005/v0.1/creategroup`
 
 
 for i in `seq 1 50`;
 do
-    curl --silent 127.0.0.1:5005/database/data/source/stream/${groupid}/next?token=$token >/dev/null 2>&1 &
+    curl --silent 127.0.0.1:5005/v0.1/beamtime/data/source/stream/${groupid}/next?token=$token >/dev/null 2>&1 &
 done
 
 
-sleep 3
+sleep 12
 
 influx -execute "select sum(rate) from RequestsRate" -database=${database_name} -format=json | jq .results[0].series[0].values[0][1] | tee /dev/stderr | grep 51
 
diff --git a/tests/automatic/broker/get_last/CMakeLists.txt b/tests/automatic/broker/get_last/CMakeLists.txt
index d9f88dc2d467b13ad24b542abfe7235d0c7cfd93..3b688f5db85a0a377f372e734cbd66264737768f 100644
--- a/tests/automatic/broker/get_last/CMakeLists.txt
+++ b/tests/automatic/broker/get_last/CMakeLists.txt
@@ -3,8 +3,8 @@ set(TARGET_NAME asapo-broker)
 ################################
 # Testing
 ################################
+prepare_asapo()
+
 configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/broker_settings.json settings.json COPYONLY)
-configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/auth_secret.key auth_secret.key COPYONLY)
 
-add_script_test("${TARGET_NAME}-getlast" "$<TARGET_PROPERTY:${TARGET_NAME},EXENAME> $<TARGET_PROPERTY:asapo,EXENAME>" nomem
-        )
+add_script_test("${TARGET_NAME}-getlast" "$<TARGET_PROPERTY:${TARGET_NAME},EXENAME> $<TARGET_PROPERTY:asapo,EXENAME>" nomem)
diff --git a/tests/automatic/broker/get_last/check_linux.sh b/tests/automatic/broker/get_last/check_linux.sh
index a721a07b225b5a56241819c7dc80c22386438d20..842fc91a96ccef7259992f2a555800da5742e8cb 100644
--- a/tests/automatic/broker/get_last/check_linux.sh
+++ b/tests/automatic/broker/get_last/check_linux.sh
@@ -11,12 +11,19 @@ Cleanup() {
 	echo cleanup
 	echo "db.dropDatabase()" | mongo ${database_name}
 	kill -9 $brokerid
+  nomad stop nginx
+  nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
+  nomad stop authorizer
 }
 
 echo "db.data_${stream}.insert({"_id":2})" | mongo ${database_name}
 echo "db.data_${stream}.insert({"_id":1})" | mongo ${database_name}
 
-token=`$2 token -secret auth_secret.key data`
+token=$BT_DATA_TOKEN
+
+nomad run nginx.nmd
+nomad run authorizer.nmd
+sleep 1
 
 $1 -config settings.json &
 
@@ -24,23 +31,23 @@ sleep 0.3
 brokerid=`echo $!`
 
 
-groupid=`curl -d '' --silent 127.0.0.1:5005/creategroup`
+groupid=`curl -d '' --silent 127.0.0.1:5005/v0.1/creategroup`
 
-curl -v  --silent 127.0.0.1:5005/database/data/detector/${stream}/0/last?token=$token --stderr -
+curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/${stream}/0/last?token=$token --stderr -
 
-curl -v  --silent 127.0.0.1:5005/database/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":2'
-curl -v  --silent 127.0.0.1:5005/database/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":2'
+curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":2'
+curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":2'
 
 echo "db.data_${stream}.insert({"_id":3})" | mongo ${database_name}
 
-curl -v  --silent 127.0.0.1:5005/database/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":3'
+curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":3'
 
 echo "db.data_${stream}.insert({"_id":4})" | mongo ${database_name}
 
-curl -v  --silent 127.0.0.1:5005/database/data/detector/${stream}/${groupid}/next?token=$token --stderr - | grep '"_id":1'
-curl -v  --silent 127.0.0.1:5005/database/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":4'
+curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/${stream}/${groupid}/next?token=$token --stderr - | grep '"_id":1'
+curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":4'
 
 #with a new group
-groupid=`curl -d '' --silent 127.0.0.1:5005/creategroup`
-curl -v  --silent 127.0.0.1:5005/database/data/detector/${stream}/${groupid}/next?token=$token --stderr - | grep '"_id":1'
-curl -v  --silent 127.0.0.1:5005/database/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":4'
\ No newline at end of file
+groupid=`curl -d '' --silent 127.0.0.1:5005/v0.1/creategroup`
+curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/${stream}/${groupid}/next?token=$token --stderr - | grep '"_id":1'
+curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/${stream}/0/last?token=$token --stderr - | grep '"_id":4'
\ No newline at end of file
diff --git a/tests/automatic/broker/get_last/check_windows.bat b/tests/automatic/broker/get_last/check_windows.bat
index bc2cfdada576cdb8c12ff416df8f59716dccc274..497384df3d5af7eb9fac5d7419042187ec850066 100644
--- a/tests/automatic/broker/get_last/check_windows.bat
+++ b/tests/automatic/broker/get_last/check_windows.bat
@@ -7,32 +7,36 @@ echo db.data_default.insert({"_id":2}) | %mongo_exe% %database_name%  || goto :e
 set full_name="%1"
 set short_name="%~nx1"
 
-"%2" token -secret auth_secret.key data > token
-set /P token=< token
-
+c:\opt\consul\nomad run authorizer.nmd
+c:\opt\consul\nomad run nginx.nmd
 start /B "" "%full_name%" -config settings.json
-ping 192.0.2.1 -n 1 -w 1000 > nul
 
-C:\Curl\curl.exe -d '' --silent 127.0.0.1:5005/creategroup > groupid
+ping 192.0.2.1 -n 1 -w 5000 > nul
+
+
+set token=%BT_DATA_TOKEN%
+
+
+C:\Curl\curl.exe -d '' --silent 127.0.0.1:5005/v0.1/creategroup > groupid
 set /P groupid=< groupid
 
 
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/database/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":2  || goto :error
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/database/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":2  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":2  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":2  || goto :error
 
 echo db.data_default.insert({"_id":3}) | %mongo_exe% %database_name%  || goto :error
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/database/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":3  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":3  || goto :error
 
 echo db.data_default.insert({"_id":4}) | %mongo_exe% %database_name%  || goto :error
 
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/database/data/detector/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":1  || goto :error
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/database/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":4  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":1  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":4  || goto :error
 
 
-C:\Curl\curl.exe -d '' --silent 127.0.0.1:5005/creategroup > groupid
+C:\Curl\curl.exe -d '' --silent 127.0.0.1:5005/v0.1/creategroup > groupid
 set /P groupid=< groupid
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/database/data/detector/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":1  || goto :error
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/database/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":4  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":1  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/default/0/last?token=%token% --stderr - | findstr /c:\"_id\":4  || goto :error
 
 
 goto :clean
@@ -44,5 +48,7 @@ exit /b 1
 :clean
 Taskkill /IM "%short_name%" /F
 echo db.dropDatabase() | %mongo_exe% %database_name%
-del /f token
-del /f groupid
\ No newline at end of file
+del /f groupid
+c:\opt\consul\nomad stop authorizer
+c:\opt\consul\nomad stop nginx
+c:\opt\consul\nomad run nginx_kill.nmd  && c:\opt\consul\nomad stop -yes -purge nginx_kill
diff --git a/tests/automatic/broker/get_meta/CMakeLists.txt b/tests/automatic/broker/get_meta/CMakeLists.txt
index 3888fa09d1101324253c44864d9fcdaf6907fe18..dc491f8bf71ff293f831de25273b189a82d2e253 100644
--- a/tests/automatic/broker/get_meta/CMakeLists.txt
+++ b/tests/automatic/broker/get_meta/CMakeLists.txt
@@ -3,8 +3,9 @@ set(TARGET_NAME asapo-broker)
 ################################
 # Testing
 ################################
+prepare_asapo()
+
 configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/broker_settings.json settings.json COPYONLY)
-configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/auth_secret.key auth_secret.key COPYONLY)
 
 add_script_test("${TARGET_NAME}-getmeta" "$<TARGET_PROPERTY:${TARGET_NAME},EXENAME> $<TARGET_PROPERTY:asapo,EXENAME>" nomem
         )
diff --git a/tests/automatic/broker/get_meta/check_linux.sh b/tests/automatic/broker/get_meta/check_linux.sh
index 57b2e3335ff8e43ee8b071d4b68b1620fa1caa6e..ba084cc02db68612ef667a54c4859060c512f17e 100644
--- a/tests/automatic/broker/get_meta/check_linux.sh
+++ b/tests/automatic/broker/get_meta/check_linux.sh
@@ -10,17 +10,25 @@ Cleanup() {
 	echo cleanup
 	echo "db.dropDatabase()" | mongo ${database_name}
 	kill -9 $brokerid
+  nomad stop nginx
+  nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
+  nomad stop authorizer
 }
 
 echo 'db.meta.insert({"_id":0,"data":"test"})' | mongo ${database_name}
 
-token=`$2 token -secret auth_secret.key test`
+token=$BT_TEST_TOKEN
+
+nomad run nginx.nmd
+nomad run authorizer.nmd
+sleep 1
+
 
 $1 -config settings.json &
 
 sleep 0.3
 brokerid=`echo $!`
 
-curl -v  --silent 127.0.0.1:5005/database/test/detector/default/0/meta/0?token=$token --stderr - | tee /dev/stderr | grep '"data":"test"'
-curl -v  --silent 127.0.0.1:5005/database/test/detector/default/0/meta/1?token=$token --stderr - | tee /dev/stderr | grep 'no documents'
+curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/test/detector/default/0/meta/0?token=$token --stderr - | tee /dev/stderr | grep '"data":"test"'
+curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/test/detector/default/0/meta/1?token=$token --stderr - | tee /dev/stderr | grep 'no documents'
 
diff --git a/tests/automatic/broker/get_meta/check_windows.bat b/tests/automatic/broker/get_meta/check_windows.bat
index dc39360ad6fbd96ecd35bf7aa8d8e27766ea63e8..ee02ec0a23cff2fd68a12b8e45fb171693c460a6 100644
--- a/tests/automatic/broker/get_meta/check_windows.bat
+++ b/tests/automatic/broker/get_meta/check_windows.bat
@@ -6,15 +6,17 @@ echo db.meta.insert({"_id":0}) | %mongo_exe% %database_name%  || goto :error
 set full_name="%1"
 set short_name="%~nx1"
 
-"%2" token -secret auth_secret.key data > token
-set /P token=< token
-
+c:\opt\consul\nomad run authorizer.nmd
+c:\opt\consul\nomad run nginx.nmd
 start /B "" "%full_name%" -config settings.json
 
-ping 192.0.2.1 -n 1 -w 1000 > nul
+ping 192.0.2.1 -n 1 -w 5000 > nul
+
+set token=%BT_DATA_TOKEN%
+
 
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/database/data/detector/default/0/meta/0?token=%token% --stderr - | findstr /c:\"_id\":0  || goto :error
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/database/data/detector/default/0/meta/1?token=%token% --stderr - | findstr /c:"no documents"  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/default/0/meta/0?token=%token% --stderr - | findstr /c:\"_id\":0  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/detector/default/0/meta/1?token=%token% --stderr - | findstr /c:"no documents"  || goto :error
 
 
 goto :clean
@@ -26,5 +28,7 @@ exit /b 1
 :clean
 Taskkill /IM "%short_name%" /F
 echo db.dropDatabase() | %mongo_exe% %database_name%
-del /f token
-del /f groupid
\ No newline at end of file
+del /f groupid
+c:\opt\consul\nomad stop authorizer
+c:\opt\consul\nomad stop nginx
+c:\opt\consul\nomad run nginx_kill.nmd  && c:\opt\consul\nomad stop -yes -purge nginx_kill
diff --git a/tests/automatic/broker/get_next/CMakeLists.txt b/tests/automatic/broker/get_next/CMakeLists.txt
index 7a87036704e2e71ad5318e6c5cb978277a495b7b..1eba288a03006af23d30193982fb3fda054548a1 100644
--- a/tests/automatic/broker/get_next/CMakeLists.txt
+++ b/tests/automatic/broker/get_next/CMakeLists.txt
@@ -3,8 +3,9 @@ set(TARGET_NAME asapo-broker)
 ################################
 # Testing
 ################################
+prepare_asapo()
+
 configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/broker_settings.json settings.json COPYONLY)
-configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/auth_secret.key auth_secret.key COPYONLY)
 
 add_script_test("${TARGET_NAME}-getnext" "$<TARGET_PROPERTY:${TARGET_NAME},EXENAME> $<TARGET_PROPERTY:asapo,EXENAME>" nomem
         )
diff --git a/tests/automatic/broker/get_next/check_linux.sh b/tests/automatic/broker/get_next/check_linux.sh
index 277f78895f75804530199be5f17a3856bbbe9a63..80bb2312f4fcd83c0bb4d0b4c582709785ec2ef5 100644
--- a/tests/automatic/broker/get_next/check_linux.sh
+++ b/tests/automatic/broker/get_next/check_linux.sh
@@ -11,23 +11,31 @@ Cleanup() {
 	echo cleanup
 	echo "db.dropDatabase()" | mongo ${database_name}
 	kill -9 $brokerid
+  nomad stop nginx
+  nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
+  nomad stop authorizer
 }
 
 echo "db.data_${stream}.insert({"_id":2})" | mongo ${database_name}
 echo "db.data_${stream}.insert({"_id":1})" | mongo ${database_name}
 
-token=`$2 token -secret auth_secret.key data`
+token=$BT_DATA_TOKEN
+
+nomad run nginx.nmd
+nomad run authorizer.nmd
+sleep 1
+
 
 $1 -config settings.json &
 
 sleep 0.3
 brokerid=`echo $!`
 
-groupid=`curl -d '' --silent 127.0.0.1:5005/creategroup`
-curl -v  --silent 127.0.0.1:5005/database/data/source/${stream}/${groupid}/next?token=$token --stderr - | tee /dev/stderr  | grep '"_id":1'
-curl -v  --silent 127.0.0.1:5005/database/data/source/${stream}/${groupid}/next?token=$token --stderr - | tee /dev/stderr  | grep '"_id":2'
-curl -v  --silent 127.0.0.1:5005/database/data/source/${stream}/${groupid}/next?token=$token --stderr - | tee /dev/stderr  | grep '"id_max":2'
+groupid=`curl -d '' --silent 127.0.0.1:5005/v0.1/creategroup`
+curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/source/${stream}/${groupid}/next?token=$token --stderr - | tee /dev/stderr  | grep '"_id":1'
+curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/source/${stream}/${groupid}/next?token=$token --stderr - | tee /dev/stderr  | grep '"_id":2'
+curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/source/${stream}/${groupid}/next?token=$token --stderr - | tee /dev/stderr  | grep '"id_max":2'
 
 # with a new group
-groupid=`curl -d '' --silent 127.0.0.1:5005/creategroup`
-curl -v  --silent 127.0.0.1:5005/database/data/source/${stream}/${groupid}/next?token=$token --stderr - | tee /dev/stderr | grep '"_id":1'
\ No newline at end of file
+groupid=`curl -d '' --silent 127.0.0.1:5005/v0.1/creategroup`
+curl -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/source/${stream}/${groupid}/next?token=$token --stderr - | tee /dev/stderr | grep '"_id":1'
\ No newline at end of file
diff --git a/tests/automatic/broker/get_next/check_windows.bat b/tests/automatic/broker/get_next/check_windows.bat
index 0f3962bc4b44d9bc89267a8f2cce0728ccd79df7..89ba33faa721e4c8267d6ae026b5142f1691d140 100644
--- a/tests/automatic/broker/get_next/check_windows.bat
+++ b/tests/automatic/broker/get_next/check_windows.bat
@@ -1,4 +1,4 @@
-SET database_name=data_detector
+SET database_name=data_source
 SET mongo_exe="c:\Program Files\MongoDB\Server\4.2\bin\mongo.exe"
 
 echo db.data_default.insert({"_id":1}) | %mongo_exe% %database_name%  || goto :error
@@ -7,22 +7,23 @@ echo db.data_default.insert({"_id":2}) | %mongo_exe% %database_name%  || goto :e
 set full_name="%1"
 set short_name="%~nx1"
 
-"%2" token -secret auth_secret.key data > token
-set /P token=< token
+set token=%BT_DATA_TOKEN%
 
+c:\opt\consul\nomad run authorizer.nmd
+c:\opt\consul\nomad run nginx.nmd
 start /B "" "%full_name%" -config settings.json
 
-ping 192.0.2.1 -n 1 -w 1000 > nul
+ping 192.0.2.1 -n 1 -w 5000 > nul
 
-C:\Curl\curl.exe -d '' --silent 127.0.0.1:5005/creategroup > groupid
+C:\Curl\curl.exe -d '' --silent 127.0.0.1:5005/v0.1/creategroup > groupid
 set /P groupid=< groupid
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/database/data/detector/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":1  || goto :error
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/database/data/detector/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":2  || goto :error
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/database/data/detector/default/%groupid%/next?token=%token% --stderr - | findstr  /c:\"id_max\":2  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/source/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":1  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/source/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":2  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/source/default/%groupid%/next?token=%token% --stderr - | findstr  /c:\"id_max\":2  || goto :error
 
-C:\Curl\curl.exe -d '' --silent 127.0.0.1:5005/creategroup > groupid
+C:\Curl\curl.exe -d '' --silent 127.0.0.1:5005/v0.1/creategroup > groupid
 set /P groupid=< groupid
-C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/database/data/detector/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":1  || goto :error
+C:\Curl\curl.exe -v  --silent 127.0.0.1:5005/v0.1/beamtime/data/source/default/%groupid%/next?token=%token% --stderr - | findstr /c:\"_id\":1  || goto :error
 
 goto :clean
 
@@ -34,4 +35,7 @@ exit /b 1
 Taskkill /IM "%short_name%" /F
 echo db.dropDatabase() | %mongo_exe% %database_name%
 del /f token
-del /f groupid
\ No newline at end of file
+del /f groupid
+c:\opt\consul\nomad stop authorizer
+c:\opt\consul\nomad stop nginx
+c:\opt\consul\nomad run nginx_kill.nmd  && c:\opt\consul\nomad stop -yes -purge nginx_kill
diff --git a/tests/automatic/bug_fixes/consumer_python_memleak/check_linux.sh b/tests/automatic/bug_fixes/consumer_python_memleak/check_linux.sh
index 21f6774fb85e118e2cf53a12447be4970b3c58f8..f06e415503d8880ec671ad4cb9523bdeab92aab5 100644
--- a/tests/automatic/bug_fixes/consumer_python_memleak/check_linux.sh
+++ b/tests/automatic/bug_fixes/consumer_python_memleak/check_linux.sh
@@ -6,7 +6,8 @@ trap Cleanup EXIT
 endpoint=127.0.0.1:8400
 path=.
 beamtime_id=asapo_test
-token="IEfwsWa0GXky2S3MkxJSUHJT1sI8DD5teRdjBUXVRxk="
+#asapo_test read token
+token=$ASAPO_TEST_RW_TOKEN
 
 
 Cleanup() {
diff --git a/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat b/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat
index c71a4d9fae13c1927ddfd5c73c9579208ebf1e38..b039195e0a103dc00763d16a212d545d7c804b4f 100644
--- a/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat
+++ b/tests/automatic/bug_fixes/producer_send_after_restart/check_windows.bat
@@ -8,16 +8,15 @@ SET receiver_folder="%receiver_root_folder%\test_facility\gpfs\%beamline%\2019\d
 
 set producer_short_name="%~nx1"
 
-
-"%3" token -secret auth_secret.key %beamtime_id% > token
-set /P token=< token
-
 set proxy_address="127.0.0.1:8400"
 
 echo db.%beamtime_id%_detector.insert({dummy:1}) | %mongo_exe% %beamtime_id%_detector
 
 call start_services.bat
 
+"%3" token -endpoint http://127.0.0.1:8400/asapo-authorizer -secret admin_token.key -types read %beamtime_id% > token
+set /P token=< token
+
 REM producer
 mkdir %receiver_folder%
 mkdir  c:\tmp\asapo\test_in\processed
diff --git a/tests/automatic/common_scripts/start_services.bat b/tests/automatic/common_scripts/start_services.bat
index e27bc8ccff35a4b9c29c4ea6fbd7acdeae89870a..cbe15b437378469b298ce4355f6bd4ffa592ef4b 100644
--- a/tests/automatic/common_scripts/start_services.bat
+++ b/tests/automatic/common_scripts/start_services.bat
@@ -16,7 +16,7 @@ if %i% EQU 20 (
     goto :error
 )
 ping 192.0.2.1 -n 1 -w 1000 >nul
-curl --silent --fail 127.0.0.1:8400/asapo-discovery/asapo-receiver --stderr - | findstr 127.0.0.1  || goto :repeat
-curl --silent --fail 127.0.0.1:8400/asapo-discovery/asapo-broker --stderr - | findstr 127.0.0.1 || goto :repeat
-curl --silent --fail 127.0.0.1:8400/asapo-discovery/asapo-file-transfer --stderr -  | findstr 127.0.0.1 || goto :repeat
+curl --silent --fail 127.0.0.1:8400/asapo-discovery/v0.1/asapo-receiver?protocol=v0.1 --stderr - | findstr 127.0.0.1  || goto :repeat
+curl --silent --fail 127.0.0.1:8400/asapo-discovery/v0.1/asapo-broker?protocol=v0.1 --stderr - | findstr 127.0.0.1 || goto :repeat
+curl --silent --fail 127.0.0.1:8400/asapo-discovery/v0.1/asapo-file-transfer?protocol=v0.1 --stderr -  | findstr 127.0.0.1 || goto :repeat
 echo discovery ready
diff --git a/tests/automatic/consumer/consumer_api/check_linux.sh b/tests/automatic/consumer/consumer_api/check_linux.sh
index 3fb2718ca677c12d23096a68694338b0e3911f70..7e0f342a598a82b8de05a33e95ad617a981cbce8 100644
--- a/tests/automatic/consumer/consumer_api/check_linux.sh
+++ b/tests/automatic/consumer/consumer_api/check_linux.sh
@@ -3,7 +3,9 @@
 beamtime_id=test_run
 data_source=detector
 database_name=${beamtime_id}_${data_source}
-token_test_run=K38Mqc90iRv8fC7prcFHd994mF_wfUiJnWBfIjIzieo=
+token_test_run=$BT_TEST_RUN_TOKEN
+
+
 
 set -e
 
@@ -14,6 +16,7 @@ Cleanup() {
     nomad stop nginx
     nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
     nomad stop discovery
+    nomad stop authorizer
     nomad stop broker
     echo "db.dropDatabase()" | mongo ${database_name}
 	rm -f 1_1 1
@@ -22,6 +25,7 @@ Cleanup() {
 
 nomad run nginx.nmd
 nomad run discovery.nmd
+nomad run authorizer.nmd
 nomad run broker.nmd
 
 sleep 1
@@ -36,11 +40,13 @@ do
 	echo 'db.data_stream1.insert({"_id":'$i',"size":6,"name":"'1$i'","timestamp":1000,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"test":10}})' | mongo ${database_name}
 done
 
+echo 'db.data_stream1.insert({"_id":'6',"size":0,"name":"asapo_finish_stream","timestamp":1000,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"next_stream":"ns"}})' | mongo ${database_name}
+
 for i in `seq 1 5`;
 do
 	echo 'db.data_stream2.insert({"_id":'$i',"size":6,"name":"'2$i'","timestamp":2000,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"test":10}})' | mongo ${database_name}
 done
-
+echo 'db.data_stream2.insert({"_id":'6',"size":0,"name":"asapo_finish_stream","timestamp":2000,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"next_stream":"asapo_no_next"}})' | mongo ${database_name}
 
 echo hello1 > 1
 
diff --git a/tests/automatic/consumer/consumer_api/check_windows.bat b/tests/automatic/consumer/consumer_api/check_windows.bat
index 19e163518f92fa6ea3bfc202608cd8b4e33c7174..9e3be222df17dbb674d4c7312abc02fe46a584cc 100644
--- a/tests/automatic/consumer/consumer_api/check_windows.bat
+++ b/tests/automatic/consumer/consumer_api/check_windows.bat
@@ -4,7 +4,7 @@ SET data_source=detector
 
 SET database_name=%beamtime_id%_%data_source%
 SET mongo_exe="c:\Program Files\MongoDB\Server\4.2\bin\mongo.exe"
-set token_test_run=K38Mqc90iRv8fC7prcFHd994mF_wfUiJnWBfIjIzieo=
+set token_test_run=%BT_TEST_RUN_TOKEN%
 
 call start_services.bat
 
@@ -12,7 +12,13 @@ for /l %%x in (1, 1, 10) do echo db.data_default.insert({"_id":%%x,"size":6,"nam
 
 for /l %%x in (1, 1, 5) do echo db.data_stream1.insert({"_id":%%x,"size":6,"name":"1%%x","timestamp":1000,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"test":10}}) | %mongo_exe% %database_name%  || goto :error
 
+echo db.data_stream1.insert({"_id":6,"size":0,"name":"asapo_finish_stream","timestamp":1000,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"next_stream":"ns"}}) | %mongo_exe% %database_name%  || goto :error
+
 for /l %%x in (1, 1, 5) do echo db.data_stream2.insert({"_id":%%x,"size":6,"name":"2%%x","timestamp":2000,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"test":10}}) | %mongo_exe% %database_name%  || goto :error
+
+echo db.data_stream2.insert({"_id":6,"size":0,"name":"asapo_finish_stream","timestamp":2000,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"next_stream":"asapo_no_next"}}) | %mongo_exe% %database_name%  || goto :error
+
+
 echo hello1 > 1
 
 
diff --git a/tests/automatic/consumer/consumer_api/consumer_api.cpp b/tests/automatic/consumer/consumer_api/consumer_api.cpp
index e451289ed76e159ef3d4e8d7df80bfb54836fd9f..a6f889c2abb8d5943c6c68bb8153a7db9e8e16ea 100644
--- a/tests/automatic/consumer/consumer_api/consumer_api.cpp
+++ b/tests/automatic/consumer/consumer_api/consumer_api.cpp
@@ -31,6 +31,14 @@ void TestSingle(const std::unique_ptr<asapo::Consumer>& consumer, const std::str
     asapo::MessageMeta fi;
     asapo::Error err;
 
+    std::string client,server;
+    bool supported;
+    err = consumer->GetVersionInfo(&client,&server,&supported);
+    M_AssertTrue(err == nullptr, "Version OK");
+    M_AssertTrue(supported, "client supported by server");
+    M_AssertTrue(!client.empty(), "client version");
+    M_AssertTrue(!server.empty(), "server version");
+
     err = consumer->GetNext(group_id, &fi, nullptr, "default");
     if (err) {
         std::cout << err->Explain() << std::endl;
@@ -84,6 +92,14 @@ void TestSingle(const std::unique_ptr<asapo::Consumer>& consumer, const std::str
     M_AssertTrue(err == nullptr, "GetCurrentSize no error");
     M_AssertTrue(size == 10, "GetCurrentSize size");
 
+    auto size1 = consumer->GetCurrentSize("stream1", &err);
+    M_AssertTrue(err == nullptr, "GetCurrentSize 1 no error");
+    M_AssertTrue(size1 == 5, "GetCurrentSize 1 size");
+
+    auto size2 = consumer->GetCurrentSize("stream2", &err);
+    M_AssertTrue(err == nullptr, "GetCurrentSize 2 no error");
+    M_AssertTrue(size2 == 5, "GetCurrentSize 2 size");
+
     err = consumer->ResetLastReadMarker(group_id,"default");
     M_AssertTrue(err == nullptr, "SetLastReadMarker");
 
@@ -118,7 +134,6 @@ void TestSingle(const std::unique_ptr<asapo::Consumer>& consumer, const std::str
     M_AssertTrue(err != nullptr, "query5");
     M_AssertTrue(messages.size() == 0, "size of query answer 5");
 
-
 //streams
 
     err = consumer->GetNext(group_id, &fi, nullptr, "stream1");
@@ -133,19 +148,22 @@ void TestSingle(const std::unique_ptr<asapo::Consumer>& consumer, const std::str
     M_AssertTrue(err == nullptr, "GetNext stream2 no error");
     M_AssertTrue(fi.name == "21", "GetNext stream2 filename");
 
-    auto streams = consumer->GetStreamList("",&err);
+    auto streams = consumer->GetStreamList("",asapo::StreamFilter::kAllStreams,&err);
     M_AssertTrue(err == nullptr, "GetStreamList no error");
     M_AssertTrue(streams.size() == 3, "streams.size");
-    M_AssertTrue(streams[0].name == "default", "streams0.name1");
-    M_AssertTrue(streams[1].name == "stream1", "streams1.name2");
-    M_AssertTrue(streams[2].name == "stream2", "streams2.name3");
-    std::cout<<streams[0].Json(false)<<std::endl;
-    std::cout<<streams[1].Json(false)<<std::endl;
-    std::cout<<streams[2].Json(false)<<std::endl;
+    M_AssertTrue(streams[0].name == "default", "streams0.name");
+    M_AssertTrue(streams[1].name == "stream1", "streams1.name");
+    M_AssertTrue(streams[2].name == "stream2", "streams2.name");
+    M_AssertTrue(streams[1].finished == true, "stream1 finished");
+    M_AssertTrue(streams[1].next_stream == "ns", "stream1 next stream");
+    M_AssertTrue(streams[2].finished == true, "stream2 finished");
+    M_AssertTrue(streams[2].next_stream == "", "stream2 no next stream");
     M_AssertTrue(asapo::NanosecsEpochFromTimePoint(streams[0].timestamp_created) == 0, "streams0.timestamp");
-    M_AssertTrue(asapo::NanosecsEpochFromTimePoint(streams[0].timestamp_lastentry) == 0, "streams0.timestamp lastentry not set");
+    M_AssertTrue(asapo::NanosecsEpochFromTimePoint(streams[0].timestamp_lastentry) == 0, "streams0.timestamp lastentry");
     M_AssertTrue(asapo::NanosecsEpochFromTimePoint(streams[1].timestamp_created) == 1000, "streams1.timestamp");
+    M_AssertTrue(asapo::NanosecsEpochFromTimePoint(streams[1].timestamp_lastentry) == 1000, "streams1.timestamp lastentry");
     M_AssertTrue(asapo::NanosecsEpochFromTimePoint(streams[2].timestamp_created) == 2000, "streams2.timestamp");
+    M_AssertTrue(asapo::NanosecsEpochFromTimePoint(streams[2].timestamp_lastentry) == 2000, "streams2.timestamp lastentry");
 // acknowledges
 
     auto id = consumer->GetLastAcknowledgedMessage(group_id,"default", &err);
@@ -238,6 +256,11 @@ void TestDataset(const std::unique_ptr<asapo::Consumer>& consumer, const std::st
     M_AssertTrue(err == nullptr, "GetDatasetById error");
     M_AssertTrue(dataset.content[2].name == "8_3", "GetDatasetById filename");
 
+    auto size = consumer->GetCurrentDatasetCount("default", false, &err);
+    M_AssertTrue(err == nullptr, "GetCurrentDatasetCount no error");
+    M_AssertTrue(size == 10, "GetCurrentDatasetCount size");
+
+
 // incomplete datasets without min_size
 
     dataset = consumer->GetNextDataset(group_id, 0, "incomplete", &err);
@@ -271,6 +294,14 @@ void TestDataset(const std::unique_ptr<asapo::Consumer>& consumer, const std::st
     M_AssertTrue(err == nullptr, "GetDatasetById incomplete minsize error");
     M_AssertTrue(dataset.content[0].name == "2_1", "GetDatasetById incomplete minsize filename");
 
+    size = consumer->GetCurrentDatasetCount("incomplete", true, &err);
+    M_AssertTrue(err == nullptr, "GetCurrentDatasetCount including incomplete no error");
+    M_AssertTrue(size == 5, "GetCurrentDatasetCount including incomplete size");
+
+    size = consumer->GetCurrentDatasetCount("incomplete", false, &err);
+    M_AssertTrue(err == nullptr, "GetCurrentDatasetCount excluding incomplete no error");
+    M_AssertTrue(size == 0, "GetCurrentDatasetCount excluding incomplete size");
+
 
 }
 
diff --git a/tests/automatic/consumer/consumer_api_python/authorizer_settings.json.tpl.in b/tests/automatic/consumer/consumer_api_python/authorizer_settings.json.tpl.in
index d4916f87e39ca954a28ffc6e72199acd0d046c3f..611bdefecbc64cd2c976e69593975c81eaf57b13 100644
--- a/tests/automatic/consumer/consumer_api_python/authorizer_settings.json.tpl.in
+++ b/tests/automatic/consumer/consumer_api_python/authorizer_settings.json.tpl.in
@@ -3,7 +3,8 @@
   "LogLevel":"debug",
   "RootBeamtimesFolder":"@ASAP3_FOLDER@",
   "CurrentBeamlinesFolder":"@CURRENT_BEAMLINES_FOLDER@",
-  "SecretFile":"auth_secret.key",
+  "UserSecretFile":"auth_secret.key",
+  "AdminSecretFile":"auth_secret_admin.key",
   "TokenDurationMin":600,
   "Ldap":
     {
diff --git a/tests/automatic/consumer/consumer_api_python/check_linux.sh b/tests/automatic/consumer/consumer_api_python/check_linux.sh
index 22d179ce11e85172925902031dbfafe5c3147e19..fabcbdc2e737426d4d2b1843c73685498dc84660 100644
--- a/tests/automatic/consumer/consumer_api_python/check_linux.sh
+++ b/tests/automatic/consumer/consumer_api_python/check_linux.sh
@@ -4,10 +4,9 @@ beamtime_id=test_run
 source_path=`pwd`/asap3/petra3/gpfs/p01/2019/data/$beamtime_id
 data_source=detector
 database_name=${beamtime_id}_${data_source}
-token_test_run=K38Mqc90iRv8fC7prcFHd994mF_wfUiJnWBfIjIzieo=
+token_test_run=$BT_TEST_RUN_TOKEN
 set -e
 
-
 trap Cleanup EXIT
 
 Cleanup() {
@@ -50,6 +49,10 @@ do
 	echo 'db.data_stream2.insert({"_id":'$i',"size":6,"name":"'2$i'","timestamp":3000,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"test":10}})' | mongo ${database_name}
 done
 
+echo 'db.data_stream1.insert({"_id":'6',"size":0,"name":"asapo_finish_stream","timestamp":2000,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"next_stream":"ns"}})' | mongo ${database_name}
+echo 'db.data_stream2.insert({"_id":'6',"size":0,"name":"asapo_finish_stream","timestamp":3000,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"next_stream":"asapo_no_next"}})' | mongo ${database_name}
+
+
 sleep 1
 
 export PYTHONPATH=$1:${PYTHONPATH}
diff --git a/tests/automatic/consumer/consumer_api_python/check_windows.bat b/tests/automatic/consumer/consumer_api_python/check_windows.bat
index adcf8ce57d735d5b6da75b464aabe61bc82d0cc1..6f56883eb624186cf375822f27c913f558e7817e 100644
--- a/tests/automatic/consumer/consumer_api_python/check_windows.bat
+++ b/tests/automatic/consumer/consumer_api_python/check_windows.bat
@@ -8,8 +8,7 @@ SET data_source=detector
 SET database_name=%beamtime_id%_%data_source%
 
 SET mongo_exe="c:\Program Files\MongoDB\Server\4.2\bin\mongo.exe"
-set token_test_run=K38Mqc90iRv8fC7prcFHd994mF_wfUiJnWBfIjIzieo=
-
+set token_test_run=%BT_TEST_RUN_TOKEN%
 call start_services.bat
 
 for /l %%x in (1, 1, 5) do echo db.data_default.insert({"_id":%%x,"size":6,"name":"%%x","timestamp":0,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"test":10}}) | %mongo_exe% %database_name%  || goto :error
@@ -20,6 +19,8 @@ for /l %%x in (1, 1, 5) do echo db.data_stream1.insert({"_id":%%x,"size":6,"name
 
 for /l %%x in (1, 1, 5) do echo db.data_stream2.insert({"_id":%%x,"size":6,"name":"2%%x","timestamp":3000,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"test":10}}) | %mongo_exe% %database_name%  || goto :error
 
+echo db.data_stream1.insert({"_id":6,"size":0,"name":"asapo_finish_stream","timestamp":2000,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"next_stream":"ns"}}) | %mongo_exe% %database_name%  || goto :error
+echo db.data_stream2.insert({"_id":6,"size":0,"name":"asapo_finish_stream","timestamp":3000,"source":"none","buf_id":0,"dataset_substream":0,"meta":{"next_stream":"asapo_no_next"}}) | %mongo_exe% %database_name%  || goto :error
 
 mkdir %source_path%
 
diff --git a/tests/automatic/consumer/consumer_api_python/consumer_api.py b/tests/automatic/consumer/consumer_api_python/consumer_api.py
index 013ce0516a8113aa1ee22b255dc37ddfe93034ef..1b5bf5a153d00bfbd4336529a2ee9a9b9823799f 100644
--- a/tests/automatic/consumer/consumer_api_python/consumer_api.py
+++ b/tests/automatic/consumer/consumer_api_python/consumer_api.py
@@ -29,6 +29,13 @@ def assert_usermetadata(meta, name):
         print('meta: ', json.dumps(meta, indent=4, sort_keys=True))
         sys.exit(1)
 
+def assert_version(version):
+    print("asserting version ",version)
+    ok = version['supported'] and version['client'] and version['server']
+    if not ok:
+        sys.exit(1)
+
+
 
 def assert_eq(val, expected, name):
     print("asserting eq for " + name)
@@ -48,6 +55,10 @@ def check_file_transfer_service(consumer, group_id):
 
 def check_single(consumer, group_id):
     global thread_res
+
+    version = consumer.get_version_info()
+    assert_version(version)
+
     _, meta = consumer.get_next(group_id, meta_only=True)
     assert_metaname(meta, "1", "get next1")
     assert_usermetadata(meta, "get next1")
@@ -79,6 +90,14 @@ def check_single(consumer, group_id):
     size = consumer.get_current_size()
     assert_eq(size, 5, "get_current_size")
 
+    try:
+        size = consumer.get_current_dataset_count(include_incomplete = True)
+    except asapo_consumer.AsapoWrongInputError as err:
+        pass
+    else:
+        exit_on_noerr("get_current_dataset_count for single messages err")
+
+
     consumer.reset_lastread_marker(group_id)
 
     _, meta = consumer.get_next(group_id, meta_only=True)
@@ -121,14 +140,30 @@ def check_single(consumer, group_id):
     _, meta = consumer.get_next(group_id, meta_only=True, stream = "stream2")
     assert_metaname(meta, "21", "get next stream2")
 
-    streams = consumer.get_stream_list("")
+    streams = consumer.get_stream_list("","all")
     assert_eq(len(streams), 4, "number of streams")
     print(streams)
     assert_eq(streams[0]["name"], "default", "streams_name1")
+    assert_eq(streams[0]["finished"], False, "streams_finished1")
     assert_eq(streams[1]["name"], "streamfts", "streams_name2")
     assert_eq(streams[2]["name"], "stream1", "streams_name2")
     assert_eq(streams[3]["name"], "stream2", "streams_name3")
     assert_eq(streams[1]["timestampCreated"], 1000, "streams_timestamp2")
+    assert_eq(streams[2]["timestampLast"], 2000, "streams_timestamplast2")
+    assert_eq(streams[2]["finished"], True, "streams_finished2")
+    assert_eq(streams[2]["nextStream"], "ns", "next stream 2")
+    assert_eq(streams[2]["lastId"], 5, "last id stream 2")
+    assert_eq(streams[3]["finished"], True, "streams_finished3")
+    assert_eq(streams[3]["nextStream"], "", "next stream 3")
+    assert_eq(streams[3]["lastId"], 5, "last id stream 3")
+
+    finished_streams = consumer.get_stream_list("","finished")
+    assert_eq(len(finished_streams), 2, "number of finished streams")
+    assert_eq(finished_streams[0]["name"], "stream1", "finished streams_name1")
+
+    unfinished_streams = consumer.get_stream_list("","unfinished")
+    assert_eq(len(unfinished_streams), 2, "number of unfinished streams")
+    assert_eq(unfinished_streams[0]["name"], "default", "unfinished streams_name1")
 
     # acks
     try:
@@ -269,6 +304,9 @@ def check_dataset(consumer, group_id):
     assert_eq(res['id'], 8, "get_dataset_by_id1 id")
     assert_metaname(res['content'][2], "8_3", "get get_dataset_by_id1 name3")
 
+    size = consumer.get_current_dataset_count()
+    assert_eq(size, 10, "get_current_dataset_count")
+
     # incomplete datesets without min_size given
     try:
         consumer.get_next_dataset(group_id, stream = "incomplete")
@@ -308,6 +346,14 @@ def check_dataset(consumer, group_id):
     res = consumer.get_dataset_by_id(2, min_size=1, stream = "incomplete")
     assert_eq(res['id'], 2, "get_dataset_by_id incomplete with minsize")
 
+    size = consumer.get_current_dataset_count(stream = "incomplete", include_incomplete = False)
+    assert_eq(size, 0, "get_current_dataset_count excluding incomplete")
+
+    size = consumer.get_current_dataset_count(stream = "incomplete", include_incomplete = True)
+    assert_eq(size, 5, "get_current_dataset_count including incomplete")
+
+    size = consumer.get_current_size(stream = "incomplete") # should work as well
+    assert_eq(size, 5, "get_current_size for datasets")
 
 source, path, beamtime, token, mode = sys.argv[1:]
 
diff --git a/tests/automatic/consumer/next_multithread_broker/check_linux.sh b/tests/automatic/consumer/next_multithread_broker/check_linux.sh
index b172ad0ac649f3ec6646f1c71b3ce881fd55d61b..d507f1e9fc261b660e7a5a77cd32eff4868c7436 100644
--- a/tests/automatic/consumer/next_multithread_broker/check_linux.sh
+++ b/tests/automatic/consumer/next_multithread_broker/check_linux.sh
@@ -1,8 +1,7 @@
 #!/usr/bin/env bash
 
 database_name=test_run_detector
-token_test_run=K38Mqc90iRv8fC7prcFHd994mF_wfUiJnWBfIjIzieo=
-
+token_test_run=$BT_TEST_RUN_TOKEN
 set -e
 
 trap Cleanup EXIT
@@ -12,6 +11,7 @@ Cleanup() {
     nomad stop nginx
     nomad run nginx_kill.nmd  && nomad stop -yes -purge nginx_kill
     nomad stop discovery
+    nomad stop authorizer
     nomad stop broker
 	echo "db.dropDatabase()" | mongo ${database_name}
 }
@@ -19,6 +19,7 @@ Cleanup() {
 
 nomad run nginx.nmd
 nomad run discovery.nmd
+nomad run authorizer.nmd
 nomad run broker.nmd
 
 sleep 1
diff --git a/tests/automatic/consumer/next_multithread_broker/check_windows.bat b/tests/automatic/consumer/next_multithread_broker/check_windows.bat
index 4a13c733a4e3764b4aa452e7e7af806fc6eb5f22..d995d7cec883a1b6a71059035d6b52d094fe682e 100644
--- a/tests/automatic/consumer/next_multithread_broker/check_windows.bat
+++ b/tests/automatic/consumer/next_multithread_broker/check_windows.bat
@@ -1,6 +1,6 @@
 SET database_name=test_run_detector
 SET mongo_exe="c:\Program Files\MongoDB\Server\4.2\bin\mongo.exe"
-set token_test_run=K38Mqc90iRv8fC7prcFHd994mF_wfUiJnWBfIjIzieo=
+set token_test_run=%BT_TEST_RUN_TOKEN%
 
 call start_services.bat
 
diff --git a/tests/automatic/curl_http_client/curl_http_client_command/authorizer_settings.json.tpl.in b/tests/automatic/curl_http_client/curl_http_client_command/authorizer_settings.json.tpl.in
index d4916f87e39ca954a28ffc6e72199acd0d046c3f..611bdefecbc64cd2c976e69593975c81eaf57b13 100644
--- a/tests/automatic/curl_http_client/curl_http_client_command/authorizer_settings.json.tpl.in
+++ b/tests/automatic/curl_http_client/curl_http_client_command/authorizer_settings.json.tpl.in
@@ -3,7 +3,8 @@
   "LogLevel":"debug",
   "RootBeamtimesFolder":"@ASAP3_FOLDER@",
   "CurrentBeamlinesFolder":"@CURRENT_BEAMLINES_FOLDER@",
-  "SecretFile":"auth_secret.key",
+  "UserSecretFile":"auth_secret.key",
+  "AdminSecretFile":"auth_secret_admin.key",
   "TokenDurationMin":600,
   "Ldap":
     {
diff --git a/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp b/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp
index 33e1b6a6665255ebc048374b96c67505155b77ff..c3391d9ab9f63c634420893f615484eacdd7762e 100644
--- a/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp
+++ b/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp
@@ -28,7 +28,7 @@ Args GetArgs(int argc, char* argv[]) {
 int main(int argc, char* argv[]) {
 
     auto args = GetArgs(argc, argv);
-    auto token = "bnCXpOdBV90wU1zybEw1duQNSORuwaKz6oDHqmL35p0="; //token for aaa
+    auto token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiJjMTkycDFiaXB0MzBub3AwcTNlZyIsInN1YiI6ImJ0X2FhYSIsIkV4dHJhQ2xhaW1zIjp7IkFjY2Vzc1R5cGVzIjpbInJlYWQiXX19.dt3ifrG3zqQP4uM2kaoe7ydDjUdFeasOB07fVRfFApE"; //token for aaa
     std::string authorize_request = "{\"Folder\":\"" + args.folder + "\",\"BeamtimeId\":\"aaa\",\"Token\":\"" + token +
                                     "\"}";
     asapo::Error err;
@@ -44,15 +44,15 @@ int main(int argc, char* argv[]) {
     asapo::HttpCode code;
     std::string response;
     std::string input_data;
-    auto folder_token = consumer_impl->httpclient__->Post(args.uri_authorizer + "/folder", "", authorize_request, &code,
+    auto folder_token = consumer_impl->httpclient__->Post(args.uri_authorizer + "/v0.1/folder", "", authorize_request, &code,
                         &err);
-    M_AssertTrue(err == nullptr);
-    M_AssertTrue(code == asapo::HttpCode::OK);
     if (err) {
         std::cout << err->Explain();
     }
+    M_AssertTrue(err == nullptr);
+    M_AssertTrue(code == asapo::HttpCode::OK);
 
-    consumer_impl->httpclient__->Post(args.uri_authorizer + "/folder", "", "", &code, &err);
+    consumer_impl->httpclient__->Post(args.uri_authorizer + "/v0.1/folder", "", "", &code, &err);
     M_AssertTrue(code == asapo::HttpCode::BadRequest);
 
     consumer_impl->httpclient__->Post(args.uri_authorizer + "/bla", "", "", &code, &err);
@@ -61,12 +61,12 @@ int main(int argc, char* argv[]) {
 // check post with data
     std::string transfer = "{\"Folder\":\"" + args.folder + "\",\"FileName\":\"aaa\"}";
     std::string cookie = "Authorization=Bearer " + folder_token + ";";
-    auto content = consumer_impl->httpclient__->Post(args.uri_fts + "/transfer", cookie, transfer, &code, &err);
+    auto content = consumer_impl->httpclient__->Post(args.uri_fts + "/v0.1/transfer", cookie, transfer, &code, &err);
     M_AssertEq("hello", content);
     M_AssertTrue(code == asapo::HttpCode::OK);
 // with array
     asapo::MessageData data;
-    err = consumer_impl->httpclient__->Post(args.uri_fts + "/transfer", cookie, transfer, &data, 5, &code);
+    err = consumer_impl->httpclient__->Post(args.uri_fts + "/v0.1/transfer", cookie, transfer, &data, 5, &code);
     M_AssertEq( "hello", reinterpret_cast<char const*>(data.get()));
     M_AssertTrue(code == asapo::HttpCode::OK);
 
@@ -76,7 +76,7 @@ int main(int argc, char* argv[]) {
     uint64_t size = 0;
     auto expected_data = io->GetDataFromFile(fname, &size, &err);
     M_AssertEq(nullptr, err);
-    err = consumer_impl->httpclient__->Post(args.uri_fts + "/transfer", cookie, transfer, &data, size, &code);
+    err = consumer_impl->httpclient__->Post(args.uri_fts + "/v0.1/transfer", cookie, transfer, &data, size, &code);
     M_AssertTrue(code == asapo::HttpCode::OK);
     for (uint64_t i = 0; i < size; i++) {
         if (expected_data[i] != data[i]) {
@@ -86,11 +86,11 @@ int main(int argc, char* argv[]) {
 
 // with file
     transfer = "{\"Folder\":\"" + args.folder + "\",\"FileName\":\"aaa\"}";
-    err = consumer_impl->httpclient__->Post(args.uri_fts + "/transfer", cookie, transfer, "bbb", &code);
+    err = consumer_impl->httpclient__->Post(args.uri_fts + "/v0.1/transfer", cookie, transfer, "bbb", &code);
     M_AssertTrue(code == asapo::HttpCode::OK);
 
     transfer = "{\"Folder\":\"" + args.folder + "\",\"FileName\":\"random\"}";
-    err = consumer_impl->httpclient__->Post(args.uri_fts + "/transfer", cookie, transfer, "random", &code);
+    err = consumer_impl->httpclient__->Post(args.uri_fts + "/v0.1/transfer", cookie, transfer, "random", &code);
     M_AssertTrue(code == asapo::HttpCode::OK);
 
     return 0;
diff --git a/tests/automatic/file_transfer_service/rest_api/authorizer_settings.json.tpl.in b/tests/automatic/file_transfer_service/rest_api/authorizer_settings.json.tpl.in
index d4916f87e39ca954a28ffc6e72199acd0d046c3f..611bdefecbc64cd2c976e69593975c81eaf57b13 100644
--- a/tests/automatic/file_transfer_service/rest_api/authorizer_settings.json.tpl.in
+++ b/tests/automatic/file_transfer_service/rest_api/authorizer_settings.json.tpl.in
@@ -3,7 +3,8 @@
   "LogLevel":"debug",
   "RootBeamtimesFolder":"@ASAP3_FOLDER@",
   "CurrentBeamlinesFolder":"@CURRENT_BEAMLINES_FOLDER@",
-  "SecretFile":"auth_secret.key",
+  "UserSecretFile":"auth_secret.key",
+  "AdminSecretFile":"auth_secret_admin.key",
   "TokenDurationMin":600,
   "Ldap":
     {
diff --git a/tests/automatic/file_transfer_service/rest_api/check_linux.sh b/tests/automatic/file_transfer_service/rest_api/check_linux.sh
index 04b478d498eedff2ca34c824bfed0c5473974289..e7350241666f62bd20a802c6ea921af2f5e3f725 100644
--- a/tests/automatic/file_transfer_service/rest_api/check_linux.sh
+++ b/tests/automatic/file_transfer_service/rest_api/check_linux.sh
@@ -21,23 +21,24 @@ sleep 1
 
 mkdir -p $file_transfer_folder
 
-token=bnCXpOdBV90wU1zybEw1duQNSORuwaKz6oDHqmL35p0= #token for aaa
-folder_token=`curl --silent --data "{\"Folder\":\"$file_transfer_folder\",\"BeamtimeId\":\"aaa\",\"Token\":\"$token\"}" 127.0.0.1:5007/folder`
+token=$BT_AAA_TOKEN
+
+folder_token=`curl --silent --data "{\"Folder\":\"$file_transfer_folder\",\"BeamtimeId\":\"aaa\",\"Token\":\"$token\"}" 127.0.0.1:5007/v0.1/folder`
 echo $folder_token
 
 
 dd if=/dev/urandom of=$file_transfer_folder/aaa bs=1 count=100000
 
-curl -o aaa --silent -H "Authorization: Bearer ${folder_token}" --data "{\"Folder\":\"$file_transfer_folder\",\"FileName\":\"aaa\",\"Token\":\"$folder_token\"}" 127.0.0.1:5008/transfer --stderr - | tee /dev/stderr
+curl -o aaa --silent -H "Authorization: Bearer ${folder_token}" --data "{\"Folder\":\"$file_transfer_folder\",\"FileName\":\"aaa\",\"Token\":\"$folder_token\"}" 127.0.0.1:5008/v0.1/transfer --stderr - | tee /dev/stderr
 
-curl -H "Authorization: Bearer ${folder_token}" --data "{\"Folder\":\"$file_transfer_folder\",\"FileName\":\"aaa\",\"Token\":\"$folder_token\"}" 127.0.0.1:5008/transfer?sizeonly=true --stderr - | tee /dev/stderr | grep 100000
+curl -H "Authorization: Bearer ${folder_token}" --data "{\"Folder\":\"$file_transfer_folder\",\"FileName\":\"aaa\",\"Token\":\"$folder_token\"}" 127.0.0.1:5008/v0.1/transfer?sizeonly=true --stderr - | tee /dev/stderr | grep 100000
 
 
 diff -q aaa $file_transfer_folder/aaa
 
 dd if=/dev/zero of=$file_transfer_folder/big_file bs=1 count=0 seek=5368709120
 
-curl -vvv -o big_file -H "Authorization: Bearer ${folder_token}" --data "{\"Folder\":\"$file_transfer_folder\",\"FileName\":\"big_file\",\"Token\":\"$folder_token\"}" 127.0.0.1:5008/transfer --stderr -  | tee /dev/stderr
+curl -vvv -o big_file -H "Authorization: Bearer ${folder_token}" --data "{\"Folder\":\"$file_transfer_folder\",\"FileName\":\"big_file\",\"Token\":\"$folder_token\"}" 127.0.0.1:5008/v0.1/transfer --stderr -  | tee /dev/stderr
 
 ls -ln big_file | awk '{ print $5 }' | tee /dev/stderr | grep 5368709120
 
diff --git a/tests/automatic/file_transfer_service/rest_api/check_windows.bat b/tests/automatic/file_transfer_service/rest_api/check_windows.bat
index 9814125d2df3f0805ebb51a6a14fe992bb0100c4..9f869e0d9973a630e45ea532b1fbe35a10179325 100644
--- a/tests/automatic/file_transfer_service/rest_api/check_windows.bat
+++ b/tests/automatic/file_transfer_service/rest_api/check_windows.bat
@@ -9,16 +9,16 @@ c:\opt\consul\nomad run file_transfer.nmd
 
 ping 192.0.2.1 -n 1 -w 1000 > nul
 
-set token=bnCXpOdBV90wU1zybEw1duQNSORuwaKz6oDHqmL35p0=
+set token=%BT_AAA_TOKEN%
 
 mkdir %file_transfer_folder%
 
-C:\Curl\curl.exe --silent --data "{\"Folder\":\"%file_transfer_folder%\",\"BeamtimeId\":\"aaa\",\"Token\":\"%token%\"}" 127.0.0.1:5007/folder > token
+C:\Curl\curl.exe --silent --data "{\"Folder\":\"%file_transfer_folder%\",\"BeamtimeId\":\"aaa\",\"Token\":\"%token%\"}" 127.0.0.1:5007/v0.1/folder > token
 set /P folder_token=< token
 
 echo hello > %file_transfer_folder%\aaa
 
-C:\Curl\curl.exe --silent -H "Authorization: Bearer %folder_token%" --data "{\"Folder\":\"%file_transfer_folder%\",\"FileName\":\"aaa\",\"Token\":\"%folder_token%\"}" 127.0.0.1:5008/transfer --stderr - | findstr hello  || goto :error
+C:\Curl\curl.exe --silent -H "Authorization: Bearer %folder_token%" --data "{\"Folder\":\"%file_transfer_folder%\",\"FileName\":\"aaa\",\"Token\":\"%folder_token%\"}" 127.0.0.1:5008/v0.1/transfer --stderr - | findstr hello  || goto :error
 
 goto :clean
 
diff --git a/tests/automatic/full_chain/send_recv_streams/check_linux.sh b/tests/automatic/full_chain/send_recv_streams/check_linux.sh
index f7f1f0241bd2b973a45ef4c8141053c3209dbccf..49aaba84a8a519ed586a6107df83ef75d830dadb 100644
--- a/tests/automatic/full_chain/send_recv_streams/check_linux.sh
+++ b/tests/automatic/full_chain/send_recv_streams/check_linux.sh
@@ -5,7 +5,9 @@ beamtime_id=asapo_test
 stream_in=detector
 
 indatabase_name=${beamtime_id}_${stream_in}
-token=IEfwsWa0GXky2S3MkxJSUHJT1sI8DD5teRdjBUXVRxk=
+
+#asapo_test read/write token
+token=$ASAPO_TEST_RW_TOKEN
 
 beamline=test
 
diff --git a/tests/automatic/full_chain/send_recv_streams/check_windows.bat b/tests/automatic/full_chain/send_recv_streams/check_windows.bat
index d89ca68c8e229b56fd09562bfa1712b2463490f6..e4c54b28b3878f3cceee27f1fb41993e9747f798 100644
--- a/tests/automatic/full_chain/send_recv_streams/check_windows.bat
+++ b/tests/automatic/full_chain/send_recv_streams/check_windows.bat
@@ -4,7 +4,7 @@ SET stream_in=detector
 
 SET indatabase_name=%beamtime_id%_%stream_in%
 
-SET token=IEfwsWa0GXky2S3MkxJSUHJT1sI8DD5teRdjBUXVRxk=
+SET token=%ASAPO_TEST_RW_TOKEN%
 
 SET beamline=test
 
diff --git a/tests/automatic/full_chain/send_recv_streams/send_recv_streams.cpp b/tests/automatic/full_chain/send_recv_streams/send_recv_streams.cpp
index 5f025e5f434998f801316a0be33e0426d7937334..7858801dcd1ea3b0ef8a4e53225c42fde5be4f16 100644
--- a/tests/automatic/full_chain/send_recv_streams/send_recv_streams.cpp
+++ b/tests/automatic/full_chain/send_recv_streams/send_recv_streams.cpp
@@ -73,9 +73,8 @@ ProducerPtr CreateProducer(const Args& args) {
 }
 
 int main(int argc, char* argv[]) {
-    asapo::ExitAfterPrintVersionIfNeeded("GetNext consumer Example", argc, argv);
     Args args;
-    if (argc != 5) {
+    if (argc != 4) {
         std::cout << "Usage: " + std::string{argv[0]}
                   + " <server> <network_type> <beamtime_id> <token>"
                   <<
diff --git a/tests/automatic/full_chain/send_recv_streams_python/check_linux.sh b/tests/automatic/full_chain/send_recv_streams_python/check_linux.sh
index 024acde6816e99ac39d9bcee2dabae58a9fbfe7b..490f7cd40bdf1aa801454845dcfdbd865b84fbec 100644
--- a/tests/automatic/full_chain/send_recv_streams_python/check_linux.sh
+++ b/tests/automatic/full_chain/send_recv_streams_python/check_linux.sh
@@ -5,7 +5,7 @@ beamtime_id=asapo_test
 stream_in=detector
 
 indatabase_name=${beamtime_id}_${stream_in}
-token=IEfwsWa0GXky2S3MkxJSUHJT1sI8DD5teRdjBUXVRxk=
+token=$ASAPO_TEST_RW_TOKEN
 
 beamline=test
 
diff --git a/tests/automatic/full_chain/send_recv_streams_python/check_windows.bat b/tests/automatic/full_chain/send_recv_streams_python/check_windows.bat
index 475943c379ac9a534bfd8afb91e9616585f9b055..8df8923517b5058397c4e7f8d5cdeed9fdb60679 100644
--- a/tests/automatic/full_chain/send_recv_streams_python/check_windows.bat
+++ b/tests/automatic/full_chain/send_recv_streams_python/check_windows.bat
@@ -4,8 +4,7 @@ SET stream_in=detector
 
 SET indatabase_name=%beamtime_id%_%stream_in%
 
-SET token=IEfwsWa0GXky2S3MkxJSUHJT1sI8DD5teRdjBUXVRxk=
-
+SET token=%ASAPO_TEST_RW_TOKEN%
 SET beamline=test
 
 SET mongo_exe="c:\Program Files\MongoDB\Server\4.2\bin\mongo.exe"
diff --git a/tests/automatic/full_chain/simple_chain/check_linux.sh b/tests/automatic/full_chain/simple_chain/check_linux.sh
index fcc4a10f02dace183ace3188fd824821bed088e6..0baea68ff6ff629c0b1ab4881e1440edba4e9782 100755
--- a/tests/automatic/full_chain/simple_chain/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain/check_linux.sh
@@ -9,7 +9,6 @@ consumer_bin=$2
 asapo_tool_bin=$3
 
 beamtime_id=asapo_test
-token=`$asapo_tool_bin token -secret auth_secret.key $beamtime_id`
 
 monitor_database_name=db_test
 proxy_address=127.0.0.1:8400
@@ -49,6 +48,9 @@ nomad run broker.nmd
 
 sleep 1
 
+token=`$asapo_tool_bin token -endpoint http://localhost:8400/asapo-authorizer -secret admin_token.key -types read $beamtime_id`
+
+
 echo "Start producer"
 mkdir -p ${receiver_folder}
 $producer_bin localhost:8400 ${beamtime_id} 100 1000 4 0 100
diff --git a/tests/automatic/full_chain/simple_chain/check_windows.bat b/tests/automatic/full_chain/simple_chain/check_windows.bat
index 8ef712f1406b3e2d033ff796f04c55bf8095878b..f6a7cb7925c6b75fbf2cbb36e18a4b039731740f 100644
--- a/tests/automatic/full_chain/simple_chain/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain/check_windows.bat
@@ -5,16 +5,15 @@ SET receiver_root_folder=c:\tmp\asapo\receiver\files
 
 SET receiver_folder="%receiver_root_folder%\test_facility\gpfs\%beamline%\2019\data\%beamtime_id%"
 
-
-"%3" token -secret auth_secret.key %beamtime_id% > token
-set /P token=< token
-
 set proxy_address="127.0.0.1:8400"
 
 echo db.%beamtime_id%_detector.insert({dummy:1}) | %mongo_exe% %beamtime_id%_detector
 
 call start_services.bat
 
+"%3" token -endpoint http://127.0.0.1:8400/asapo-authorizer -secret admin_token.key -types read %beamtime_id% > token
+set /P token=< token
+
 REM producer
 mkdir %receiver_folder%
 start /B "" "%1" %proxy_address% %beamtime_id% 100 1000 4 0 100
diff --git a/tests/automatic/full_chain/simple_chain_dataset/check_linux.sh b/tests/automatic/full_chain/simple_chain_dataset/check_linux.sh
index 6972a6ef1174e7224eeed0c020aac6f6308919d0..93f9960a88f635916d0717f2ee42c27112dd4706 100644
--- a/tests/automatic/full_chain/simple_chain_dataset/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_dataset/check_linux.sh
@@ -10,7 +10,6 @@ asapo_tool_bin=$3
 network_type=$4
 
 beamtime_id=asapo_test
-token=`$asapo_tool_bin token -secret auth_secret.key $beamtime_id`
 
 monitor_database_name=db_test
 proxy_address=127.0.0.1:8400
@@ -46,6 +45,8 @@ nomad run broker.nmd
 
 sleep 1
 
+token=`$asapo_tool_bin token -endpoint http://localhost:8400/asapo-authorizer -secret admin_token.key -types read $beamtime_id`
+
 echo "Start producer"
 mkdir -p ${receiver_folder}
 $producer_bin localhost:8400 ${beamtime_id} 100 100 4 0 100 5 &
diff --git a/tests/automatic/full_chain/simple_chain_dataset/check_windows.bat b/tests/automatic/full_chain/simple_chain_dataset/check_windows.bat
index 030c045ebd7ec2db441dade5d4ee3943453f1c36..7057a8d9b3cd63f48f59cc9ac7da4e403cf35bef 100644
--- a/tests/automatic/full_chain/simple_chain_dataset/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_dataset/check_windows.bat
@@ -4,16 +4,15 @@ SET beamline=test
 SET receiver_root_folder=c:\tmp\asapo\receiver\files
 SET receiver_folder="%receiver_root_folder%\test_facility\gpfs\%beamline%\2019\data\%beamtime_id%"
 
-
-"%3" token -secret auth_secret.key %beamtime_id% > token
-set /P token=< token
-
 set proxy_address="127.0.0.1:8400"
 
 echo db.%beamtime_id%_detector.insert({dummy:1}) | %mongo_exe% %beamtime_id%_detector
 
 call start_services.bat
 
+"%3" token -endpoint http://127.0.0.1:8400/asapo-authorizer -secret admin_token.key -types read %beamtime_id% > token
+set /P token=< token
+
 REM producer
 mkdir %receiver_folder%
 start /B "" "%1" %proxy_address% %beamtime_id% 100 100 4 0 100 5
diff --git a/tests/automatic/full_chain/simple_chain_filegen/check_linux.sh b/tests/automatic/full_chain/simple_chain_filegen/check_linux.sh
index 6720558682eecbc8ee9d4a81d3b8f522fee875bc..0bd69ff985116b9f46df3ace65571196af175243 100644
--- a/tests/automatic/full_chain/simple_chain_filegen/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_filegen/check_linux.sh
@@ -10,7 +10,6 @@ asapo_tool_bin=$3
 network_type=$4
 
 beamtime_id=asapo_test
-token=`$asapo_tool_bin token -secret auth_secret.key $beamtime_id`
 
 monitor_database_name=db_test
 proxy_address=127.0.0.1:8400
@@ -47,6 +46,8 @@ nomad run broker.nmd
 
 sleep 1
 
+token=`$asapo_tool_bin token -endpoint http://localhost:8400/asapo-authorizer -secret admin_token.key -types read $beamtime_id`
+
 echo "Start producer"
 mkdir -p ${receiver_folder}
 $producer_bin test.json &
diff --git a/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat
index 1eeb2fe43079f45af4c2f645ee6da82f54805040..9d783bd6ad254ac7674fa9262a2a33617febd228 100644
--- a/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen/check_windows.bat
@@ -8,22 +8,22 @@ SET receiver_folder="%receiver_root_folder%\test_facility\gpfs\%beamline%\2019\d
 
 set producer_short_name="%~nx1"
 
-
-"%3" token -secret auth_secret.key %beamtime_id% > token
-set /P token=< token
-
 set proxy_address="127.0.0.1:8400"
 
 echo db.%beamtime_id%_detector.insert({dummy:1}) | %mongo_exe% %beamtime_id%_detector
 
 call start_services.bat
 
+"%3" token -endpoint http://127.0.0.1:8400/asapo-authorizer -secret admin_token.key -types read %beamtime_id% > token
+set /P token=< token
+
+
 REM producer
 mkdir %receiver_folder%
 mkdir  c:\tmp\asapo\test_in\processed
 start /B "" "%1" test.json
 
-ping 192.0.2.1 -n 1 -w 1000 > nul
+ping 192.0.2.1 -n 1 -w 5000 > nul
 
 mkdir  c:\tmp\asapo\test_in\processed\test1
 mkdir  c:\tmp\asapo\test_in\processed\test2
diff --git a/tests/automatic/full_chain/simple_chain_filegen_batches/check_linux.sh b/tests/automatic/full_chain/simple_chain_filegen_batches/check_linux.sh
index ef67e1cdd2e9eb132443449e291d91cb27121ea4..553a3b90c33b12c4cbaf3e3d6750596b4f5e8db3 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_batches/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_filegen_batches/check_linux.sh
@@ -10,7 +10,6 @@ asapo_tool_bin=$3
 network_type=$4
 
 beamtime_id=asapo_test
-token=`$asapo_tool_bin token -secret auth_secret.key $beamtime_id`
 
 monitor_database_name=db_test
 proxy_address=127.0.0.1:8400
@@ -50,6 +49,9 @@ nomad run broker.nmd
 
 sleep 1
 
+token=`$asapo_tool_bin token -endpoint http://localhost:8400/asapo-authorizer -secret admin_token.key -types read $beamtime_id`
+
+
 mkdir  /tmp/asapo/test_in/processed/test1
 mkdir  /tmp/asapo/test_in/processed/test2
 
diff --git a/tests/automatic/full_chain/simple_chain_filegen_batches/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen_batches/check_windows.bat
index a8842020e2c58d1a4e5e578d17606fd162090f02..bc1a1402ae6b96aa54776e20c1bea2cc082cae48 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_batches/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen_batches/check_windows.bat
@@ -8,16 +8,16 @@ SET receiver_folder="%receiver_root_folder%\test_facility\gpfs\%beamline%\2019\d
 
 set producer_short_name="%~nx1"
 
-
-"%3" token -secret auth_secret.key %beamtime_id% > token
-set /P token=< token
-
 set proxy_address="127.0.0.1:8400"
 
 echo db.%beamtime_id%_detector.insert({dummy:1}) | %mongo_exe% %beamtime_id%_detector
 
 call start_services.bat
 
+"%3" token -endpoint http://127.0.0.1:8400/asapo-authorizer -secret admin_token.key -types read %beamtime_id% > token
+set /P token=< token
+
+
 REM producer
 mkdir %receiver_folder%
 mkdir  c:\tmp\asapo\test_in\processed
diff --git a/tests/automatic/full_chain/simple_chain_filegen_multisource/check_linux.sh b/tests/automatic/full_chain/simple_chain_filegen_multisource/check_linux.sh
index 5283a0d682e03bfacdd63bf2439b5c74e5ffe0a1..9619e32b3ceb6c42a966fc7a501fa623f1b6b576 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_multisource/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_filegen_multisource/check_linux.sh
@@ -9,7 +9,6 @@ consumer_bin=$2
 asapo_tool_bin=$3
 
 beamtime_id=asapo_test
-token=`$asapo_tool_bin token -secret auth_secret.key $beamtime_id`
 
 monitor_database_name=db_test
 proxy_address=127.0.0.1:8400
@@ -48,6 +47,7 @@ nomad run broker.nmd
 
 sleep 1
 
+token=`$asapo_tool_bin token -endpoint http://localhost:8400/asapo-authorizer -secret admin_token.key -types read $beamtime_id`
 
 mkdir -p /tmp/asapo/test_in1/processed
 mkdir -p /tmp/asapo/test_in2/processed
diff --git a/tests/automatic/full_chain/simple_chain_filegen_multisource/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen_multisource/check_windows.bat
index 8ae26ac1400799601c979724ba5697128bb9f5f0..ef7d8294e3e380550237571139a0b7910cb729dd 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_multisource/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen_multisource/check_windows.bat
@@ -8,16 +8,16 @@ SET receiver_folder="%receiver_root_folder%\test_facility\gpfs\%beamline%\2019\d
 
 set producer_short_name="%~nx1"
 
-
-"%3" token -secret auth_secret.key %beamtime_id% > token
-set /P token=< token
-
 set proxy_address="127.0.0.1:8400"
 
 echo db.%beamtime_id%_detector.insert({dummy:1}) | %mongo_exe% %beamtime_id%_detector
 
 call start_services.bat
 
+"%3" token -endpoint http://127.0.0.1:8400/asapo-authorizer -secret admin_token.key -types read %beamtime_id% > token
+set /P token=< token
+
+
 mkdir %receiver_folder%
 mkdir  c:\tmp\asapo\test_in1\processed
 mkdir  c:\tmp\asapo\test_in2\processed
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_linux.sh b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_linux.sh
index b57cbbbad09a957ee9cab06d77a88f5fbdd7101c..0e12523a64c39ebd2f6f34be348d791804f4b395 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_linux.sh
@@ -10,7 +10,6 @@ asapo_tool_bin=$3
 network_type=$4
 
 beamtime_id=asapo_test
-token=`$3 token -secret auth_secret.key $beamtime_id`
 
 monitor_database_name=db_test
 proxy_address=127.0.0.1:8400
@@ -50,6 +49,8 @@ nomad run broker.nmd
 
 sleep 1
 
+token=`$3 token -endpoint http://localhost:8400/asapo-authorizer -secret admin_token.key -types read $beamtime_id`
+
 echo "Start producer"
 mkdir -p ${receiver_folder}
 $producer_bin test.json &
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_windows.bat
index 32754618f82a36e2316b561bb090f15daf8825c8..dfde693b8fbd64470c3d8bc5cbd3b6e750548775 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_cache/check_windows.bat
@@ -8,16 +8,16 @@ SET receiver_folder="%receiver_root_folder%\test_facility\gpfs\%beamline%\2019\d
 
 set producer_short_name="%~nx1"
 
-
-"%3" token -secret auth_secret.key %beamtime_id% > token
-set /P token=< token
-
 set proxy_address="127.0.0.1:8400"
 
 echo db.%beamtime_id%_detector.insert({dummy:1}) | %mongo_exe% %beamtime_id%_detector
 
 call start_services.bat
 
+"%3" token -endpoint http://127.0.0.1:8400/asapo-authorizer -secret admin_token.key -types read %beamtime_id% > token
+set /P token=< token
+
+
 REM producer
 mkdir %receiver_folder%
 mkdir  c:\tmp\asapo\test_in\processed
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_linux.sh b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_linux.sh
index 6a571d4d27ce9b83586664d0b3ec60a80a4596b3..748986058523fcfb329688bf4c3eda99234a61d0 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_linux.sh
@@ -10,7 +10,6 @@ asapo_tool_bin=$3
 network_type=$4
 
 beamtime_id=asapo_test
-token=`$asapo_tool_bin token -secret auth_secret.key $beamtime_id`
 
 monitor_database_name=db_test
 proxy_address=127.0.0.1:8400
@@ -50,6 +49,9 @@ nomad run broker.nmd
 
 sleep 1
 
+token=`$asapo_tool_bin token -endpoint http://localhost:8400/asapo-authorizer -secret admin_token.key -types read $beamtime_id`
+
+
 echo "Start producer"
 mkdir -p ${receiver_folder}
 $producer_bin test.json &
diff --git a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_windows.bat b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_windows.bat
index e68b0ca6332b31a1d75a94e7cba303e94ae52b7a..63a5f3521d5485552aeae57a6efbefe39b8a976b 100644
--- a/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_filegen_readdata_file/check_windows.bat
@@ -8,16 +8,16 @@ SET receiver_folder="%receiver_root_folder%\test_facility\gpfs\%beamline%\2019\d
 
 set producer_short_name="%~nx1"
 
-
-"%3" token -secret auth_secret.key %beamtime_id% > token
-set /P token=< token
-
 set proxy_address="127.0.0.1:8400"
 
 echo db.%beamtime_id%_detector.insert({dummy:1}) | %mongo_exe% %beamtime_id%_detector
 
 call start_services.bat
 
+"%3" token -endpoint http://127.0.0.1:8400/asapo-authorizer -secret admin_token.key -types read %beamtime_id% > token
+set /P token=< token
+
+
 REM producer
 mkdir %receiver_folder%
 mkdir  c:\tmp\asapo\test_in\processed
diff --git a/tests/automatic/full_chain/simple_chain_metadata/check_linux.sh b/tests/automatic/full_chain/simple_chain_metadata/check_linux.sh
index d766e7ae9933ba3d88167dd64f3b05dbb5a410ce..70fc2dfe2b509b1e783076f89c2cf9403d295766 100644
--- a/tests/automatic/full_chain/simple_chain_metadata/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_metadata/check_linux.sh
@@ -9,7 +9,6 @@ consumer_bin=$2
 asapo_tool_bin=$3
 
 beamtime_id=asapo_test
-token=`$asapo_tool_bin token -secret auth_secret.key $beamtime_id`
 
 monitor_database_name=db_test
 proxy_address=127.0.0.1:8400
@@ -45,6 +44,9 @@ nomad run broker.nmd
 
 sleep 1
 
+token=`$asapo_tool_bin token -endpoint http://localhost:8400/asapo-authorizer -secret admin_token.key -types read $beamtime_id`
+
+
 echo "Start producer"
 mkdir -p ${receiver_folder}
 $producer_bin localhost:8400 ${beamtime_id} 100 0 1 0 1000
diff --git a/tests/automatic/full_chain/simple_chain_metadata/check_windows.bat b/tests/automatic/full_chain/simple_chain_metadata/check_windows.bat
index 9dbba7db3e8c1d8487c9d0ccc19eb20d0e9226ea..f5121e9f4e478607434b2810c238b83fc1f25523 100644
--- a/tests/automatic/full_chain/simple_chain_metadata/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_metadata/check_windows.bat
@@ -4,15 +4,15 @@ SET beamline=test
 SET receiver_root_folder=c:\tmp\asapo\receiver\files
 SET receiver_folder="%receiver_root_folder%\test_facility\gpfs\%beamline%\2019\data\%beamtime_id%"
 
-"%3" token -secret auth_secret.key %beamtime_id% > token
-set /P token=< token
-
 set proxy_address="127.0.0.1:8400"
 
 echo db.%beamtime_id%_detector.insert({dummy:1}) | %mongo_exe% %beamtime_id%_detector
 
 call start_services.bat
 
+"%3" token -endpoint http://127.0.0.1:8400/asapo-authorizer -secret admin_token.key -types read %beamtime_id% > token
+set /P token=< token
+
 REM producer
 mkdir %receiver_folder%
 "%1" %proxy_address% %beamtime_id% 100 0 1 0 1000
diff --git a/tests/automatic/full_chain/simple_chain_raw/check_linux.sh b/tests/automatic/full_chain/simple_chain_raw/check_linux.sh
index f56ee8ca5627333538906b17b4fcdac9de30dc97..176da746d3e600097f498ed8084b5a1bdc4e8bc8 100644
--- a/tests/automatic/full_chain/simple_chain_raw/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_raw/check_linux.sh
@@ -5,7 +5,6 @@ set -e
 trap Cleanup EXIT
 
 beamtime_id=11111111
-token=`$3 token -secret auth_secret.key $beamtime_id`
 
 monitor_database_name=db_test
 proxy_address=127.0.0.1:8400
@@ -40,6 +39,8 @@ nomad run broker.nmd
 
 sleep 1
 
+token=`$3 token -endpoint http://localhost:8400/asapo-authorizer -secret admin_token.key -types read $beamtime_id`
+
 #producer
 $1 localhost:8400 ${beamtime_id} 100 10 4 100 100
 
diff --git a/tests/automatic/full_chain/simple_chain_raw/check_windows.bat b/tests/automatic/full_chain/simple_chain_raw/check_windows.bat
index 8f9eea4aad498cba4d88be434eb9ff3fcb37ae6c..c60ca7696ac0ddd01837f8cbf94d9c468e50a83a 100644
--- a/tests/automatic/full_chain/simple_chain_raw/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_raw/check_windows.bat
@@ -6,14 +6,15 @@ mkdir beamline\p07\current
 copy beamtime-metadata* beamline\p07\current\ /y
 copy beamtime-metadata* asap3\petra3\gpfs\p07\2019\data\11111111\ /y
 
-
-"%3" token -secret auth_secret.key %beamtime_id% > token
-set /P token=< token
-
 set proxy_address="127.0.0.1:8400"
 
 call start_services.bat
 
+"%3" token -endpoint http://127.0.0.1:8400/asapo-authorizer -secret admin_token.key -types read %beamtime_id% > token
+
+set /P token=< token
+
+
 REM producer
 mkdir %receiver_folder%
 start /B "" "%1" %proxy_address% %beamtime_id% 100 10 4 100 100
diff --git a/tests/automatic/full_chain/simple_chain_raw/settings.json.tpl.in b/tests/automatic/full_chain/simple_chain_raw/settings.json.tpl.in
index 4aecbe840466b510e46c567c4871bd892b110bcc..130d17e47a5489c0b6beb0fcd93edb4eab1b91ee 100644
--- a/tests/automatic/full_chain/simple_chain_raw/settings.json.tpl.in
+++ b/tests/automatic/full_chain/simple_chain_raw/settings.json.tpl.in
@@ -3,7 +3,8 @@
   "LogLevel":"debug",
   "RootBeamtimesFolder":"@ASAP3_FOLDER@",
   "CurrentBeamlinesFolder":"@CURRENT_BEAMLINES_FOLDER@",
-  "SecretFile":"auth_secret.key",
+  "UserSecretFile":"auth_secret.key",
+  "AdminSecretFile":"auth_secret_admin.key",
   "Ldap":
     {
         "Uri" : "ldap://localhost:389",
diff --git a/tests/automatic/full_chain/simple_chain_usermeta_python/check_linux.sh b/tests/automatic/full_chain/simple_chain_usermeta_python/check_linux.sh
index 3023c28f4c8a518836df54349c492d8c78ae6372..129b8298c423c022e4c5887a7ff8119a98b02474 100644
--- a/tests/automatic/full_chain/simple_chain_usermeta_python/check_linux.sh
+++ b/tests/automatic/full_chain/simple_chain_usermeta_python/check_linux.sh
@@ -8,7 +8,6 @@ producer_bin=$1
 asapo_tool_bin=$2
 
 beamtime_id=asapo_test
-token=`$asapo_tool_bin token -secret auth_secret.key $beamtime_id`
 
 monitor_database_name=db_test
 proxy_address=127.0.0.1:8400
@@ -44,6 +43,9 @@ nomad run broker.nmd
 
 sleep 2
 
+token=`$asapo_tool_bin token -endpoint http://localhost:8400/asapo-authorizer -secret admin_token.key -types read $beamtime_id`
+
+
 echo "Start producer"
 mkdir -p ${receiver_folder}
 $producer_bin localhost:8400 ${beamtime_id} 100 100 1 0 100
diff --git a/tests/automatic/full_chain/simple_chain_usermeta_python/check_windows.bat b/tests/automatic/full_chain/simple_chain_usermeta_python/check_windows.bat
index e68687fa6050d59c41fd987b0ad8ed026abb6445..86d039d555c236bba13be0f03fe65d8925b87007 100644
--- a/tests/automatic/full_chain/simple_chain_usermeta_python/check_windows.bat
+++ b/tests/automatic/full_chain/simple_chain_usermeta_python/check_windows.bat
@@ -4,16 +4,17 @@ SET beamline=test
 SET receiver_root_folder=c:\tmp\asapo\receiver\files
 SET receiver_folder="%receiver_root_folder%\test_facility\gpfs\%beamline%\2019\data\%beamtime_id%"
 
-
-"%2" token -secret auth_secret.key %beamtime_id% > token
-set /P token=< token
-
 set proxy_address="127.0.0.1:8400"
 
 echo db.%beamtime_id%_detector.insert({dummy:1}) | %mongo_exe% %beamtime_id%_detector
 
 call start_services.bat
 
+"%2" token -endpoint http://127.0.0.1:8400/asapo-authorizer -secret admin_token.key -types read %beamtime_id% > token
+
+set /P token=< token
+
+
 REM producer
 mkdir %receiver_folder%
 "%1" %proxy_address% %beamtime_id% 100 100 4 0 100
diff --git a/tests/automatic/full_chain/two_beamlines/check_linux.sh b/tests/automatic/full_chain/two_beamlines/check_linux.sh
index f518a57a459409d7bee58caf630ff87b4150601a..f43ddad7bb44e55d68bae73b56c271f082c1aabb 100644
--- a/tests/automatic/full_chain/two_beamlines/check_linux.sh
+++ b/tests/automatic/full_chain/two_beamlines/check_linux.sh
@@ -12,10 +12,8 @@ network_type=$4
 data_source=detector
 
 beamtime_id1=asapo_test1
-token1=`$asapo_tool_bin token -secret auth_secret.key $beamtime_id1`
 
 beamtime_id2=asapo_test2
-token2=`$asapo_tool_bin token -secret auth_secret.key $beamtime_id2`
 
 monitor_database_name=db_test
 proxy_address=127.0.0.1:8400
@@ -56,6 +54,9 @@ nomad run broker.nmd
 
 sleep 3
 
+token1=`$asapo_tool_bin token -endpoint http://localhost:8400/asapo-authorizer -secret admin_token.key -types read $beamtime_id1`
+token2=`$asapo_tool_bin token -endpoint http://localhost:8400/asapo-authorizer -secret admin_token.key -types read $beamtime_id2`
+
 echo "Start producers"
 mkdir -p ${receiver_folder1}
 mkdir -p ${receiver_folder2}
diff --git a/tests/automatic/full_chain/two_beamlines/check_windows.bat b/tests/automatic/full_chain/two_beamlines/check_windows.bat
index 1f09a912ea9b8f0e886d9ba58a1edf9c241bc162..1d7390636b4c7a0aefd9a916c3f76cc519a0e5fe 100644
--- a/tests/automatic/full_chain/two_beamlines/check_windows.bat
+++ b/tests/automatic/full_chain/two_beamlines/check_windows.bat
@@ -13,11 +13,6 @@ SET year=2019
 SET receiver_folder1="%receiver_root_folder%\%facility%\gpfs\%beamline1%\%year%\data\%beamtime_id1%"
 SET receiver_folder2="%receiver_root_folder%\%facility%\gpfs\%beamline2%\%year%\data\%beamtime_id2%"
 
-"%3" token -secret auth_secret.key %beamtime_id1% > token
-set /P token1=< token
-"%3" token -secret auth_secret.key %beamtime_id2% > token
-set /P token2=< token
-
 set proxy_address="127.0.0.1:8400"
 
 echo db.%beamtime_id1%_%data_source%.insert({dummy:1}) | %mongo_exe% %beamtime_id1%_%data_source%
@@ -25,6 +20,13 @@ echo db.%beamtime_id2%_%data_source%.insert({dummy:1}) | %mongo_exe% %beamtime_i
 
 call start_services.bat
 
+"%3" token -endpoint http://127.0.0.1:8400/asapo-authorizer -secret admin_token.key -types read %beamtime_id1% > token
+set /P token1=< token
+"%3" token -endpoint http://127.0.0.1:8400/asapo-authorizer -secret admin_token.key -types read %beamtime_id2% > token
+set /P token2=< token
+
+
+
 REM producer
 mkdir %receiver_folder1%
 mkdir %receiver_folder2%
diff --git a/tests/automatic/full_chain/two_streams/check_linux.sh b/tests/automatic/full_chain/two_streams/check_linux.sh
index fbbe34ab9801818131ae7443a2a6203092b88579..835de3037b542248963514aaf3cba5db493d8d34 100644
--- a/tests/automatic/full_chain/two_streams/check_linux.sh
+++ b/tests/automatic/full_chain/two_streams/check_linux.sh
@@ -10,7 +10,6 @@ asapo_tool_bin=$3
 network_type=$4
 
 beamtime_id=asapo_test
-token=`$asapo_tool_bin token -secret auth_secret.key $beamtime_id`
 
 stream1=s1
 stream2=s2
@@ -49,6 +48,8 @@ nomad run broker.nmd
 
 sleep 3
 
+token=`$asapo_tool_bin token -endpoint http://localhost:8400/asapo-authorizer -secret admin_token.key -types read $beamtime_id`
+
 echo "Start producers"
 mkdir -p ${receiver_folder}
 $producer_bin localhost:8400 ${beamtime_id}%${stream1} 100 1000 4 0 100 &
diff --git a/tests/automatic/full_chain/two_streams/check_windows.bat b/tests/automatic/full_chain/two_streams/check_windows.bat
index c2ba213cb82ae06693ea682576a93e35a714f2bc..5e3b68c2c92e7b6baa3920540be8eb77266553e8 100644
--- a/tests/automatic/full_chain/two_streams/check_windows.bat
+++ b/tests/automatic/full_chain/two_streams/check_windows.bat
@@ -7,8 +7,6 @@ SET stream2=s2
 SET receiver_root_folder=c:\tmp\asapo\receiver\files
 SET receiver_folder="%receiver_root_folder%\test_facility\gpfs\%beamline%\2019\data\%beamtime_id%"
 
-"%3" token -secret auth_secret.key %beamtime_id% > token
-set /P token=< token
 
 set proxy_address="127.0.0.1:8400"
 
@@ -17,6 +15,9 @@ echo db.%beamtime_id%_%stream2%.insert({dummy:1}) | %mongo_exe% %beamtime_id%_%s
 
 call start_services.bat
 
+"%3" token -endpoint http://127.0.0.1:8400/asapo-authorizer -secret admin_token.key -types read %beamtime_id% > token
+set /P token=< token
+
 REM producer
 mkdir %receiver_folder%
 start /B "" "%1" %proxy_address% %beamtime_id%%%%stream1% 100 1000 4 0 100
diff --git a/tests/automatic/high_avail/broker_mongo_restart/check_linux.sh b/tests/automatic/high_avail/broker_mongo_restart/check_linux.sh
index 440d88d8ad6e6f2e018905e0bf7264c5afbd301c..89a8247cb4cbdf3e36e9c572f893b69b05f33ad0 100755
--- a/tests/automatic/high_avail/broker_mongo_restart/check_linux.sh
+++ b/tests/automatic/high_avail/broker_mongo_restart/check_linux.sh
@@ -10,7 +10,6 @@ asapo_tool_bin=$3
 network_type=$4
 
 beamtime_id=asapo_test
-token=`$asapo_tool_bin token -secret auth_secret.key $beamtime_id`
 
 monitor_database_name=db_test
 proxy_address=127.0.0.1:8400
@@ -81,6 +80,9 @@ nomad run broker.nmd
 
 sleep 1
 
+token=`$asapo_tool_bin token -endpoint http://localhost:8400/asapo-authorizer -secret admin_token.key -types read $beamtime_id`
+
+
 echo "db.${beamtime_id}_detector.insert({dummy:1})" | mongo --port 27016 ${beamtime_id}_detector
 
 
diff --git a/tests/automatic/high_avail/services_restart/check_linux.sh b/tests/automatic/high_avail/services_restart/check_linux.sh
index 8020cb72ce5b0f32c0da551b0064fad4b219aada..a9e011210b329a56a3174bbfa42bfb82b26446a9 100644
--- a/tests/automatic/high_avail/services_restart/check_linux.sh
+++ b/tests/automatic/high_avail/services_restart/check_linux.sh
@@ -10,7 +10,6 @@ asapo_tool_bin=$3
 network_type=$7
 
 beamtime_id=asapo_test
-token=`$asapo_tool_bin token -secret auth_secret.key $beamtime_id`
 
 monitor_database_name=db_test
 proxy_address=127.0.0.1:8400
@@ -45,6 +44,8 @@ nomad run broker.nmd
 
 sleep 1
 
+token=`$asapo_tool_bin token -endpoint http://localhost:8400/asapo-authorizer -secret admin_token.key -types read $beamtime_id`
+
 echo "db.${beamtime_id}_detector.insert({dummy:1})" | mongo  ${beamtime_id}_detector
 
 echo "Start producer"
diff --git a/tests/automatic/mongo_db/insert_retrieve/insert_retrieve_mongodb.cpp b/tests/automatic/mongo_db/insert_retrieve/insert_retrieve_mongodb.cpp
index c360f0339ac45070731b91e92a5c139ec903af9c..559e54d00c5a5e8d739580d82fdb2310e2884238 100644
--- a/tests/automatic/mongo_db/insert_retrieve/insert_retrieve_mongodb.cpp
+++ b/tests/automatic/mongo_db/insert_retrieve/insert_retrieve_mongodb.cpp
@@ -4,6 +4,7 @@
 
 #include "../../../common/cpp/src/database/mongodb_client.h"
 #include "testing.h"
+#include "asapo/common/data_structs.h"
 
 using asapo::Error;
 
@@ -43,6 +44,7 @@ int main(int argc, char* argv[]) {
     fi.buf_id = 18446744073709551615ull;
     fi.source = "host:1234";
 
+
     if (args.keyword != "Notconnected") {
         db.Connect("127.0.0.1", "data");
     }
@@ -60,6 +62,8 @@ int main(int argc, char* argv[]) {
     fi2.id = 123;
     fi1.timestamp = std::chrono::system_clock::now();
     fi2.timestamp = std::chrono::system_clock::now()+std::chrono::minutes(1);
+    fi2.name = asapo::kFinishStreamKeyword;
+    fi2.metadata=R"({"next_stream":"ns"})";
     db.Insert("data_test1", fi1, false);
     db.Insert("data_test1", fi2, false);
 
@@ -83,7 +87,9 @@ int main(int argc, char* argv[]) {
         err = db.GetLastStream(&info);
         M_AssertEq(nullptr, err);
         M_AssertEq(fi2.id, info.last_id);
-        M_AssertEq("test1",info.name);
+        M_AssertEq("test1", info.name);
+        M_AssertEq(true, info.finished);
+        M_AssertEq("ns",info.next_stream);
     }
 
     return 0;
diff --git a/tests/automatic/producer/aai/check_linux.sh b/tests/automatic/producer/aai/check_linux.sh
index eb3b7492f80da285e49aa1124b7e768b1426edfe..d5aeba38c60264f46a24053b42c9976280b9fd98 100644
--- a/tests/automatic/producer/aai/check_linux.sh
+++ b/tests/automatic/producer/aai/check_linux.sh
@@ -13,8 +13,7 @@ facility=test_facility
 year=2019
 receiver_folder=${receiver_root_folder}/${facility}/gpfs/${beamline}/${year}/data/${beamtime_id}
 receiver_folder2=${receiver_root_folder}/${facility}/gpfs/${beamline}/${year}/data/${beamtime_id2}
-token=-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4= # for bl_p07
-
+token=$BLP07_W_TOKEN
 
 Cleanup() {
 	echo cleanup
diff --git a/tests/automatic/producer/aai/check_windows.bat b/tests/automatic/producer/aai/check_windows.bat
index a115afb6a12a88cb900b76d570b491ba45cc17cb..fa0c3b90200640a4b293687a05da34cb7c47639f 100644
--- a/tests/automatic/producer/aai/check_windows.bat
+++ b/tests/automatic/producer/aai/check_windows.bat
@@ -8,7 +8,8 @@ SET receiver_folder="%receiver_root_folder%\test_facility\gpfs\%beamline%\2019\d
 SET receiver_folder2="%receiver_root_folder%\test_facility\gpfs\%beamline%\2019\data\%beamtime_id2%"
 SET dbname=%beamtime_id%_%data_source%
 SET dbname2=%beamtime_id2%_%data_source%
-SET token=-pZmisCNjAbjT2gFBKs3OB2kNOU79SNsfHud0bV8gS4=
+SET token=%BLP07_W_TOKEN%
+
 
 echo db.%dbname%.insert({dummy:1})" | %mongo_exe% %dbname%
 
diff --git a/tests/automatic/producer/aai/settings.json.tpl.in b/tests/automatic/producer/aai/settings.json.tpl.in
index 319ef7063706b37efcbe6c62bbdadb9fe7bfe8b9..a98ad02c79c683d5be97492e6aed38a945b80f54 100644
--- a/tests/automatic/producer/aai/settings.json.tpl.in
+++ b/tests/automatic/producer/aai/settings.json.tpl.in
@@ -2,7 +2,8 @@
   "Port": {{ env "NOMAD_PORT_authorizer" }},
   "LogLevel":"debug",
   "CurrentBeamlinesFolder":"@CURRENT_BEAMLINES_FOLDER@",
-  "SecretFile":"auth_secret.key"
+  "UserSecretFile":"auth_secret.key",
+  "AdminSecretFile":"auth_secret_admin.key"
 }
 
 
diff --git a/tests/automatic/producer/beamtime_metadata/beamtime_metadata.cpp b/tests/automatic/producer/beamtime_metadata/beamtime_metadata.cpp
index 5d91a1fd83076ae400f20452e146cbd31629e058..dc1c1f1eff961eb3d87f7edbba81922fc1a2b074 100644
--- a/tests/automatic/producer/beamtime_metadata/beamtime_metadata.cpp
+++ b/tests/automatic/producer/beamtime_metadata/beamtime_metadata.cpp
@@ -21,7 +21,6 @@ void PrintCommandArguments(const Args& args) {
 }
 
 void ProcessCommandArguments(int argc, char* argv[], Args* args) {
-    asapo::ExitAfterPrintVersionIfNeeded("dummy beamtime metadata", argc, argv);
     if (argc != 4) {
         std::cout <<
                   "Usage: " << argv[0] <<
diff --git a/tests/automatic/producer/python_api/producer_api.py b/tests/automatic/producer/python_api/producer_api.py
index eb14bca76ed10a90c94b978bfb501a007332d1d9..2b419865037cb5cc806da97d3bff01743045c5df 100644
--- a/tests/automatic/producer/python_api/producer_api.py
+++ b/tests/automatic/producer/python_api/producer_api.py
@@ -36,11 +36,21 @@ def callback(payload, err):
         print("successfuly sent: ", payload)
     lock.release()
 
+def assert_version(version):
+    print("asserting version ",version)
+    ok = version['supported'] and version['client'] and version['server']
+    if not ok:
+        sys.exit(1)
 
 producer = asapo_producer.create_producer(endpoint,'processed', beamtime, 'auto', data_source, token, nthreads, 60000)
 
 producer.set_log_level("debug")
 
+
+version = producer.get_version_info()
+assert_version(version)
+
+
 # send single file
 producer.send_file(1, local_path="./file1", exposed_path="processed/" + data_source + "/" + "file1",
                    user_meta='{"test_key":"test_val"}', callback=callback)
@@ -128,6 +138,7 @@ producer.send_file(1, local_path="./file1", exposed_path="processed/" + data_sou
 producer.wait_requests_finished(50000)
 n = producer.get_requests_queue_size()
 assert_eq(n, 0, "requests in queue")
+assert_eq(n, 0, "requests in queue")
 
 # send to another data to stream stream
 producer.send(2, "processed/" + data_source + "/" + "file10", None,
@@ -137,9 +148,19 @@ producer.wait_requests_finished(50000)
 n = producer.get_requests_queue_size()
 assert_eq(n, 0, "requests in queue")
 
-#stream infos
+# pool limits (checking volume only)
+data = np.arange(1000000, dtype=np.float64)
+producer.set_requests_queue_limits(0,1)
+try:
+    producer.send(11, "processed/bla", data)
+except asapo_producer.AsapoRequestsPoolIsFull as e:
+    print(e)
+else:
+    print("should be AsapoRequestsPoolIsFull error ")
+    sys.exit(1)
 
 
+#stream infos
 info = producer.stream_info()
 assert_eq(info['lastId'], 10, "stream_info last id")
 assert_eq(info['name'], "default", "stream_info name")
diff --git a/tests/automatic/producer_receiver/transfer_datasets/check_windows.bat b/tests/automatic/producer_receiver/transfer_datasets/check_windows.bat
index f3dd3760212d01bf26192f8c37bfbf2d6b72fda5..71219ec2eb4868eacbda8b0ca9c0121a6178f16f 100644
--- a/tests/automatic/producer_receiver/transfer_datasets/check_windows.bat
+++ b/tests/automatic/producer_receiver/transfer_datasets/check_windows.bat
@@ -11,7 +11,7 @@ call start_services.bat
 
 mkdir %receiver_folder%
 
-"%1" localhost:8400 %beamtime_id% 100 1 1 0 30 3
+"%1" 127.0.0.1:8400 %beamtime_id% 100 1 1 0 30 3
 
 ping 192.0.2.1 -n 1 -w 1000 > nul
 
diff --git a/tests/automatic/producer_receiver/transfer_single_file/check_windows.bat b/tests/automatic/producer_receiver/transfer_single_file/check_windows.bat
index e9e6e758612c236798527fc5e4b210def915b5fa..ce3967d729f3737647b4302c4e9d07994c80e9ea 100644
--- a/tests/automatic/producer_receiver/transfer_single_file/check_windows.bat
+++ b/tests/automatic/producer_receiver/transfer_single_file/check_windows.bat
@@ -11,14 +11,14 @@ call start_services.bat
 
 mkdir %receiver_folder%
 
-"%1" localhost:8400 %beamtime_id% 100 1 1 0 30
+"%1" 127.0.0.1:8400 %beamtime_id% 100 1 1 0 30
 
 ping 192.0.2.1 -n 1 -w 1000 > nul
 
 FOR /F "usebackq" %%A IN ('%receiver_folder%\processed\1') DO set size=%%~zA
 if %size% NEQ 100000 goto :error
 
-"%1" localhost:8400 wrong_id 100 1 1 0 2 2>1 | findstr /c:"authorization"  || goto :error
+"%1" 127.0.0.1:8400 wrong_id 100 1 1 0 2 2>1 | findstr /c:"authorization"  || goto :error
 
 goto :clean
 
diff --git a/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_windows.bat b/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_windows.bat
index c1d0c0e22d549aaf490acb4f59fd47832c2a3c97..7ddac5f47c1166e58d6ae55645a2cbea9ed1c07b 100644
--- a/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_windows.bat
+++ b/tests/automatic/producer_receiver/transfer_single_file_bypass_buffer/check_windows.bat
@@ -11,7 +11,7 @@ call start_services.bat
 
 mkdir %receiver_folder%
 
-"%1" localhost:8400 %beamtime_id% 60000 1 1 0 30
+"%1" 127.0.0.1:8400 %beamtime_id% 60000 1 1 0 30
 
 ping 192.0.2.1 -n 1 -w 1000 > nul
 
diff --git a/tests/automatic/settings/admin_token.key b/tests/automatic/settings/admin_token.key
new file mode 100644
index 0000000000000000000000000000000000000000..eaffcbbc648302e631187cfd1b4c9eeed73c457a
--- /dev/null
+++ b/tests/automatic/settings/admin_token.key
@@ -0,0 +1 @@
+eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiJjMTkyc29qaXB0MzB1dGQ3bDdhZyIsInN1YiI6ImFkbWluIiwiRXh0cmFDbGFpbXMiOnsiQWNjZXNzVHlwZXMiOlsiY3JlYXRlIl19fQ.gVEFtqaAcP9HSzttWX2GrNBaM52np5k8k-7BqDAJ3xw
\ No newline at end of file
diff --git a/tests/automatic/settings/auth_secret_admin.key b/tests/automatic/settings/auth_secret_admin.key
new file mode 100644
index 0000000000000000000000000000000000000000..3eb59062c67f44eb713096536762d82300c1dee5
--- /dev/null
+++ b/tests/automatic/settings/auth_secret_admin.key
@@ -0,0 +1 @@
+12c2ljwewezgnea
\ No newline at end of file
diff --git a/tests/automatic/settings/authorizer_settings.json.tpl.lin b/tests/automatic/settings/authorizer_settings.json.tpl.lin
index 1c411f2b66702fe72e7b225bd1e1fe9ffb3b57ae..e3cc0585baeeb84ac5963eb14025c63f38a6a49d 100644
--- a/tests/automatic/settings/authorizer_settings.json.tpl.lin
+++ b/tests/automatic/settings/authorizer_settings.json.tpl.lin
@@ -5,7 +5,8 @@
   "beamline-path":"/tmp/asapo/receiver/files/beamline/test/current"},
   {"beamtimeId":"asapo_test1","beamline":"test1","core-path":"/tmp/asapo/receiver/files/test_facility/gpfs/test1/2019/data/asapo_test1"},
   {"beamtimeId":"asapo_test2","beamline":"test2","core-path":"/tmp/asapo/receiver/files/test_facility/gpfs/test2/2019/data/asapo_test2"}],
-  "SecretFile":"auth_secret.key",
+  "UserSecretFile":"auth_secret.key",
+  "AdminSecretFile":"auth_secret_admin.key",
   "TokenDurationMin":600,
   "Ldap":
   {
diff --git a/tests/automatic/settings/authorizer_settings.json.tpl.win b/tests/automatic/settings/authorizer_settings.json.tpl.win
index e0fd183c0f8d472f991f6e64c8ae019548393ae4..31ea1eb10749c7845824160e5b5f771ab4efb485 100644
--- a/tests/automatic/settings/authorizer_settings.json.tpl.win
+++ b/tests/automatic/settings/authorizer_settings.json.tpl.win
@@ -5,7 +5,8 @@
   "beamline-path":"c:\\tmp\\asapo\\receiver\\files\\beamline\\test\\current"},
   {"beamtimeId":"asapo_test1","beamline":"test1","core-path":"c:\\tmp\\asapo\\receiver\\files\\test_facility\\gpfs\\test1\\2019\\data\\asapo_test1"},
   {"beamtimeId":"asapo_test2","beamline":"test2","core-path":"c:\\tmp\\asapo\\receiver\\files\\test_facility\\gpfs\\test2\\2019\\data\\asapo_test2"}],
-  "SecretFile":"auth_secret.key",
+  "UserSecretFile":"auth_secret.key",
+  "AdminSecretFile":"auth_secret_admin.key",
   "TokenDurationMin":600,
   "Ldap":
   {
diff --git a/tests/automatic/settings/broker_settings.json b/tests/automatic/settings/broker_settings.json
index a80cbbccdaa650fbfc2da3570dceb3eb392acbae..a6fb5a48041ae1550dd1f5ffe797929ebac2b0bd 100644
--- a/tests/automatic/settings/broker_settings.json
+++ b/tests/automatic/settings/broker_settings.json
@@ -1,9 +1,10 @@
 {
   "DatabaseServer":"127.0.0.1:27017",
   "PerformanceDbServer": "localhost:8086",
+  "MonitorPerformance": true,
+  "AuthorizationServer": "localhost:8400/asapo-authorizer",
   "PerformanceDbName": "db_test",
   "Port":5005,
   "LogLevel":"info",
-  "CheckResendInterval":0,
-  "SecretFile":"auth_secret.key"
+  "CheckResendInterval":0
 }
\ No newline at end of file
diff --git a/tests/automatic/settings/broker_settings.json.tpl b/tests/automatic/settings/broker_settings.json.tpl
index 81860d6aef6faaeb4c81bf0462500a440456ce1e..b79228a7a658f1e465a60a21f4692fb633a5df66 100644
--- a/tests/automatic/settings/broker_settings.json.tpl
+++ b/tests/automatic/settings/broker_settings.json.tpl
@@ -1,10 +1,11 @@
 {
   "DatabaseServer":"auto",
   "DiscoveryServer": "localhost:8400/asapo-discovery",
+  "AuthorizationServer": "localhost:8400/asapo-authorizer",
   "PerformanceDbServer": "localhost:8086",
+  "MonitorPerformance": true,
   "CheckResendInterval":0,
   "PerformanceDbName": "db_test",
   "Port":{{ env "NOMAD_PORT_broker" }},
-  "LogLevel":"info",
-  "SecretFile":"auth_secret.key"
+  "LogLevel":"info"
 }
\ No newline at end of file
diff --git a/tests/automatic/settings/receiver_fabric.json.tpl.lin.in b/tests/automatic/settings/receiver_fabric.json.tpl.lin.in
index 2138a3296e52ba3c6dd355938c021eb05edbbbf6..6cf20b1cde759790e4373b650413524c0eb10d86 100644
--- a/tests/automatic/settings/receiver_fabric.json.tpl.lin.in
+++ b/tests/automatic/settings/receiver_fabric.json.tpl.lin.in
@@ -1,5 +1,6 @@
 {
   "PerformanceDbServer":"localhost:8086",
+  "MonitorPerformance": true,
   "PerformanceDbName": "db_test",
   "DatabaseServer":"auto",
   "DiscoveryServer": "localhost:8400/asapo-discovery",
diff --git a/tests/automatic/settings/receiver_tcp.json.tpl.lin.in b/tests/automatic/settings/receiver_tcp.json.tpl.lin.in
index 6061a8a9fd8b7438790b209e8d77c756147a629c..a6f98fe37d8b198a61170a5578f0f212c38b5480 100644
--- a/tests/automatic/settings/receiver_tcp.json.tpl.lin.in
+++ b/tests/automatic/settings/receiver_tcp.json.tpl.lin.in
@@ -1,5 +1,6 @@
 {
   "PerformanceDbServer":"localhost:8086",
+  "MonitorPerformance": true,
   "PerformanceDbName": "db_test",
   "DatabaseServer":"auto",
   "DiscoveryServer": "localhost:8400/asapo-discovery",
diff --git a/tests/automatic/settings/receiver_tcp.json.tpl.win.in b/tests/automatic/settings/receiver_tcp.json.tpl.win.in
index f96debf98172ac42fd4ce2854d3d7a5c265b3873..c0989d12e5dc83a5f0293976f68493f1eb8fc2b4 100644
--- a/tests/automatic/settings/receiver_tcp.json.tpl.win.in
+++ b/tests/automatic/settings/receiver_tcp.json.tpl.win.in
@@ -1,5 +1,6 @@
 {
   "PerformanceDbServer":"localhost:8086",
+  "MonitorPerformance": true,
   "PerformanceDbName": "db_test",
   "DatabaseServer":"auto",
   "DiscoveryServer": "localhost:8400/asapo-discovery",
diff --git a/tests/manual/broker_debug_local/authorizer.json.tpl b/tests/manual/broker_debug_local/authorizer.json.tpl
index 9ad08b7e6111f19abd222aa13fa07abf1fb9025e..92292267951f4e5ddb9017fa3df32293531e3719 100644
--- a/tests/manual/broker_debug_local/authorizer.json.tpl
+++ b/tests/manual/broker_debug_local/authorizer.json.tpl
@@ -4,7 +4,8 @@
   "AlwaysAllowedBeamtimes":[{"beamtimeId":"asapo_test","beamline":"test","Year":"2019","Facility":"test_facility"},
   {"beamtimeId":"asapo_test1","beamline":"test1","Year":"2019","Facility":"test_facility"},
   {"beamtimeId":"asapo_test2","beamline":"test2","Year":"2019","Facility":"test_facility"}],
-  "SecretFile":"auth_secret.key"
+  "UserSecretFile":"auth_secret.key",
+  "AdminSecretFile":"auth_secret_admin.key"
 }
 
 
diff --git a/tests/manual/broker_debug_local/broker.json b/tests/manual/broker_debug_local/broker.json
index 11c716e064c29638fa1dc000fce31b35aece8f69..cb0155b9ccc950acfb202e8f403c3dda0f229609 100644
--- a/tests/manual/broker_debug_local/broker.json
+++ b/tests/manual/broker_debug_local/broker.json
@@ -1,10 +1,11 @@
 {
   "DatabaseServer":"auto",
   "DiscoveryServer": "localhost:8400/discovery",
+  "AuthorizationServer": "localhost:8400/asapo-authorizer",
   "PerformanceDbServer": "localhost:8086",
+  "MonitorPerformance": true,
   "CheckResendInterval":10,
   "PerformanceDbName": "db_test",
   "Port": 5005,
-  "LogLevel":"info",
-  "SecretFile":"auth_secret.key"
+  "LogLevel":"info"
 }
diff --git a/tests/manual/broker_debug_local/receiver.json b/tests/manual/broker_debug_local/receiver.json
index 8a358c98cbb01faf364b65eb56942638c3c57133..3dfd35396ecf0c78607e5a9983ac33b28a64428c 100644
--- a/tests/manual/broker_debug_local/receiver.json
+++ b/tests/manual/broker_debug_local/receiver.json
@@ -1,5 +1,6 @@
 {
   "PerformanceDbServer":"localhost:8086",
+  "MonitorPerformance": true,
   "PerformanceDbName": "db_test",
   "DatabaseServer":"localhost:27017",
   "DiscoveryServer": "localhost:8400/discovery",
diff --git a/tests/manual/broker_debug_local/receiver.json.tpl b/tests/manual/broker_debug_local/receiver.json.tpl
index 02e0441d517806b27fbd6ed906cf97519188cdba..4de57e97bac84e89d88767697ef160ec04e85b39 100644
--- a/tests/manual/broker_debug_local/receiver.json.tpl
+++ b/tests/manual/broker_debug_local/receiver.json.tpl
@@ -1,5 +1,6 @@
 {
   "PerformanceDbServer":"localhost:8086",
+  "MonitorPerformance": true,
   "PerformanceDbName": "db_test",
   "DatabaseServer":"auto",
   "DiscoveryServer": "localhost:8400/discovery",
diff --git a/tests/manual/performance_broker/settings.json b/tests/manual/performance_broker/settings.json
index 76e84d085f3e8550ddfb0d3e54b15ff18059c116..b67ac89cdefb2b90aca59811a3675a8351eec59a 100644
--- a/tests/manual/performance_broker/settings.json
+++ b/tests/manual/performance_broker/settings.json
@@ -1,9 +1,10 @@
 {
   "DatabaseServer":"localhost:27017",
   "PerformanceDbServer": "localhost:8086",
+  "MonitorPerformance": true,
+  "AuthorizationServer": "localhost:5007",
   "PerformanceDbName": "db_test",
   "Port":5005,
   "LogLevel":"info",
-  "CheckResendInterval":10,
-  "SecretFile":"auth_secret.key"
+  "CheckResendInterval":10
 }
\ No newline at end of file
diff --git a/tests/manual/performance_broker_receiver/getlast_broker.cpp b/tests/manual/performance_broker_receiver/getlast_broker.cpp
index 59011a35aeee20a613a19c85b37a8d264715e4fc..f6a3c1a9270c5ec2de4e63a8ecd8345a0e02fa42 100644
--- a/tests/manual/performance_broker_receiver/getlast_broker.cpp
+++ b/tests/manual/performance_broker_receiver/getlast_broker.cpp
@@ -169,7 +169,6 @@ int ReadAllData(const Args& params, uint64_t* duration_ms, int* nerrors, int* nb
 }
 
 int main(int argc, char* argv[]) {
-    asapo::ExitAfterPrintVersionIfNeeded("GetLast consumer Example", argc, argv);
     Args params;
     params.datasets = false;
     if (argc != 9 && argc != 10) {
diff --git a/tests/manual/performance_full_chain_simple/authorizer.json b/tests/manual/performance_full_chain_simple/authorizer.json
index 8400681c58b4a9f2dde30e227686e4e9388b5bc8..be04b954b994f0317372212b67886a726a25e8f8 100644
--- a/tests/manual/performance_full_chain_simple/authorizer.json
+++ b/tests/manual/performance_full_chain_simple/authorizer.json
@@ -2,7 +2,8 @@
   "Port": 5007,
   "LogLevel":"info",
   "AlwaysAllowedBeamtimes":[{"beamtimeId":"asapo_test","beamline":"test","Year":"2019","Facility":"test_facility"}],
-  "SecretFile":"auth_secret.key"
+  "UserSecretFile":"auth_secret.key",
+  "AdminSecretFile":"auth_secret_admin.key"
 }
 
 
diff --git a/tests/manual/performance_full_chain_simple/broker.json b/tests/manual/performance_full_chain_simple/broker.json
index 76e84d085f3e8550ddfb0d3e54b15ff18059c116..b67ac89cdefb2b90aca59811a3675a8351eec59a 100644
--- a/tests/manual/performance_full_chain_simple/broker.json
+++ b/tests/manual/performance_full_chain_simple/broker.json
@@ -1,9 +1,10 @@
 {
   "DatabaseServer":"localhost:27017",
   "PerformanceDbServer": "localhost:8086",
+  "MonitorPerformance": true,
+  "AuthorizationServer": "localhost:5007",
   "PerformanceDbName": "db_test",
   "Port":5005,
   "LogLevel":"info",
-  "CheckResendInterval":10,
-  "SecretFile":"auth_secret.key"
+  "CheckResendInterval":10
 }
\ No newline at end of file
diff --git a/tests/manual/performance_full_chain_simple/receiver.json b/tests/manual/performance_full_chain_simple/receiver.json
index 3b4bfd1a0223e5040fe36d04d4c07f3378b24899..3c36dadd38dfa8d90994b45019396eefc119d44f 100644
--- a/tests/manual/performance_full_chain_simple/receiver.json
+++ b/tests/manual/performance_full_chain_simple/receiver.json
@@ -1,5 +1,6 @@
 {
   "PerformanceDbServer":"localhost:8086",
+  "MonitorPerformance": true,
   "PerformanceDbName": "db_test",
   "DatabaseServer":"localhost:27017",
   "AuthorizationServer": "localhost:5007",
diff --git a/tests/manual/performance_producer_receiver/authorizer.json b/tests/manual/performance_producer_receiver/authorizer.json
index 8400681c58b4a9f2dde30e227686e4e9388b5bc8..be04b954b994f0317372212b67886a726a25e8f8 100644
--- a/tests/manual/performance_producer_receiver/authorizer.json
+++ b/tests/manual/performance_producer_receiver/authorizer.json
@@ -2,7 +2,8 @@
   "Port": 5007,
   "LogLevel":"info",
   "AlwaysAllowedBeamtimes":[{"beamtimeId":"asapo_test","beamline":"test","Year":"2019","Facility":"test_facility"}],
-  "SecretFile":"auth_secret.key"
+  "UserSecretFile":"auth_secret.key",
+  "AdminSecretFile":"auth_secret_admin.key"
 }
 
 
diff --git a/tests/manual/performance_producer_receiver/receiver.json b/tests/manual/performance_producer_receiver/receiver.json
index 485e80876e9e42b618d8ff4387bcc9899ba6cfb4..743e5035b20f458bb8183a475fb43d0427581a61 100644
--- a/tests/manual/performance_producer_receiver/receiver.json
+++ b/tests/manual/performance_producer_receiver/receiver.json
@@ -1,5 +1,6 @@
 {
   "PerformanceDbServer":"localhost:8086",
+  "MonitorPerformance": true,
   "PerformanceDbName": "db_test",
   "DatabaseServer":"localhost:27017",
   "DiscoveryServer": "localhost:8400/discovery",
diff --git a/tests/manual/producer_cpp/producer.cpp b/tests/manual/producer_cpp/producer.cpp
index c198a7506144518a54c0f81f5c15aea2935c2a49..03607b93839e1599617ad19286a76e593734c748 100644
--- a/tests/manual/producer_cpp/producer.cpp
+++ b/tests/manual/producer_cpp/producer.cpp
@@ -1,7 +1,7 @@
 #include <thread>
 #include <chrono>
 #include "asapo/asapo_producer.h"
-
+#include <iostream>
 
 void ProcessAfterSend(asapo::RequestCallbackPayload payload, asapo::Error err) {
     if (err) {
diff --git a/tests/manual/python_tests/producer/authorizer.json.tpl b/tests/manual/python_tests/producer/authorizer.json.tpl
index 9ad08b7e6111f19abd222aa13fa07abf1fb9025e..92292267951f4e5ddb9017fa3df32293531e3719 100644
--- a/tests/manual/python_tests/producer/authorizer.json.tpl
+++ b/tests/manual/python_tests/producer/authorizer.json.tpl
@@ -4,7 +4,8 @@
   "AlwaysAllowedBeamtimes":[{"beamtimeId":"asapo_test","beamline":"test","Year":"2019","Facility":"test_facility"},
   {"beamtimeId":"asapo_test1","beamline":"test1","Year":"2019","Facility":"test_facility"},
   {"beamtimeId":"asapo_test2","beamline":"test2","Year":"2019","Facility":"test_facility"}],
-  "SecretFile":"auth_secret.key"
+  "UserSecretFile":"auth_secret.key",
+  "AdminSecretFile":"auth_secret_admin.key"
 }
 
 
diff --git a/tests/manual/python_tests/producer/receiver.json.tpl b/tests/manual/python_tests/producer/receiver.json.tpl
index 93539e5f794765ab6e40d2a589c3cd0d402a4569..14a4032cde80ba693285b7f03f1100dc49fbcba3 100644
--- a/tests/manual/python_tests/producer/receiver.json.tpl
+++ b/tests/manual/python_tests/producer/receiver.json.tpl
@@ -1,5 +1,6 @@
 {
   "PerformanceDbServer":"localhost:8086",
+  "MonitorPerformance": true,
   "PerformanceDbName": "db_test",
   "DatabaseServer":"localhost:27017",
   "DiscoveryServer": "localhost:8400/discovery",
diff --git a/tests/manual/python_tests/producer_wait_bug_mongo/authorizer.json.tpl b/tests/manual/python_tests/producer_wait_bug_mongo/authorizer.json.tpl
index 9ad08b7e6111f19abd222aa13fa07abf1fb9025e..92292267951f4e5ddb9017fa3df32293531e3719 100644
--- a/tests/manual/python_tests/producer_wait_bug_mongo/authorizer.json.tpl
+++ b/tests/manual/python_tests/producer_wait_bug_mongo/authorizer.json.tpl
@@ -4,7 +4,8 @@
   "AlwaysAllowedBeamtimes":[{"beamtimeId":"asapo_test","beamline":"test","Year":"2019","Facility":"test_facility"},
   {"beamtimeId":"asapo_test1","beamline":"test1","Year":"2019","Facility":"test_facility"},
   {"beamtimeId":"asapo_test2","beamline":"test2","Year":"2019","Facility":"test_facility"}],
-  "SecretFile":"auth_secret.key"
+  "UserSecretFile":"auth_secret.key",
+  "AdminSecretFile":"auth_secret_admin.key"
 }
 
 
diff --git a/tests/manual/python_tests/producer_wait_bug_mongo/receiver.json.tpl b/tests/manual/python_tests/producer_wait_bug_mongo/receiver.json.tpl
index 93539e5f794765ab6e40d2a589c3cd0d402a4569..14a4032cde80ba693285b7f03f1100dc49fbcba3 100644
--- a/tests/manual/python_tests/producer_wait_bug_mongo/receiver.json.tpl
+++ b/tests/manual/python_tests/producer_wait_bug_mongo/receiver.json.tpl
@@ -1,5 +1,6 @@
 {
   "PerformanceDbServer":"localhost:8086",
+  "MonitorPerformance": true,
   "PerformanceDbName": "db_test",
   "DatabaseServer":"localhost:27017",
   "DiscoveryServer": "localhost:8400/discovery",
diff --git a/tests/manual/receiver_debug_local/authorizer.json.tpl b/tests/manual/receiver_debug_local/authorizer.json.tpl
index 9ad08b7e6111f19abd222aa13fa07abf1fb9025e..92292267951f4e5ddb9017fa3df32293531e3719 100644
--- a/tests/manual/receiver_debug_local/authorizer.json.tpl
+++ b/tests/manual/receiver_debug_local/authorizer.json.tpl
@@ -4,7 +4,8 @@
   "AlwaysAllowedBeamtimes":[{"beamtimeId":"asapo_test","beamline":"test","Year":"2019","Facility":"test_facility"},
   {"beamtimeId":"asapo_test1","beamline":"test1","Year":"2019","Facility":"test_facility"},
   {"beamtimeId":"asapo_test2","beamline":"test2","Year":"2019","Facility":"test_facility"}],
-  "SecretFile":"auth_secret.key"
+  "UserSecretFile":"auth_secret.key",
+  "AdminSecretFile":"auth_secret_admin.key"
 }
 
 
diff --git a/tests/manual/receiver_debug_local/broker.json.tpl b/tests/manual/receiver_debug_local/broker.json.tpl
index 9c840220c40110c613cc41dd55d82b5704311823..968227cab95b7ca440592a2e1c3c0c2c9556d48b 100644
--- a/tests/manual/receiver_debug_local/broker.json.tpl
+++ b/tests/manual/receiver_debug_local/broker.json.tpl
@@ -2,9 +2,11 @@
   "DatabaseServer":"auto",
   "DiscoveryServer": "localhost:8400/discovery",
   "PerformanceDbServer": "localhost:8086",
+  "MonitorPerformance": true,
   "PerformanceDbName": "db_test",
   "Port":{{ env "NOMAD_PORT_broker" }},
   "LogLevel":"info",
   "CheckResendInterval":10,
-  "SecretFile":"auth_secret.key"
+  "UserSecretFile":"auth_secret.key",
+  "AdminSecretFile":"auth_secret_admin.key"
 }
\ No newline at end of file
diff --git a/tests/manual/receiver_debug_local/receiver.json b/tests/manual/receiver_debug_local/receiver.json
index 8a358c98cbb01faf364b65eb56942638c3c57133..3dfd35396ecf0c78607e5a9983ac33b28a64428c 100644
--- a/tests/manual/receiver_debug_local/receiver.json
+++ b/tests/manual/receiver_debug_local/receiver.json
@@ -1,5 +1,6 @@
 {
   "PerformanceDbServer":"localhost:8086",
+  "MonitorPerformance": true,
   "PerformanceDbName": "db_test",
   "DatabaseServer":"localhost:27017",
   "DiscoveryServer": "localhost:8400/discovery",