diff --git a/broker/src/asapo_broker/main/broker.go b/broker/src/asapo_broker/main/broker.go
index f9a41942ccc4a60893202d9e319ed066c73b6aca..202022471fca930a7915561f433806c6dc9eb3aa 100644
--- a/broker/src/asapo_broker/main/broker.go
+++ b/broker/src/asapo_broker/main/broker.go
@@ -39,6 +39,8 @@ func main() {
 
 	log.SetLevel(logLevel)
 
+	server.CreateDiscoveryService()
+
 	err = server.InitDB(NewDefaultDatabase())
 	if err != nil {
 		log.Fatal(err.Error())
diff --git a/broker/src/asapo_broker/server/process_request.go b/broker/src/asapo_broker/server/process_request.go
index 39d6bef854c855fc0c4b0e83bbf0b31e893914e7..27c23cec4bdd2d7c62153ed18e742af7392033a9 100644
--- a/broker/src/asapo_broker/server/process_request.go
+++ b/broker/src/asapo_broker/server/process_request.go
@@ -30,7 +30,7 @@ func checkGroupID(w http.ResponseWriter, needGroupID bool, group_id string, db_n
 	}
 	if _, err := xid.FromString(group_id); err != nil {
 		err_str := "wrong groupid " + group_id
-		log_str := "processing get " + op + " request in " + db_name + " at " + settings.BrokerDbAddress + ": " + err_str
+		log_str := "processing get " + op + " request in " + db_name + " at " + settings.DatabaseServer + ": " + err_str
 		logger.Error(log_str)
 		fmt.Println(log_str)
 		w.WriteHeader(http.StatusBadRequest)
@@ -90,7 +90,7 @@ func processRequestInDb(db_name string, group_id string, op string, extra_param
 	defer db_new.Close()
 	statistics.IncreaseCounter()
 	answer, err := db_new.ProcessRequest(db_name, group_id, op, extra_param)
-	log_str := "processing request " + op + " in " + db_name + " at " + settings.BrokerDbAddress
+	log_str := "processing request " + op + " in " + db_name + " at " + settings.DatabaseServer
 	if err != nil {
 		return returnError(err, log_str)
 	}
diff --git a/broker/src/asapo_broker/server/request_common.go b/broker/src/asapo_broker/server/request_common.go
index 8cda3b9967a66f06a5639f5b99f4d01a498a9be8..d53a67d592cfe4b7788302af4cbfc5d805249e46 100644
--- a/broker/src/asapo_broker/server/request_common.go
+++ b/broker/src/asapo_broker/server/request_common.go
@@ -7,7 +7,7 @@ import (
 )
 
 func writeAuthAnswer(w http.ResponseWriter, requestName string, db_name string, err string) {
-	log_str := "processing " + requestName + " request in " + db_name + " at " + settings.BrokerDbAddress
+	log_str := "processing " + requestName + " request in " + db_name + " at " + settings.DatabaseServer
 	logger.Error(log_str + " - " + err)
 	w.WriteHeader(http.StatusUnauthorized)
 	w.Write([]byte(err))
diff --git a/broker/src/asapo_broker/server/server.go b/broker/src/asapo_broker/server/server.go
index 39213b7b1c3edd8e8edcb7e5bebf9d98ed5802d0..1e9b930b8720efaa9eda2c7b80695cbd872b4910 100644
--- a/broker/src/asapo_broker/server/server.go
+++ b/broker/src/asapo_broker/server/server.go
@@ -2,15 +2,20 @@ package server
 
 import (
 	"asapo_broker/database"
+	log "asapo_common/logger"
 	"asapo_common/utils"
+	"errors"
+	"io/ioutil"
+	"net/http"
 )
 
 var db database.Agent
 
 type serverSettings struct {
-	BrokerDbAddress  string
-	MonitorDbAddress string
-	MonitorDbName    string
+	DiscoveryServer  string
+	DatabaseServer  string
+	PerformanceDbServer string
+	PerformanceDbName    string
 	SecretFile       string
 	Port             int
 	LogLevel         string
@@ -20,12 +25,44 @@ var settings serverSettings
 var statistics serverStatistics
 var auth utils.Auth
 
-func InitDB(dbAgent database.Agent) error {
+type discoveryAPI struct {
+	Client  *http.Client
+	baseURL string
+}
+
+var discoveryService discoveryAPI
+
+func (api *discoveryAPI) GetMongoDbAddress() (string, error) {
+	resp, err := api.Client.Get(api.baseURL + "/mongo")
+	if err != nil {
+		return "", err
+	}
+	defer resp.Body.Close()
+	body, err := ioutil.ReadAll(resp.Body)
+	return string(body), err
+}
+
+func InitDB(dbAgent database.Agent) (err error) {
 	db = dbAgent
-	err := db.Connect(settings.BrokerDbAddress)
+	if settings.DatabaseServer == "auto" {
+		settings.DatabaseServer, err = discoveryService.GetMongoDbAddress()
+		if err != nil {
+			return err
+		}
+		if settings.DatabaseServer == "" {
+			return errors.New("no database servers found")
+		}
+		log.Debug("Got mongodb server: " + settings.DatabaseServer)
+	}
+
+	err = db.Connect(settings.DatabaseServer)
 	return err
 }
 
+func CreateDiscoveryService() {
+	discoveryService = discoveryAPI{&http.Client{}, "http://" + settings.DiscoveryServer}
+}
+
 func CleanupDB() {
 	if db != nil {
 		db.Close()
diff --git a/broker/src/asapo_broker/server/server_nottested.go b/broker/src/asapo_broker/server/server_nottested.go
index 943277aa75da290758fc6124952a806c8a18e4b1..990cf1409e6f133ccd48641647a9ea13350d0e35 100644
--- a/broker/src/asapo_broker/server/server_nottested.go
+++ b/broker/src/asapo_broker/server/server_nottested.go
@@ -38,20 +38,24 @@ func ReadConfig(fname string) (log.Level, error) {
 		return log.FatalLevel, err
 	}
 
-	if settings.BrokerDbAddress == "" {
-		return log.FatalLevel, errors.New("BrokerDbAddress not set")
+	if settings.DatabaseServer == "" {
+		return log.FatalLevel, errors.New("DatabaseServer not set")
 	}
 
-	if settings.MonitorDbAddress == "" {
-		return log.FatalLevel, errors.New("MonitorDbAddress not set")
+	if settings.PerformanceDbServer == "" {
+		return log.FatalLevel, errors.New("PerformanceDbServer not set")
+	}
+
+	if settings.DatabaseServer == "auto" && settings.DiscoveryServer == "" {
+		return log.FatalLevel, errors.New("DiscoveryServer not set for auto DatabaseServer")
 	}
 
 	if settings.Port == 0 {
 		return log.FatalLevel, errors.New("Server port not set")
 	}
 
-	if settings.MonitorDbName == "" {
-		return log.FatalLevel, errors.New("MonitorDbName not set")
+	if settings.PerformanceDbName == "" {
+		return log.FatalLevel, errors.New("PerformanceDbName not set")
 	}
 
 	if settings.SecretFile == "" {
diff --git a/broker/src/asapo_broker/server/server_test.go b/broker/src/asapo_broker/server/server_test.go
index 6654b67159137c566cfcc7e97dc3bfd42070e0d8..6f35c3a3ff194c492fc51c2cf065092ba19341c9 100644
--- a/broker/src/asapo_broker/server/server_test.go
+++ b/broker/src/asapo_broker/server/server_test.go
@@ -6,6 +6,8 @@ import (
 	"errors"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/mock"
+	"net/http"
+	"net/http/httptest"
 	"testing"
 )
 
@@ -37,7 +39,7 @@ func TestInitDBWithWrongAddress(t *testing.T) {
 
 	mock_db.ExpectedCalls = nil
 
-	settings.BrokerDbAddress = "0.0.0.0:0000"
+	settings.DatabaseServer = "0.0.0.0:0000"
 
 	for _, test := range initDBTests {
 		mock_db.On("Connect", "0.0.0.0:0000").Return(test.answer)
@@ -50,6 +52,28 @@ func TestInitDBWithWrongAddress(t *testing.T) {
 	db = nil
 }
 
+func TestInitDBWithAutoAddress(t *testing.T) {
+	mock_db := setup()
+
+	mock_db.ExpectedCalls = nil
+
+	settings.DatabaseServer = "auto"
+	mock_server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
+		assert.Equal(t, "/mongo", req.URL.String(), "request string")
+		rw.Write([]byte("0.0.0.0:0000"))
+	}))
+	defer mock_server.Close()
+
+	discoveryService = discoveryAPI{mock_server.Client(), mock_server.URL}
+
+	mock_db.On("Connect", "0.0.0.0:0000").Return(nil)
+	err := InitDB(mock_db)
+
+	assert.Equal(t, nil, err, "auto connect ok")
+	assertExpectations(t, mock_db)
+	db = nil
+}
+
 func TestCleanupDBWithoutInit(t *testing.T) {
 	mock_db := setup()
 
diff --git a/broker/src/asapo_broker/server/statistics.go b/broker/src/asapo_broker/server/statistics.go
index 97c6896f79252fb8f732bf6c1b6b5ba4f6bd8ac9..273e24919bf50b44e6b9ef8699fff1220c7c28fa 100644
--- a/broker/src/asapo_broker/server/statistics.go
+++ b/broker/src/asapo_broker/server/statistics.go
@@ -47,7 +47,7 @@ func (st *serverStatistics) WriteStatistic() (err error) {
 func (st *serverStatistics) Monitor() {
 	for {
 		time.Sleep(1000 * time.Millisecond)
-		logstr := "sending statistics to " + settings.MonitorDbAddress + ", dbname: " + settings.MonitorDbName
+		logstr := "sending statistics to " + settings.PerformanceDbServer + ", dbname: " + settings.PerformanceDbName
 		if err := st.WriteStatistic(); err != nil {
 			log.Error(logstr + " - " + err.Error())
 		} else {
diff --git a/broker/src/asapo_broker/server/statistics_writers.go b/broker/src/asapo_broker/server/statistics_writers.go
index 4ac374603f4960a91a587e4b9af3e13f3c1fdb93..cdad6dcb3738503ff48fee5fef00f904cb9cdd33 100644
--- a/broker/src/asapo_broker/server/statistics_writers.go
+++ b/broker/src/asapo_broker/server/statistics_writers.go
@@ -21,7 +21,7 @@ type StatisticInfluxDbWriter struct {
 
 func (writer *StatisticInfluxDbWriter) Write(statistics *serverStatistics) error {
 	c, err := client.NewHTTPClient(client.HTTPConfig{
-		Addr: "http://"+ settings.MonitorDbAddress,
+		Addr: "http://"+ settings.PerformanceDbServer,
 	})
 	if err != nil {
 		return err
@@ -29,7 +29,7 @@ func (writer *StatisticInfluxDbWriter) Write(statistics *serverStatistics) error
 	defer c.Close()
 
 	bp, _ := client.NewBatchPoints(client.BatchPointsConfig{
-		Database:  settings.MonitorDbName,
+		Database:  settings.PerformanceDbName,
 	})
 
 	tags := map[string]string{"Group ID": "0"}
diff --git a/common/go/src/asapo_common/utils/stucts.go b/common/go/src/asapo_common/utils/stucts.go
index f9bf6c316d657841c564b184dd9ea9fcc114f505..01b9bb6363af3e2538c450e396daf8de19f8ac6d 100644
--- a/common/go/src/asapo_common/utils/stucts.go
+++ b/common/go/src/asapo_common/utils/stucts.go
@@ -11,10 +11,14 @@ type BrokerInfo struct {
 	StaticEndpoint		 string
 }
 
+type MongoInfo struct {
+	StaticEndpoint		 string
+}
 
 type Settings struct {
 	Receiver 		ReceiverInfo
 	Broker 		    BrokerInfo
+	Mongo 			MongoInfo
 	ConsulEndpoints []string
 	Mode			string
 	Port            int
@@ -23,8 +27,8 @@ type Settings struct {
 
 func (settings *Settings) Validate() error {
 	if settings.Mode != "consul"{
-		if len(settings.Receiver.StaticEndpoints) == 0 || len(settings.Broker.StaticEndpoint) == 0 {
-		return errors.New("receiver or broker endpoints not set")
+		if len(settings.Receiver.StaticEndpoints) == 0 || len(settings.Broker.StaticEndpoint) == 0 || len(settings.Mongo.StaticEndpoint) == 0 {
+			return errors.New("static endpoints not set")
 		}
 	}
 
diff --git a/deploy/docker/cluster/Dockerfile b/deploy/docker/cluster/Dockerfile
index 94c777c5b3cb777a88d8728793f5dc3dc09f6c1b..c0182e934023cdc2fa5056ef5c96469450e3109e 100644
--- a/deploy/docker/cluster/Dockerfile
+++ b/deploy/docker/cluster/Dockerfile
@@ -2,12 +2,6 @@ FROM ubuntu:18.04
 
 MAINTAINER DESY IT
 
-ENV CONSUL_VERSION=1.6.0
-ENV NOMAD_VERSION=0.9.5
-ENV TERRAFORM_VERSION=0.12.7
-
-ENV HASHICORP_RELEASES=https://releases.hashicorp.com
-
 RUN apt-get update && apt-get install -y supervisor apt-transport-https \
         ca-certificates \
         curl \
@@ -24,6 +18,11 @@ RUN add-apt-repository \
 RUN apt-get update && apt-get install -y docker-ce-cli wget unzip
 
 
+ENV CONSUL_VERSION=1.6.0
+ENV NOMAD_VERSION=0.9.5
+ENV TERRAFORM_VERSION=0.12.8
+ENV HASHICORP_RELEASES=https://releases.hashicorp.com
+
 RUN set -eux && \
     mkdir -p /tmp/build && \
     cd /tmp/build && \
@@ -37,10 +36,14 @@ RUN set -eux && \
     rm -rf /tmp/build && \
 # tiny smoke test to ensure the binary we downloaded runs
     consul version && \
-    nomad version
+    nomad version && \
+    terraform version
 
 ADD supervisord.conf /etc/
 
 RUN mkdir -p /var/log/supervisord/
 
+COPY scripts/ /var/run/asapo/
+
+
 ENTRYPOINT ["supervisord", "--configuration", "/etc/supervisord.conf"]
diff --git a/deploy/docker/cluster/init_influxdb.sh b/deploy/docker/cluster/init_influxdb.sh
new file mode 100755
index 0000000000000000000000000000000000000000..bda6960f2d1dd592801c9e6076200524a690aa6f
--- /dev/null
+++ b/deploy/docker/cluster/init_influxdb.sh
@@ -0,0 +1,10 @@
+#!/usr/bin/env bash
+
+influx=$(dig +short @127.0.0.1 -p 8600 influxdb.service.asapo | head -1)
+
+databases="asapo_receivers asapo_brokers"
+
+for database in $databases
+do
+    curl -i -XPOST http://${influx}:8086/query --data-urlencode "q=CREATE DATABASE $database"
+done
diff --git a/deploy/docker/cluster/jobs/asapo.tf b/deploy/docker/cluster/jobs/asapo.tf
deleted file mode 100644
index 8e0496b35fafc85ba6a2ef6f53d8e1ca01bc632a..0000000000000000000000000000000000000000
--- a/deploy/docker/cluster/jobs/asapo.tf
+++ /dev/null
@@ -1,50 +0,0 @@
-provider "nomad" {
-  address = "http://localhost:4646"
-}
-
-variable "fluentd_logs" {
- default = true
-}
-
-variable "nginx_version" {
-  default = "latest"
-}
-
-variable "asapo_imagename_suffix" {
-  default = ""
-}
-
-variable "asapo_image_tag" {
-  default = "latest"
-}
-
-variable "shared_dir" {
-  default = "/tmp"
-}
-
-data "template_file" "nginx" {
-  template = "${file("./asapo-nginx.nmd.tpl")}"
-  vars = {
-    nginx_version = "${var.nginx_version}"
-  }
-}
-
-data "template_file" "asapo_services" {
-  template = "${file("./asapo-services.nmd.tpl")}"
-  vars = {
-    image_suffix = "${var.asapo_imagename_suffix}:${var.asapo_image_tag}"
-    shared_dir = "${var.shared_dir}"
-    fluentd_logs = "${var.fluentd_logs}"
-  }
-}
-
-resource "nomad_job" "asapo-nginx" {
-  jobspec = "${data.template_file.nginx.rendered}"
-}
-
-resource "nomad_job" "asapo-services" {
-  jobspec = "${data.template_file.asapo_services.rendered}"
-}
-
-
-
diff --git a/deploy/docker/cluster/jobs/broker.json.tpl b/deploy/docker/cluster/jobs/broker.json.tpl
deleted file mode 100644
index 9bc9f2edbd43ea98d4aac2bce41bac5953a59b7a..0000000000000000000000000000000000000000
--- a/deploy/docker/cluster/jobs/broker.json.tpl
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "BrokerDbAddress":"localhost:8400/mongo",
-  "MonitorDbAddress":"localhost:8400/influxdb",
-  "MonitorDbName": "asapo_brokers",
-  "port":{{ env "NOMAD_PORT_broker" }},
-  "LogLevel":"info",
-  "SecretFile":"/secrets/secret.key"
-}
diff --git a/deploy/docker/cluster/run.sh b/deploy/docker/cluster/run.sh
index 64d07fb89e7e246442247da03f347f689d780d1b..f41fc9497d82f4c377f16ae6912c16924da9caf8 100755
--- a/deploy/docker/cluster/run.sh
+++ b/deploy/docker/cluster/run.sh
@@ -1,2 +1,19 @@
-docker run --privileged --rm -v /var/run/docker.sock:/var/run/docker.sock -v `pwd`/jobs:/usr/local/nomad_jobs --name asapo --net=host -v /tmp/nomad:/tmp/nomad -v /var/lib/docker:/var/lib/docker -d yakser/asapo-cluster
+#!/usr/bin/env bash
+
+NOMAD_ALLOC_HOST_SHARED=/tmp/asapo/container_host_shared/nomad_alloc
+SERVICE_DATA_CLUSTER_SHARED=/tmp/asapo/asapo_cluster_shared/service_data
+DATA_GLOBAL_SHARED=/tmp/asapo/global_shared/data
+
+mkdir -p $NOMAD_ALLOC_HOST_SHARED $SERVICE_DATA_CLUSTER_SHARED $DATA_GLOBAL_SHARED
+chmod 777 $NOMAD_ALLOC_HOST_SHARED $SERVICE_DATA_CLUSTER_SHARED $DATA_GLOBAL_SHARED
+
+docker run --privileged --rm -v /var/run/docker.sock:/var/run/docker.sock \
+ 	-v /var/lib/docker:/var/lib/docker \
+	-v $NOMAD_ALLOC_HOST_SHARED:$NOMAD_ALLOC_HOST_SHARED \
+	-v $SERVICE_DATA_CLUSTER_SHARED:$SERVICE_DATA_CLUSTER_SHARED \
+	-v $DATA_GLOBAL_SHARED:$DATA_GLOBAL_SHARED \
+	-e NOMAD_ALLOC=$NOMAD_ALLOC_HOST_SHARED \
+	-e TF_VAR_service_dir=$SERVICE_DATA_CLUSTER_SHARED \
+	-e TF_VAR_data_dir=$DATA_GLOBAL_SHARED \
+ 	--name asapo --net=host -d yakser/asapo-cluster
 
diff --git a/deploy/docker/cluster/jobs/asapo-brokers.nmd b/deploy/docker/cluster/scripts/asapo-brokers.nmd.tpl
similarity index 69%
rename from deploy/docker/cluster/jobs/asapo-brokers.nmd
rename to deploy/docker/cluster/scripts/asapo-brokers.nmd.tpl
index eacb8592cd3d6b12757b1e34bfd692334bd97cb1..6de159576a01950716d6381d6dafd0a15db0b599 100644
--- a/deploy/docker/cluster/jobs/asapo-brokers.nmd
+++ b/deploy/docker/cluster/scripts/asapo-brokers.nmd.tpl
@@ -22,18 +22,19 @@ job "asapo-brokers" {
       driver = "docker"
       config {
         network_mode = "host"
-        dns_servers = ["127.0.0.1"]
-        image = "yakser/asapo-broker-dev:feature_virtualized-deployment.latest"
+        image = "yakser/asapo-broker${image_suffix}"
 	    force_pull = true
         volumes = ["local/config.json:/var/lib/broker/config.json"]
-        logging {
-            type = "fluentd"
-            config {
-                fluentd-address = "localhost:9881"
-                fluentd-async-connect = true
-                tag = "asapo.docker"
-            }
+        %{ if fluentd_logs }
+          logging {
+          type = "fluentd"
+          config {
+            fluentd-address = "localhost:9881"
+            fluentd-async-connect = true
+            tag = "asapo.docker"
+          }
         }
+        %{endif}
       }
 
       resources {
@@ -60,14 +61,14 @@ job "asapo-brokers" {
       }
 
       template {
-         source        = "/usr/local/nomad_jobs/broker.json.tpl"
+         source        = "${scripts_dir}/broker.json.tpl"
          destination   = "local/config.json"
          change_mode   = "restart"
       }
 
       template {
-        source        = "/usr/local/nomad_jobs/auth_secret.key"
-        destination   = "secrets/secret.key"
+        source        = "${scripts_dir}/auth_secret.key"
+        destination   = "local/secret.key"
         change_mode   = "restart"
       }
    } #task brokers
diff --git a/deploy/docker/cluster/jobs/asapo-logging.nmd b/deploy/docker/cluster/scripts/asapo-logging.nmd
similarity index 100%
rename from deploy/docker/cluster/jobs/asapo-logging.nmd
rename to deploy/docker/cluster/scripts/asapo-logging.nmd
diff --git a/deploy/docker/cluster/jobs/asapo-mongo.nmd b/deploy/docker/cluster/scripts/asapo-mongo.nmd.tpl
similarity index 83%
rename from deploy/docker/cluster/jobs/asapo-mongo.nmd
rename to deploy/docker/cluster/scripts/asapo-mongo.nmd.tpl
index be05168e3d880c9d15c607985853409f78cb1e11..444afddb21336b6a68dd177c7e2a9a6452a439bc 100644
--- a/deploy/docker/cluster/jobs/asapo-mongo.nmd
+++ b/deploy/docker/cluster/scripts/asapo-mongo.nmd.tpl
@@ -23,16 +23,15 @@ job "asapo-mongo" {
 
       config {
         network_mode = "host"
-        image = "mongo:4.0.0"
-        volumes = ["/${meta.shared_storage}/mongodb:/data/db"]
+        image = "mongo:${mongo_version}"
+        volumes = ["/${service_dir}/mongodb:/data/db"]
       }
 
       resources {
-        cpu    = 1500
-        memory = 12560
+        memory = "${mongo_total_memory_size}"
         network {
           port "mongo" {
-          static = 27017
+          static = "${mongo_port}"
           }
         }
       }
diff --git a/deploy/docker/cluster/jobs/asapo-nginx.nmd.tpl b/deploy/docker/cluster/scripts/asapo-nginx.nmd.tpl
similarity index 94%
rename from deploy/docker/cluster/jobs/asapo-nginx.nmd.tpl
rename to deploy/docker/cluster/scripts/asapo-nginx.nmd.tpl
index 79c972024b87e8750dc3cf9df0cbdff45a575f57..b48075b7d5e1e87e918b56dc2a047db97e065780 100644
--- a/deploy/docker/cluster/jobs/asapo-nginx.nmd.tpl
+++ b/deploy/docker/cluster/scripts/asapo-nginx.nmd.tpl
@@ -59,7 +59,7 @@ job "asapo-nginx" {
       }
 
       template {
-         source        = "/usr/local/nomad_jobs/nginx.conf.tpl"
+         source        = "${scripts_dir}/nginx.conf.tpl"
          destination   = "local/nginx.conf"
          change_mode   = "restart"
       }
diff --git a/deploy/docker/cluster/jobs/asapo-perfmetrics.nmd b/deploy/docker/cluster/scripts/asapo-perfmetrics.nmd.tpl
similarity index 74%
rename from deploy/docker/cluster/jobs/asapo-perfmetrics.nmd
rename to deploy/docker/cluster/scripts/asapo-perfmetrics.nmd.tpl
index bb8db1b2ba4aa1b8254915d17de74f5124de1e91..b9f97011b64e66dd93f9091f7d26cd992f3c4661 100644
--- a/deploy/docker/cluster/jobs/asapo-perfmetrics.nmd
+++ b/deploy/docker/cluster/scripts/asapo-perfmetrics.nmd.tpl
@@ -21,19 +21,20 @@ job "asapo-perfmetrics" {
       driver = "docker"
 
       config {
-        dns_servers = ["127.0.0.1"]
         network_mode = "host"
-        image = "influxdb"
-        volumes = ["/${meta.shared_storage}/influxdb:/var/lib/influxdb"]
+        image = "influxdb:${influxdb_version}"
+        volumes = ["/${service_dir}/influxdb:/var/lib/influxdb"]
+      }
+
+      env {
+        PRE_CREATE_DB="asapo_receivers;asapo_brokers"
       }
 
       resources {
-        cpu    = 1500
-        memory = 32000
+        memory = "${influxdb_total_memory_size}"
         network {
-          mbits = 10
           port "influxdb" {
-          static = 8086
+          static = "${influxdb_port}"
           }
         }
       }
@@ -62,24 +63,21 @@ job "asapo-perfmetrics" {
       driver = "docker"
 
       env {
-        GF_SERVER_DOMAIN = "${attr.unique.hostname}"
+        GF_SERVER_DOMAIN = "$${attr.unique.hostname}"
         GF_SERVER_ROOT_URL = "%(protocol)s://%(domain)s/performance/"
       }
 
       config {
-        dns_servers = ["127.0.0.1"]
         network_mode = "host"
-        image = "grafana/grafana"
-        volumes = ["/${meta.shared_storage}/grafana:/var/lib/grafana"]
+        image = "grafana/grafana:${grafana_version}"
+        volumes = ["/${service_dir}/grafana:/var/lib/grafana"]
       }
 
       resources {
-        cpu    = 1500
-        memory = 2560
+        memory = "${grafana_total_memory_size}"
         network {
-          mbits = 10
           port "grafana" {
-          static = 3000
+          static = "${grafana_port}"
           }
         }
       }
diff --git a/deploy/docker/cluster/jobs/asapo-receivers.nmd b/deploy/docker/cluster/scripts/asapo-receivers.nmd.tpl
similarity index 69%
rename from deploy/docker/cluster/jobs/asapo-receivers.nmd
rename to deploy/docker/cluster/scripts/asapo-receivers.nmd.tpl
index 1d3cf0cb2d870d394223c0e4e03f0a0940e5d847..83b5aeb602dedec7f75b29cb843adc8898908d63 100644
--- a/deploy/docker/cluster/jobs/asapo-receivers.nmd
+++ b/deploy/docker/cluster/scripts/asapo-receivers.nmd.tpl
@@ -24,17 +24,20 @@ job "asapo-receivers" {
       config {
         network_mode = "host"
         dns_servers = ["127.0.0.1"]
-        image = "yakser/asapo-receiver-dev:feature_virtualized-deployment.latest"
+        image = "yakser/asapo-receiver${image_suffix}"
 	    force_pull = true
         volumes = ["local/config.json:/var/lib/receiver/config.json",
-                   "/bldocuments/support/asapo/data:/var/lib/receiver/data"]
+                   "${data_dir}:/var/lib/receiver/data"]
+        %{ if fluentd_logs }
         logging {
-            type = "fluentd"
-            config {
-                fluentd-address = "localhost:9881"
-                tag = "asapo.docker"
-            }
+        type = "fluentd"
+        config {
+        fluentd-address = "localhost:9881"
+        fluentd-async-connect = true
+        tag = "asapo.docker"
         }
+        }
+        %{endif}
       }
 
       resources {
@@ -42,9 +45,11 @@ job "asapo-receivers" {
           port "recv" {}
           port "recv_ds" {}
         }
-          memory = 40000
+          memory = "${receiver_total_memory_size}"
       }
 
+
+
       service {
         name = "asapo-receiver"
         port = "recv"
@@ -63,8 +68,13 @@ job "asapo-receivers" {
         }
       }
 
+      meta {
+        receiver_dataserver_cache_size = "${receiver_dataserver_cache_size}"
+      }
+
+
       template {
-         source        = "/usr/local/nomad_jobs/receiver.json.tpl"
+         source        = "${scripts_dir}/receiver.json.tpl"
          destination   = "local/config.json"
          change_mode   = "restart"
       }
diff --git a/deploy/docker/cluster/jobs/asapo-services.nmd.tpl b/deploy/docker/cluster/scripts/asapo-services.nmd.tpl
similarity index 86%
rename from deploy/docker/cluster/jobs/asapo-services.nmd.tpl
rename to deploy/docker/cluster/scripts/asapo-services.nmd.tpl
index 38056a13e1eccffd63b1bcf5c5039de2bfc7d9ca..3922fb8a5ad75beaafab1ee2b8c22bc12f1cda4a 100644
--- a/deploy/docker/cluster/jobs/asapo-services.nmd.tpl
+++ b/deploy/docker/cluster/scripts/asapo-services.nmd.tpl
@@ -56,12 +56,12 @@ job "asapo-services" {
       }
 
       template {
-         source        = "/usr/local/nomad_jobs/authorizer.json.tpl"
+         source        = "${scripts_dir}/authorizer.json.tpl"
          destination   = "local/config.json"
          change_mode   = "restart"
       }
       template {
-        source        = "/usr/local/nomad_jobs/auth_secret.key"
+        source        = "${scripts_dir}/auth_secret.key"
         destination   = "local/secret.key"
         change_mode   = "restart"
       }
@@ -79,14 +79,16 @@ job "asapo-services" {
         image = "yakser/asapo-discovery${image_suffix}"
 	    force_pull = true
         volumes = ["local/config.json:/var/lib/discovery/config.json"]
+        %{ if fluentd_logs }
         logging {
-            type = "fluentd"
-            config {
-                fluentd-address = "localhost:9881"
-                fluentd-async-connect = true
-                tag = "asapo.docker"
-            }
+        type = "fluentd"
+        config {
+        fluentd-address = "localhost:9881"
+        fluentd-async-connect = true
+        tag = "asapo.docker"
+        }
         }
+        %{endif}
       }
 
       resources {
@@ -119,7 +121,7 @@ job "asapo-services" {
       }
 
       template {
-         source        = "/usr/local/nomad_jobs/discovery.json.tpl"
+         source        = "${scripts_dir}/discovery.json.tpl"
          destination   = "local/config.json"
          change_mode   = "restart"
       }
diff --git a/deploy/docker/cluster/jobs/asapo.auto.tfvars b/deploy/docker/cluster/scripts/asapo.auto.tfvars
similarity index 97%
rename from deploy/docker/cluster/jobs/asapo.auto.tfvars
rename to deploy/docker/cluster/scripts/asapo.auto.tfvars
index 060182276b0fa01388486482fb1d6cbe50e9bf63..62a3bf07419046ffb746890bf61a3fe365ff5be0 100644
--- a/deploy/docker/cluster/jobs/asapo.auto.tfvars
+++ b/deploy/docker/cluster/scripts/asapo.auto.tfvars
@@ -1,4 +1,7 @@
 nginx_version = "1.14"
+
+
+
 asapo_imagename_suffix="-dev"
 asapo_image_tag = "feature_virtualized-deployment.latest"
 
diff --git a/deploy/docker/cluster/jobs/auth_secret.key b/deploy/docker/cluster/scripts/auth_secret.key
similarity index 100%
rename from deploy/docker/cluster/jobs/auth_secret.key
rename to deploy/docker/cluster/scripts/auth_secret.key
diff --git a/deploy/docker/cluster/jobs/authorizer.json.tpl b/deploy/docker/cluster/scripts/authorizer.json.tpl
similarity index 100%
rename from deploy/docker/cluster/jobs/authorizer.json.tpl
rename to deploy/docker/cluster/scripts/authorizer.json.tpl
diff --git a/deploy/docker/cluster/scripts/broker.json.tpl b/deploy/docker/cluster/scripts/broker.json.tpl
new file mode 100644
index 0000000000000000000000000000000000000000..f924faa3aa3bed06176b0ce879dcfded9f21ab6f
--- /dev/null
+++ b/deploy/docker/cluster/scripts/broker.json.tpl
@@ -0,0 +1,9 @@
+{
+  "DatabaseServer":"auto",
+  "DiscoveryServer": "localhost:8400/discovery",
+  "PerformanceDbServer":"localhost:8400/influxdb",
+  "PerformanceDbName": "asapo_brokers",
+  "port":{{ env "NOMAD_PORT_broker" }},
+  "LogLevel":"info",
+  "SecretFile":"/local/secret.key"
+}
diff --git a/deploy/docker/cluster/jobs/discovery.json.tpl b/deploy/docker/cluster/scripts/discovery.json.tpl
similarity index 100%
rename from deploy/docker/cluster/jobs/discovery.json.tpl
rename to deploy/docker/cluster/scripts/discovery.json.tpl
diff --git a/deploy/docker/cluster/jobs/fluentd.conf b/deploy/docker/cluster/scripts/fluentd.conf
similarity index 100%
rename from deploy/docker/cluster/jobs/fluentd.conf
rename to deploy/docker/cluster/scripts/fluentd.conf
diff --git a/deploy/docker/cluster/jobs/kibana.yml b/deploy/docker/cluster/scripts/kibana.yml
similarity index 100%
rename from deploy/docker/cluster/jobs/kibana.yml
rename to deploy/docker/cluster/scripts/kibana.yml
diff --git a/deploy/docker/cluster/jobs/nginx.conf.tpl b/deploy/docker/cluster/scripts/nginx.conf.tpl
similarity index 92%
rename from deploy/docker/cluster/jobs/nginx.conf.tpl
rename to deploy/docker/cluster/scripts/nginx.conf.tpl
index 9c5f88b893e54cb4a27aa9a5916da3ad5aeba392..72a7cefa1e0a1e6f7d5a618ad4cb78363b428a8f 100644
--- a/deploy/docker/cluster/jobs/nginx.conf.tpl
+++ b/deploy/docker/cluster/scripts/nginx.conf.tpl
@@ -22,15 +22,9 @@ http {
           set $fluentd_endpoint fluentd.service.asapo;
           set $kibana_endpoint kibana.service.asapo;
           set $grafana_endpoint grafana.service.asapo;
-          set $mongo_endpoint mongo.service.asapo;
           set $influxdb_endpoint influxdb.service.asapo;
           set $elasticsearch_endpoint elasticsearch.service.asapo;
 
-   		  location /mongo/ {
-            rewrite ^/mongo(/.*) $1 break;
-            proxy_pass http://$mongo_endpoint:27017$uri$is_args$args;
-          }
-
    		  location /influxdb/ {
             rewrite ^/influxdb(/.*) $1 break;
             proxy_pass http://$influxdb_endpoint:8086$uri$is_args$args;
@@ -81,7 +75,6 @@ stream {
         default fluentd.service.asapo;
     }
 
-
     server {
         listen     9881;
         proxy_pass $upstream:24224;
diff --git a/deploy/docker/cluster/scripts/provider.tf b/deploy/docker/cluster/scripts/provider.tf
new file mode 100644
index 0000000000000000000000000000000000000000..19058d84779c97a33d9dafc55181127ee389902e
--- /dev/null
+++ b/deploy/docker/cluster/scripts/provider.tf
@@ -0,0 +1,4 @@
+provider "nomad" {
+  address = "http://localhost:4646"
+}
+
diff --git a/deploy/docker/cluster/jobs/receiver.json.tpl b/deploy/docker/cluster/scripts/receiver.json.tpl
similarity index 66%
rename from deploy/docker/cluster/jobs/receiver.json.tpl
rename to deploy/docker/cluster/scripts/receiver.json.tpl
index 4410725d81a1c6fde5bd8d5ec0a1df639ff387df..b1948f8b1e9d216887c26447220816a062a567da 100644
--- a/deploy/docker/cluster/jobs/receiver.json.tpl
+++ b/deploy/docker/cluster/scripts/receiver.json.tpl
@@ -1,7 +1,8 @@
 {
-  "MonitorDbAddress":"localhost:8400/influxdb",
-  "MonitorDbName": "asapo_receivers",
-  "BrokerDbAddress":"localhost:8400/mongo",
+  "PerformanceDbServer":"localhost:8400/influxdb",
+  "PerformanceDbName": "asapo_receivers",
+  "DatabaseServer":"auto",
+  "DiscoveryServer": "localhost:8400/discovery",
   "AuthorizationServer": "localhost:8400/authorizer",
   "AuthorizationInterval": 10000,
   "ListenPort": {{ env "NOMAD_PORT_recv" }},
@@ -11,7 +12,7 @@
   },
   "DataCache": {
     "Use": true,
-    "SizeGB": 30,
+    "SizeGB": {{ env "NOMAD_META_receiver_dataserver_cache_size" }},
     "ReservedShare": 10
   },
   "Tag": "{{ env "NOMAD_ADDR_recv" }}",
diff --git a/deploy/docker/cluster/scripts/resources.tf b/deploy/docker/cluster/scripts/resources.tf
new file mode 100644
index 0000000000000000000000000000000000000000..274425056c0fd89faf27fedf475691ec86702627
--- /dev/null
+++ b/deploy/docker/cluster/scripts/resources.tf
@@ -0,0 +1,23 @@
+resource "nomad_job" "asapo-perfmetrics" {
+  jobspec = "${data.template_file.asapo_perfmetrics.rendered}"
+}
+
+resource "nomad_job" "asapo-mongo" {
+  jobspec = "${data.template_file.asapo_mongo.rendered}"
+}
+
+resource "nomad_job" "asapo-nginx" {
+  jobspec = "${data.template_file.nginx.rendered}"
+}
+
+resource "nomad_job" "asapo-services" {
+  jobspec = "${data.template_file.asapo_services.rendered}"
+}
+
+resource "nomad_job" "asapo-receivers" {
+  jobspec = "${data.template_file.asapo_receivers.rendered}"
+}
+
+resource "nomad_job" "asapo-brokers" {
+  jobspec = "${data.template_file.asapo_brokers.rendered}"
+}
diff --git a/deploy/docker/cluster/scripts/templates.tf b/deploy/docker/cluster/scripts/templates.tf
new file mode 100644
index 0000000000000000000000000000000000000000..edf06bbadac4920b4d17f866bac0020c8f24d7eb
--- /dev/null
+++ b/deploy/docker/cluster/scripts/templates.tf
@@ -0,0 +1,63 @@
+data "template_file" "nginx" {
+  template = "${file("${var.job_scripts_dir}/asapo-nginx.nmd.tpl")}"
+  vars = {
+    scripts_dir = "${var.job_scripts_dir}"
+    nginx_version = "${var.nginx_version}"
+  }
+}
+
+data "template_file" "asapo_services" {
+  template = "${file("${var.job_scripts_dir}/asapo-services.nmd.tpl")}"
+  vars = {
+    scripts_dir = "${var.job_scripts_dir}"
+    image_suffix = "${var.asapo_imagename_suffix}:${var.asapo_image_tag}"
+    fluentd_logs = "${var.fluentd_logs}"
+  }
+}
+
+data "template_file" "asapo_receivers" {
+  template = "${file("${var.job_scripts_dir}/asapo-receivers.nmd.tpl")}"
+  vars = {
+    scripts_dir = "${var.job_scripts_dir}"
+    data_dir = "${var.data_dir}"
+    image_suffix = "${var.asapo_imagename_suffix}:${var.asapo_image_tag}"
+    fluentd_logs = "${var.fluentd_logs}"
+    receiver_total_memory_size = "${var.receiver_total_memory_size}"
+    receiver_dataserver_cache_size = "${var.receiver_dataserver_cache_size}"
+  }
+}
+
+data "template_file" "asapo_brokers" {
+  template = "${file("${var.job_scripts_dir}/asapo-brokers.nmd.tpl")}"
+  vars = {
+    scripts_dir = "${var.job_scripts_dir}"
+    image_suffix = "${var.asapo_imagename_suffix}:${var.asapo_image_tag}"
+    fluentd_logs = "${var.fluentd_logs}"
+  }
+}
+
+
+data "template_file" "asapo_perfmetrics" {
+  template = "${file("${var.job_scripts_dir}/asapo-perfmetrics.nmd.tpl")}"
+  vars = {
+    service_dir = "${var.service_dir}"
+    influxdb_version = "${var.influxdb_version}"
+    grafana_version = "${var.grafana_version}"
+    grafana_total_memory_size = "${var.grafana_total_memory_size}"
+    grafana_port = "${var.grafana_port}"
+    influxdb_total_memory_size = "${var.influxdb_total_memory_size}"
+    influxdb_port = "${var.influxdb_port}"
+
+  }
+}
+
+
+data "template_file" "asapo_mongo" {
+  template = "${file("${var.job_scripts_dir}/asapo-mongo.nmd.tpl")}"
+  vars = {
+    service_dir = "${var.service_dir}"
+    mongo_version = "${var.mongo_version}"
+    mongo_total_memory_size = "${var.mongo_total_memory_size}"
+    mongo_port = "${var.mongo_port}"
+  }
+}
diff --git a/deploy/docker/cluster/scripts/vars.tf b/deploy/docker/cluster/scripts/vars.tf
new file mode 100644
index 0000000000000000000000000000000000000000..8144db2fb5e9c5fc506cf58a6ecd9c14fbf1a970
--- /dev/null
+++ b/deploy/docker/cluster/scripts/vars.tf
@@ -0,0 +1,74 @@
+variable "fluentd_logs" {
+ default = true
+}
+
+variable "nginx_version" {
+  default = "latest"
+}
+
+variable "grafana_version" {
+  default = "latest"
+}
+
+
+variable "influxdb_version" {
+  default = "latest"
+}
+
+variable "asapo_imagename_suffix" {
+  default = ""
+}
+
+variable "asapo_image_tag" {
+#  default = "latest"
+}
+
+variable "job_scripts_dir" {
+  default = "/var/run/asapo"
+}
+
+variable "service_dir" {
+}
+
+variable "data_dir" {
+}
+
+
+variable "receiver_total_memory_size" {
+  default = "2000" #mb
+}
+
+variable "receiver_dataserver_cache_size" {
+  default = "1"
+}
+
+variable "grafana_total_memory_size" {
+  default = "2000" #mb
+}
+
+variable "influxdb_total_memory_size" {
+  default = "2000" #mb
+}
+
+variable "grafana_port" {
+  default = "3000"
+}
+
+variable "influxdb_port" {
+  default = "8086"
+}
+
+
+variable "mongo_port" {
+  default = "27017"
+}
+
+
+variable "mongo_version" {
+  default = "4.0.0"
+}
+
+variable "mongo_total_memory_size" {
+  default = "300"
+}
+
diff --git a/deploy/docker/cluster/supervisord.conf b/deploy/docker/cluster/supervisord.conf
index 0c5c6c7ba41a58b7ca3d4629f64d3f17490904e6..d6cc9070ea2527df05e7a721c288480165c0f3c0 100644
--- a/deploy/docker/cluster/supervisord.conf
+++ b/deploy/docker/cluster/supervisord.conf
@@ -12,5 +12,5 @@ command=/bin/consul agent -dev -client 0.0.0.0 -domain asapo -recursor=8.8.8.8
 #-config-dir=/etc/consul.d
 
 [program:nomad]
-command=/bin/nomad agent -data-dir=/tmp/nomad -dev -client -bind 0.0.0.0
+command=/bin/nomad agent -dev -client -bind 0.0.0.0 -alloc-dir=%(ENV_NOMAD_ALLOC)s
 # -config=/etc/nomad.d
diff --git a/deploy/docker/cluster/tf_run.sh b/deploy/docker/cluster/tf_run.sh
new file mode 100755
index 0000000000000000000000000000000000000000..13bab997ea8dea1d2c26c92ef7dce200c1fd8041
--- /dev/null
+++ b/deploy/docker/cluster/tf_run.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+docker exec -w /var/run/asapo asapo terraform init
+
+docker exec -w /var/run/asapo asapo terraform apply -auto-approve -var fluentd_logs=false
\ No newline at end of file
diff --git a/deploy/nomad_jobs/broker.json.tpl b/deploy/nomad_jobs/broker.json.tpl
index ddaeb14864d09eb6ae5a90cbadef5838ad195297..88f2df6be812c2b3bdd9a2872309b9df4ec94be8 100644
--- a/deploy/nomad_jobs/broker.json.tpl
+++ b/deploy/nomad_jobs/broker.json.tpl
@@ -1,7 +1,8 @@
 {
-  "BrokerDbAddress":"localhost:8400/mongo",
-  "MonitorDbAddress":"localhost:8400/influxdb",
-  "MonitorDbName": "asapo_brokers",
+  "DatabaseServer":"auto",
+  "DiscoveryServer": "localhost:8400/discovery",
+  "PerformanceDbServer":"localhost:8400/influxdb",
+  "PerformanceDbName": "asapo_brokers",
   "port":{{ env "NOMAD_PORT_broker" }},
   "LogLevel":"info",
   "SecretFile":"/secrets/secret.key"
diff --git a/deploy/nomad_jobs/nginx.conf.tpl b/deploy/nomad_jobs/nginx.conf.tpl
index c53d8c503feff9ad31c497a3c372d879dd950ff2..20dccdcd4a9bdeaaae74209c4c6df8448fe915d0 100644
--- a/deploy/nomad_jobs/nginx.conf.tpl
+++ b/deploy/nomad_jobs/nginx.conf.tpl
@@ -22,15 +22,9 @@ http {
           set $fluentd_endpoint fluentd.service.asapo;
           set $kibana_endpoint kibana.service.asapo;
           set $grafana_endpoint grafana.service.asapo;
-          set $mongo_endpoint mongo.service.asapo;
           set $influxdb_endpoint influxdb.service.asapo;
           set $elasticsearch_endpoint elasticsearch.service.asapo;
 
-   		  location /mongo/ {
-            rewrite ^/mongo(/.*) $1 break;
-            proxy_pass http://$mongo_endpoint:27017$uri$is_args$args;
-          }
-
    		  location /influxdb/ {
             rewrite ^/influxdb(/.*) $1 break;
             proxy_pass http://$influxdb_endpoint:8086$uri$is_args$args;
diff --git a/deploy/nomad_jobs/receiver.json.tpl b/deploy/nomad_jobs/receiver.json.tpl
index 4410725d81a1c6fde5bd8d5ec0a1df639ff387df..648f8b59fae7f8b41176b64b48023d0cbfe4a266 100644
--- a/deploy/nomad_jobs/receiver.json.tpl
+++ b/deploy/nomad_jobs/receiver.json.tpl
@@ -1,7 +1,8 @@
 {
-  "MonitorDbAddress":"localhost:8400/influxdb",
-  "MonitorDbName": "asapo_receivers",
-  "BrokerDbAddress":"localhost:8400/mongo",
+  "PerformanceDbServer":"localhost:8400/influxdb",
+  "PerformanceDbName": "asapo_receivers",
+  "DatabaseServer":"auto",
+  "DiscoveryServer": "localhost:8400/discovery",
   "AuthorizationServer": "localhost:8400/authorizer",
   "AuthorizationInterval": 10000,
   "ListenPort": {{ env "NOMAD_PORT_recv" }},
diff --git a/discovery/src/asapo_discovery/request_handler/request_handler.go b/discovery/src/asapo_discovery/request_handler/request_handler.go
index 7c33af3f8c210c95ef73e37b523b8cfcdc9d253f..f3a036597d13c7171fb0a29ce344e8894f9af818 100644
--- a/discovery/src/asapo_discovery/request_handler/request_handler.go
+++ b/discovery/src/asapo_discovery/request_handler/request_handler.go
@@ -5,6 +5,7 @@ import "asapo_common/utils"
 type Agent interface {
 	GetReceivers() ([]byte, error)
 	GetBroker() ([]byte, error)
+	GetMongo() ([]byte, error)
 	Init(settings utils.Settings) error
 }
 
diff --git a/discovery/src/asapo_discovery/request_handler/request_handler_consul.go b/discovery/src/asapo_discovery/request_handler/request_handler_consul.go
index 9c212504522a2ab0ba2d55a641f4c61293adba18..ceda776a0e2c6c6e79e5a62d37eb2519927cdbe1 100644
--- a/discovery/src/asapo_discovery/request_handler/request_handler_consul.go
+++ b/discovery/src/asapo_discovery/request_handler/request_handler_consul.go
@@ -12,6 +12,7 @@ import (
 type ConsulRequestHandler struct {
 	MaxConnections int
 	client         *api.Client
+	staticHandler  *StaticRequestHandler
 }
 
 type Responce struct {
@@ -48,20 +49,30 @@ func (rh *ConsulRequestHandler) GetServices(name string) ([]string, error) {
 }
 
 func (rh *ConsulRequestHandler) GetReceivers() ([]byte, error) {
+	if len(rh.staticHandler.receiverResponce.Uris) > 0 {
+		return rh.staticHandler.GetReceivers()
+	}
+
+
+	var response Responce
+	response.MaxConnections = rh.MaxConnections
+
 	if (rh.client == nil) {
 		return nil, errors.New("consul client not connected")
 	}
-	var response Responce
 	var err error
 	response.Uris, err = rh.GetServices("asapo-receiver")
 	if err != nil {
 		return nil, err
 	}
-	response.MaxConnections = rh.MaxConnections
 	return utils.MapToJson(&response)
 }
 
 func (rh *ConsulRequestHandler) GetBroker() ([]byte, error) {
+	if len(rh.staticHandler.broker) > 0 {
+		return rh.staticHandler.GetBroker()
+	}
+
 	if (rh.client == nil) {
 		return nil, errors.New("consul client not connected")
 	}
@@ -78,6 +89,28 @@ func (rh *ConsulRequestHandler) GetBroker() ([]byte, error) {
 	return nil, nil
 }
 
+func (rh *ConsulRequestHandler) GetMongo() ([]byte, error) {
+	if len(rh.staticHandler.mongo) > 0 {
+		return rh.staticHandler.GetMongo()
+	}
+
+	if (rh.client == nil) {
+		return nil, errors.New("consul client not connected")
+	}
+	response, err := rh.GetServices("asapo-mongo")
+	if err != nil {
+		return nil, err
+	}
+	size := len(response)
+	if size ==0 {
+		return []byte(""),nil
+	}else {
+		return []byte(response[counter.Next(size)]),nil
+	}
+	return nil, nil
+}
+
+
 func (rh *ConsulRequestHandler) connectClient(uri string) (client *api.Client, err error) {
 	config := api.DefaultConfig()
 	if len(uri) > 0 {
@@ -94,6 +127,8 @@ func (rh *ConsulRequestHandler) connectClient(uri string) (client *api.Client, e
 }
 
 func (rh *ConsulRequestHandler) Init(settings utils.Settings) (err error) {
+	rh.staticHandler = new(StaticRequestHandler)
+	rh.staticHandler.Init(settings)
 	rh.MaxConnections = settings.Receiver.MaxConnections
 	if len(settings.ConsulEndpoints) == 0 {
 		rh.client, err = rh.connectClient("")
diff --git a/discovery/src/asapo_discovery/request_handler/request_handler_consul_test.go b/discovery/src/asapo_discovery/request_handler/request_handler_consul_test.go
index 002afc6e0f56fdda9ef764f084cae042cf09f530..e1f9d47b0292f86e8c77c95d670fde7f8669088f 100644
--- a/discovery/src/asapo_discovery/request_handler/request_handler_consul_test.go
+++ b/discovery/src/asapo_discovery/request_handler/request_handler_consul_test.go
@@ -51,6 +51,7 @@ func (suite *ConsulHandlerTestSuite) SetupTest() {
 
 	suite.registerAgents("asapo-receiver")
 	suite.registerAgents("asapo-broker")
+	suite.registerAgents("asapo-mongo")
 
 }
 
@@ -59,6 +60,9 @@ func (suite *ConsulHandlerTestSuite) TearDownTest() {
 	suite.client.Agent().ServiceDeregister("asapo-receiver1235")
 	suite.client.Agent().ServiceDeregister("asapo-broker1234")
 	suite.client.Agent().ServiceDeregister("asapo-broker1235")
+	suite.client.Agent().ServiceDeregister("asapo-mongo1234")
+	suite.client.Agent().ServiceDeregister("asapo-mongo1235")
+
 }
 
 func (suite *ConsulHandlerTestSuite) TestInitDefaultUri() {
@@ -95,6 +99,14 @@ func (suite *ConsulHandlerTestSuite) TestGetReceivers() {
 	suite.Equal("{\"MaxConnections\":10,\"Uris\":[\"127.0.0.1:1234\",\"127.0.0.1:1235\"]}", string(res), "uris")
 }
 
+func (suite *ConsulHandlerTestSuite) TestGetReceiversStatic() {
+	consul_settings.Receiver.StaticEndpoints= []string{"127.0.0.1:0000"}
+	suite.handler.Init(consul_settings)
+	res, err := suite.handler.GetReceivers()
+	suite.NoError(err, "")
+	suite.Equal("{\"MaxConnections\":10,\"Uris\":[\"127.0.0.1:0000\"]}", string(res), "uris")
+}
+
 func (suite *ConsulHandlerTestSuite) TestGetReceiversWhenNotConnected() {
 	consul_settings.ConsulEndpoints = []string{"blabla"}
 	suite.handler.Init(consul_settings)
@@ -126,6 +138,45 @@ func (suite *ConsulHandlerTestSuite) TestGetBrokerRoundRobin() {
 }
 
 
+func (suite *ConsulHandlerTestSuite) TestGetMongoRoundRobin() {
+	suite.handler.Init(consul_settings)
+	res, err := suite.handler.GetMongo()
+	suite.NoError(err, "")
+	suite.Equal("127.0.0.1:1235", string(res), "uris")
+
+	res, err = suite.handler.GetMongo()
+	suite.NoError(err, "")
+	suite.Equal("127.0.0.1:1234", string(res), "uris")
+
+	res, err = suite.handler.GetMongo()
+	suite.NoError(err, "")
+	suite.Equal("127.0.0.1:1235", string(res), "uris")
+}
+
+func (suite *ConsulHandlerTestSuite) TestGetMongoStatic() {
+	consul_settings.Mongo.StaticEndpoint="127.0.0.1:0000"
+	suite.handler.Init(consul_settings)
+	res, err := suite.handler.GetMongo()
+	suite.NoError(err, "")
+	suite.Equal("127.0.0.1:0000", string(res), "uris")
+
+	res, err = suite.handler.GetMongo()
+	suite.NoError(err, "")
+	suite.Equal("127.0.0.1:0000", string(res), "uris")
+}
+
+func (suite *ConsulHandlerTestSuite) TestGetBrokerStatic() {
+	consul_settings.Broker.StaticEndpoint="127.0.0.1:0000"
+	suite.handler.Init(consul_settings)
+	res, err := suite.handler.GetBroker()
+	suite.NoError(err, "")
+	suite.Equal("127.0.0.1:0000", string(res), "uris")
+
+	res, err = suite.handler.GetBroker()
+	suite.NoError(err, "")
+	suite.Equal("127.0.0.1:0000", string(res), "uris")
+}
+
 func (suite *ConsulHandlerTestSuite) TestGetBrokerEmpty() {
 	suite.client.Agent().ServiceDeregister("asapo-broker1234")
 	suite.client.Agent().ServiceDeregister("asapo-broker1235")
diff --git a/discovery/src/asapo_discovery/request_handler/request_handler_static.go b/discovery/src/asapo_discovery/request_handler/request_handler_static.go
index f9668a541b435199b18325830f482869b2a5ff4f..8db0d0eb3a79a8945198b7f3ab86080c3f37167e 100644
--- a/discovery/src/asapo_discovery/request_handler/request_handler_static.go
+++ b/discovery/src/asapo_discovery/request_handler/request_handler_static.go
@@ -7,6 +7,7 @@ import (
 type StaticRequestHandler struct {
 	receiverResponce Responce
 	broker string
+	mongo string
 }
 
 
@@ -18,10 +19,17 @@ func (rh *StaticRequestHandler) GetBroker() ([]byte, error) {
 	return []byte(rh.broker),nil
 }
 
+func (rh *StaticRequestHandler) GetMongo() ([]byte, error) {
+	return []byte(rh.mongo),nil
+}
+
+
 
 func (rh *StaticRequestHandler) Init(settings utils.Settings) error {
 	rh.receiverResponce.MaxConnections = settings.Receiver.MaxConnections
 	rh.receiverResponce.Uris = settings.Receiver.StaticEndpoints
 	rh.broker = settings.Broker.StaticEndpoint
+	rh.mongo = settings.Mongo.StaticEndpoint
+
 	return nil
 }
diff --git a/discovery/src/asapo_discovery/request_handler/request_handler_static_test.go b/discovery/src/asapo_discovery/request_handler/request_handler_static_test.go
index eaeed6fab59cb018123a9d3e194b5147dd5d7a80..1fab0ce8e805de9cda43776036513e08421e182e 100644
--- a/discovery/src/asapo_discovery/request_handler/request_handler_static_test.go
+++ b/discovery/src/asapo_discovery/request_handler/request_handler_static_test.go
@@ -11,7 +11,7 @@ var uris = []string{"ip1","ip2"}
 const max_conn = 1
 
 var static_settings utils.Settings= utils.Settings{Receiver:utils.ReceiverInfo{MaxConnections:max_conn,StaticEndpoints:uris},Broker:utils.BrokerInfo{
-	StaticEndpoint:"ip_broker"}}
+	StaticEndpoint:"ip_broker"}, Mongo:utils.MongoInfo{StaticEndpoint:"ip_mongo"}}
 
 
 
@@ -35,3 +35,10 @@ func TestStaticHandlerGetBrokerOK(t *testing.T) {
 	assert.Equal(t,string(res), "ip_broker")
 	assert.Nil(t, err)
 }
+
+func TestStaticHandlerGetMongoOK(t *testing.T) {
+	rh.Init(static_settings)
+	res,err := rh.GetMongo()
+	assert.Equal(t,string(res), "ip_mongo")
+	assert.Nil(t, err)
+}
diff --git a/discovery/src/asapo_discovery/server/get_receivers.go b/discovery/src/asapo_discovery/server/get_receivers.go
index 8f946dd7b5781e91b3deb0cbae88735be365a42e..df0e5aa3b42d284b43ac242b5f8ea22e5040cf2a 100644
--- a/discovery/src/asapo_discovery/server/get_receivers.go
+++ b/discovery/src/asapo_discovery/server/get_receivers.go
@@ -15,6 +15,10 @@ func getService(service string) (answer []byte, code int) {
 	case "broker":
 		answer, err = requestHandler.GetBroker()
 		break
+	case "mongo":
+		answer, err = requestHandler.GetMongo()
+		break
+
 	default:
 		err = errors.New("wrong request: "+service)
 	}
@@ -42,3 +46,10 @@ func routeGetBroker(w http.ResponseWriter, r *http.Request) {
 	w.WriteHeader(code)
 	w.Write(answer)
 }
+
+func routeGetMongo(w http.ResponseWriter, r *http.Request) {
+	r.Header.Set("Content-type", "application/json")
+	answer,code := getService("mongo")
+	w.WriteHeader(code)
+	w.Write(answer)
+}
diff --git a/discovery/src/asapo_discovery/server/listroutes.go b/discovery/src/asapo_discovery/server/listroutes.go
index b6f36de2d9a1bb1883b468b4b4efbb1c4960c715..4eb1f5641f6ab8cad8a02d85eee32f706234fd48 100644
--- a/discovery/src/asapo_discovery/server/listroutes.go
+++ b/discovery/src/asapo_discovery/server/listroutes.go
@@ -17,5 +17,11 @@ var listRoutes = utils.Routes{
 		"/broker",
 		routeGetBroker,
 	},
+	utils.Route{
+		"GetMongo",
+		"Get",
+		"/mongo",
+		routeGetMongo,
+	},
 
 }
diff --git a/discovery/src/asapo_discovery/server/routes_test.go b/discovery/src/asapo_discovery/server/routes_test.go
index a983fa2b9ab58094d5d60686e2cfa9c9782eb31c..d838ee181fc3fa1f3d63e1f43798dab129dcf608 100644
--- a/discovery/src/asapo_discovery/server/routes_test.go
+++ b/discovery/src/asapo_discovery/server/routes_test.go
@@ -31,7 +31,7 @@ type GetReceiversTestSuite struct {
 func (suite *GetReceiversTestSuite) SetupTest() {
 	requestHandler = new(request_handler.StaticRequestHandler)
 	var s utils.Settings= utils.Settings{Receiver:utils.ReceiverInfo{MaxConnections:10,StaticEndpoints:[]string{"ip1","ip2"}},
-	Broker:utils.BrokerInfo{StaticEndpoint:"ip_broker"}}
+	Broker:utils.BrokerInfo{StaticEndpoint:"ip_broker"},Mongo:utils.MongoInfo{StaticEndpoint:"ip_mongo"}}
 
 	requestHandler.Init(s)
 	logger.SetMockLog()
@@ -77,4 +77,12 @@ func (suite *GetReceiversTestSuite) TestGetBroker() {
 	assertExpectations(suite.T())
 }
 
+func (suite *GetReceiversTestSuite) TestGetMongo() {
+	logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("processing get mongo")))
 
+	w := doRequest("/mongo")
+
+	suite.Equal(http.StatusOK, w.Code, "code ok")
+	suite.Equal(w.Body.String(), "ip_mongo", "result")
+	assertExpectations(suite.T())
+}
diff --git a/discovery/src/asapo_discovery/server/settings_test.go b/discovery/src/asapo_discovery/server/settings_test.go
index 50307b06a23a089dfa88d53f5ae57d0b0a83e8e0..b6025b25a5f681940d7728883abe3c07b427f6e8 100644
--- a/discovery/src/asapo_discovery/server/settings_test.go
+++ b/discovery/src/asapo_discovery/server/settings_test.go
@@ -14,6 +14,7 @@ func fillSettings(mode string) utils.Settings {
 	settings.LogLevel = "info"
 	settings.Receiver.StaticEndpoints=[]string{"ip1","ip2"}
 	settings.Broker.StaticEndpoint="ip_b"
+	settings.Mongo.StaticEndpoint="ip_m"
 	settings.ConsulEndpoints=[]string{"ipc1","ipc2"}
 	return settings
 }
@@ -44,6 +45,12 @@ func TestSettingsStaticModeNoBrokerEndpoints(t *testing.T) {
 	assert.NotNil(t, err)
 }
 
+func TestSettingsStaticModeNoMongoEndpoints(t *testing.T) {
+	settings := fillSettings("static")
+	settings.Mongo.StaticEndpoint=""
+	err := settings.Validate()
+	assert.NotNil(t, err)
+}
 
 
 func TestSettingsConsulModeNoEndpoints(t *testing.T) {
diff --git a/receiver/src/receiver_config.cpp b/receiver/src/receiver_config.cpp
index ef8c56b3a22b6652c3f775e1e615a60943b47f22..ae55ea992df809839004dbc981d001d8a81c2681 100644
--- a/receiver/src/receiver_config.cpp
+++ b/receiver/src/receiver_config.cpp
@@ -18,7 +18,7 @@ Error ReceiverConfigFactory::SetConfig(std::string file_name) {
     std::string log_level;
     Error err;
 
-    (err = parser.GetString("MonitorDbAddress", &config.monitor_db_uri)) ||
+    (err = parser.GetString("PerformanceDbServer", &config.performance_db_uri)) ||
     (err = parser.GetUInt64("ListenPort", &config.listen_port)) ||
     (err = parser.Embedded("DataServer").GetUInt64("ListenPort", &config.dataserver.listen_port)) ||
     (err = parser.Embedded("DataServer").GetUInt64("NThreads", &config.dataserver.nthreads)) ||
@@ -27,12 +27,13 @@ Error ReceiverConfigFactory::SetConfig(std::string file_name) {
     (err = parser.Embedded("DataCache").GetBool("Use", &config.use_datacache)) ||
     (err = parser.Embedded("DataCache").GetUInt64("SizeGB", &config.datacache_size_gb)) ||
     (err = parser.Embedded("DataCache").GetUInt64("ReservedShare", &config.datacache_reserved_share)) ||
-    (err = parser.GetString("BrokerDbAddress", &config.broker_db_uri)) ||
+    (err = parser.GetString("DatabaseServer", &config.database_uri)) ||
+    (err = parser.GetString("DiscoveryServer", &config.discovery_server)) ||
     (err = parser.GetString("Tag", &config.tag)) ||
     (err = parser.GetString("AuthorizationServer", &config.authorization_server)) ||
     (err = parser.GetUInt64("AuthorizationInterval", &config.authorization_interval_ms)) ||
     (err = parser.GetString("RootFolder", &config.root_folder)) ||
-    (err = parser.GetString("MonitorDbName", &config.monitor_db_name)) ||
+    (err = parser.GetString("PerformanceDbName", &config.performance_db_name)) ||
     (err = parser.GetString("LogLevel", &log_level));
 
     if (err) {
diff --git a/receiver/src/receiver_config.h b/receiver/src/receiver_config.h
index 0181c86f2ae8bbe1e03da1b867da098b70fe6647..7052ac71e260a8f553366b6f4a22a3daab5c7539 100644
--- a/receiver/src/receiver_config.h
+++ b/receiver/src/receiver_config.h
@@ -9,9 +9,9 @@
 namespace asapo {
 
 struct ReceiverConfig {
-    std::string monitor_db_uri;
-    std::string monitor_db_name;
-    std::string broker_db_uri;
+    std::string performance_db_uri;
+    std::string performance_db_name;
+    std::string database_uri;
     std::string root_folder;
     uint64_t listen_port = 0;
     std::string authorization_server;
@@ -25,6 +25,7 @@ struct ReceiverConfig {
     std::string tag;
     std::string source_host;
     ReceiverDataCenterConfig dataserver;
+    std::string discovery_server;
 };
 
 const ReceiverConfig* GetReceiverConfig();
diff --git a/receiver/src/receiver_error.h b/receiver/src/receiver_error.h
index 1a12952e36c5162115de4e1c9af6ba71dd979dcd..ccfffa5f7aa3f07f12684efb67f05c335163feed 100644
--- a/receiver/src/receiver_error.h
+++ b/receiver/src/receiver_error.h
@@ -9,7 +9,8 @@ enum class ReceiverErrorType {
     kInvalidOpCode,
     kBadRequest,
     kReject,
-    kAuthorizationFailure
+    kAuthorizationFailure,
+    kCannotConnectToDatabase
 };
 
 using ReceiverErrorTemplate = ServiceErrorTemplate<ReceiverErrorType, ErrorType::kReceiverError>;
@@ -24,6 +25,12 @@ auto const kReject = ReceiverErrorTemplate{
     "request rejected", ReceiverErrorType::kReject
 };
 
+
+auto const kCannotConnectToDatabase = ReceiverErrorTemplate{
+    "cannot connect to database", ReceiverErrorType::kCannotConnectToDatabase
+};
+
+
 auto const kBadRequest = ReceiverErrorTemplate{
     "Bad request", ReceiverErrorType::kBadRequest
 };
diff --git a/receiver/src/request_handler_db.cpp b/receiver/src/request_handler_db.cpp
index d01410d4c20b6965a826010872ea818ca53a927f..059a693bad56ea7c2d69a6470e90f1069591a97b 100644
--- a/receiver/src/request_handler_db.cpp
+++ b/receiver/src/request_handler_db.cpp
@@ -17,6 +17,7 @@ Error RequestHandlerDb::ProcessRequest(Request* request) const {
 }
 
 RequestHandlerDb::RequestHandlerDb(std::string collection_name): log__{GetDefaultReceiverLogger()},
+    http_client__{DefaultHttpClient()},
     collection_name_{std::move(collection_name)} {
     DatabaseFactory factory;
     Error err;
@@ -27,10 +28,42 @@ StatisticEntity RequestHandlerDb::GetStatisticEntity() const {
     return StatisticEntity::kDatabase;
 }
 
+
+Error RequestHandlerDb::GetDatabaseServerUri(std::string* uri) const {
+    if (GetReceiverConfig()->database_uri != "auto") {
+        *uri = GetReceiverConfig()->database_uri;
+        return nullptr;
+    }
+
+    HttpCode code;
+    Error http_err;
+    *uri = http_client__->Get(GetReceiverConfig()->discovery_server + "/mongo", &code, &http_err);
+    if (http_err) {
+        log__->Error(std::string{"http error when discovering database server from "} + GetReceiverConfig()->discovery_server
+                     + " : " + http_err->Explain());
+        return ReceiverErrorTemplates::kCannotConnectToDatabase.Generate(http_err->Explain());
+    }
+
+    if (code != HttpCode::OK) {
+        log__->Error(std::string{"http error when discovering database server from "} + GetReceiverConfig()->discovery_server
+                     + " : http code " + std::to_string((int)code));
+        return ReceiverErrorTemplates::kCannotConnectToDatabase.Generate("error from discovery service");
+    }
+
+    log__->Debug(std::string{"found database server "} + *uri);
+
+    return nullptr;
+}
+
+
 Error RequestHandlerDb::ConnectToDbIfNeeded() const {
     if (!connected_to_db) {
-        Error err = db_client__->Connect(GetReceiverConfig()->broker_db_uri, db_name_,
-                                         collection_name_);
+        std::string uri;
+        auto err = GetDatabaseServerUri(&uri);
+        if (err) {
+            return err;
+        }
+        err = db_client__->Connect(uri, db_name_, collection_name_);
         if (err) {
             return err;
         }
@@ -39,4 +72,5 @@ Error RequestHandlerDb::ConnectToDbIfNeeded() const {
     return nullptr;
 }
 
+
 }
\ No newline at end of file
diff --git a/receiver/src/request_handler_db.h b/receiver/src/request_handler_db.h
index dcf0a0ed6269dc5f8ce2e154f208d09269f9ee63..d25b6d1fa648f46b24ee5d3998d51e09b1e64146 100644
--- a/receiver/src/request_handler_db.h
+++ b/receiver/src/request_handler_db.h
@@ -4,12 +4,13 @@
 #include "request_handler.h"
 #include "database/database.h"
 #include "logger/logger.h"
+#include "http_client/http_client.h"
 
 #include "io/io.h"
 
 namespace asapo {
 
-class RequestHandlerDb: public ReceiverRequestHandler {
+class RequestHandlerDb : public ReceiverRequestHandler {
   public:
     RequestHandlerDb() = delete;
     RequestHandlerDb(std::string collection_name);
@@ -17,14 +18,16 @@ class RequestHandlerDb: public ReceiverRequestHandler {
     Error ProcessRequest(Request* request) const override;
     std::unique_ptr<Database> db_client__;
     const AbstractLogger* log__;
+    std::unique_ptr<HttpClient> http_client__;
   protected:
     Error ConnectToDbIfNeeded() const;
     mutable bool connected_to_db = false;
     mutable std::string db_name_;
     std::string collection_name_;
+  private:
+    Error GetDatabaseServerUri(std::string* uri) const;
 };
 
 }
 
-
 #endif //ASAPO_REQUEST_HANDLER_DB_H
diff --git a/receiver/src/request_handler_db_meta_write.cpp b/receiver/src/request_handler_db_meta_write.cpp
index 74113d8ea659c2cfa61032b7689e3b45df21c780..25e0277ce6ec6d240c99d22f1f00b3c48c7cf117 100644
--- a/receiver/src/request_handler_db_meta_write.cpp
+++ b/receiver/src/request_handler_db_meta_write.cpp
@@ -20,7 +20,7 @@ Error RequestHandlerDbMetaWrite::ProcessRequest(Request* request) const {
     if (!err) {
         log__->Debug(std::string{"insert beamtime meta"} + " to " + collection_name_ + " in " +
                      db_name_ +
-                     " at " + GetReceiverConfig()->broker_db_uri);
+                     " at " + GetReceiverConfig()->database_uri);
     }
     return err;
 }
diff --git a/receiver/src/request_handler_db_write.cpp b/receiver/src/request_handler_db_write.cpp
index f1491480cdf2730910df36f019e8293d58b870b6..19670a85b3b8f48dc0451e92f9e8301c2f910e3a 100644
--- a/receiver/src/request_handler_db_write.cpp
+++ b/receiver/src/request_handler_db_write.cpp
@@ -34,7 +34,7 @@ Error RequestHandlerDbWrite::InsertRecordToDb(const Request* request) const {
         if (!err) {
             log__->Debug(std::string{"insert record id "} + std::to_string(file_info.id) + " to " + collection_name_ + " in " +
                          db_name_ +
-                         " at " + GetReceiverConfig()->broker_db_uri);
+                         " at " + GetReceiverConfig()->database_uri);
         }
     } else {
         auto subset_id = request->GetCustomData()[1];
@@ -44,7 +44,7 @@ Error RequestHandlerDbWrite::InsertRecordToDb(const Request* request) const {
             log__->Debug(std::string{"insert record as subset id "} + std::to_string(subset_id) + ", id: " +
                          std::to_string(file_info.id) + " to " + collection_name_ + " in " +
                          db_name_ +
-                         " at " + GetReceiverConfig()->broker_db_uri);
+                         " at " + GetReceiverConfig()->database_uri);
         }
     }
     return err;
diff --git a/receiver/src/statistics_sender_influx_db.cpp b/receiver/src/statistics_sender_influx_db.cpp
index 06338e84946785cd0e8ffcefbe2cb366cf8630fd..3598ca27b57081c72e6237301670f81e04ebe194 100644
--- a/receiver/src/statistics_sender_influx_db.cpp
+++ b/receiver/src/statistics_sender_influx_db.cpp
@@ -21,11 +21,11 @@ void StatisticsSenderInfluxDb::SendStatistics(const StatisticsToSend& statistic)
     //todo: send statistics async
     HttpCode code;
     Error err;
-    auto response = httpclient__->Post(GetReceiverConfig()->monitor_db_uri + "/write?db=" +
-                                       GetReceiverConfig()->monitor_db_name, StatisticsToString(statistic),
+    auto response = httpclient__->Post(GetReceiverConfig()->performance_db_uri + "/write?db=" +
+                                       GetReceiverConfig()->performance_db_name, StatisticsToString(statistic),
                                        &code, &err);
-    std::string msg = "sending statistics to " + GetReceiverConfig()->monitor_db_name + " at " +
-                      GetReceiverConfig()->monitor_db_uri;
+    std::string msg = "sending statistics to " + GetReceiverConfig()->performance_db_name + " at " +
+                      GetReceiverConfig()->performance_db_uri;
     if (err) {
         log__->Error(msg + " - " + err->Explain());
         return;
diff --git a/receiver/unittests/mock_receiver_config.cpp b/receiver/unittests/mock_receiver_config.cpp
index 1c00babde188e4d43290f75e3e264ffabd0a6945..29805ebde5ef0de2d1d1aada86537fcbacb82a6d 100644
--- a/receiver/unittests/mock_receiver_config.cpp
+++ b/receiver/unittests/mock_receiver_config.cpp
@@ -41,9 +41,11 @@ Error SetReceiverConfig (const ReceiverConfig& config, std::string error_field)
         break;
     }
 
-    auto config_string = std::string("{") + Key("MonitorDbAddress", error_field) + "\"" + config.monitor_db_uri + "\"";
-    config_string += "," + Key("MonitorDbName", error_field) + "\"" + config.monitor_db_name + "\"";
-    config_string += "," + Key("BrokerDbAddress", error_field) + "\"" + config.broker_db_uri + "\"";
+    auto config_string = std::string("{") + Key("PerformanceDbServer",
+                                                error_field) + "\"" + config.performance_db_uri + "\"";
+    config_string += "," + Key("PerformanceDbName", error_field) + "\"" + config.performance_db_name + "\"";
+    config_string += "," + Key("DatabaseServer", error_field) + "\"" + config.database_uri + "\"";
+    config_string += "," + Key("DiscoveryServer", error_field) + "\"" + config.discovery_server + "\"";
     config_string += "," + Key("ListenPort", error_field) + std::to_string(config.listen_port);
     config_string += "," + Key("DataServer", error_field) + "{";
     config_string += Key("ListenPort", error_field) + std::to_string(config.dataserver.listen_port);
diff --git a/receiver/unittests/test_config.cpp b/receiver/unittests/test_config.cpp
index 8b967a7dc1fc68f3ecfc4ce0afe3e5b8f772fd90..6231fed9089ba3bfbe11804e1d3543aa6b7450a1 100644
--- a/receiver/unittests/test_config.cpp
+++ b/receiver/unittests/test_config.cpp
@@ -47,11 +47,11 @@ class ConfigTests : public Test {
         test_config.listen_port = 4200;
         test_config.dataserver.listen_port = 4201;
         test_config.tag = "receiver1";
-        test_config.monitor_db_name = "db_test";
-        test_config.monitor_db_uri = "localhost:8086";
+        test_config.performance_db_name = "db_test";
+        test_config.performance_db_uri = "localhost:8086";
         test_config.write_to_disk = true;
         test_config.write_to_db = true;
-        test_config.broker_db_uri = "localhost:27017";
+        test_config.database_uri = "localhost:27017";
         test_config.log_level = asapo::LogLevel::Error;
         test_config.root_folder = "test_fodler";
         test_config.authorization_interval_ms = 10000;
@@ -61,6 +61,7 @@ class ConfigTests : public Test {
         test_config.datacache_size_gb = 2;
         test_config.source_host = "host";
         test_config.dataserver.nthreads = 5;
+        test_config.discovery_server = "discovery";
     }
 
 };
@@ -74,9 +75,9 @@ TEST_F(ConfigTests, ReadSettings) {
     auto config = GetReceiverConfig();
 
     ASSERT_THAT(err, Eq(nullptr));
-    ASSERT_THAT(config->monitor_db_uri, Eq("localhost:8086"));
-    ASSERT_THAT(config->monitor_db_name, Eq("db_test"));
-    ASSERT_THAT(config->broker_db_uri, Eq("localhost:27017"));
+    ASSERT_THAT(config->performance_db_uri, Eq("localhost:8086"));
+    ASSERT_THAT(config->performance_db_name, Eq("db_test"));
+    ASSERT_THAT(config->database_uri, Eq("localhost:27017"));
     ASSERT_THAT(config->listen_port, Eq(4200));
     ASSERT_THAT(config->dataserver.listen_port, Eq(4201));
     ASSERT_THAT(config->authorization_interval_ms, Eq(10000));
@@ -92,18 +93,17 @@ TEST_F(ConfigTests, ReadSettings) {
     ASSERT_THAT(config->source_host, Eq("host"));
     ASSERT_THAT(config->dataserver.nthreads, Eq(5));
     ASSERT_THAT(config->dataserver.tag, Eq("receiver1_ds"));
-
-
+    ASSERT_THAT(config->discovery_server, Eq("discovery"));
 }
 
 
 TEST_F(ConfigTests, ErrorReadSettings) {
     PrepareConfig();
 
-    std::vector<std::string>fields {"MonitorDbAddress", "ListenPort", "DataServer", "ListenPort", "WriteToDisk",
-                                    "WriteToDb", "DataCache", "Use", "SizeGB", "ReservedShare", "BrokerDbAddress", "Tag",
-                                    "AuthorizationServer", "AuthorizationInterval", "RootFolder", "MonitorDbName", "LogLevel",
-                                    "SourceHost", "NThreads"};
+    std::vector<std::string>fields {"PerformanceDbServer", "ListenPort", "DataServer", "ListenPort", "WriteToDisk",
+                                    "WriteToDb", "DataCache", "Use", "SizeGB", "ReservedShare", "DatabaseServer", "Tag",
+                                    "AuthorizationServer", "AuthorizationInterval", "RootFolder", "PerformanceDbName", "LogLevel",
+                                    "SourceHost", "NThreads", "DiscoveryServer"};
     for (const auto& field : fields) {
         auto err = asapo::SetReceiverConfig(test_config, field);
         ASSERT_THAT(err, Ne(nullptr));
diff --git a/receiver/unittests/test_request_handler_db.cpp b/receiver/unittests/test_request_handler_db.cpp
index 6e5b7c9c5218fc93cc4545c13a223e8937eb22af..c0163e949009210c1789c7d8a93800220e59787c 100644
--- a/receiver/unittests/test_request_handler_db.cpp
+++ b/receiver/unittests/test_request_handler_db.cpp
@@ -4,6 +4,7 @@
 #include "unittests/MockIO.h"
 #include "unittests/MockDatabase.h"
 #include "unittests/MockLogger.h"
+#include "unittests/MockHttpClient.h"
 
 #include "../src/receiver_error.h"
 #include "../src/request.h"
@@ -46,9 +47,12 @@ using asapo::RequestHandlerDb;
 using ::asapo::GenericRequestHeader;
 
 using asapo::MockDatabase;
+using ::asapo::MockHttpClient;
+
 using asapo::RequestFactory;
 using asapo::SetReceiverConfig;
 using asapo::ReceiverConfig;
+using asapo::HttpCode;
 
 
 namespace {
@@ -64,24 +68,65 @@ class DbHandlerTests : public Test {
     std::string expected_beamtime_id = "beamtime_id";
     std::string expected_stream = "stream";
     std::string expected_default_stream = "detector";
+    std::string expected_discovery_server = "discovery";
+    std::string expected_database_server = "127.0.0.1:27017";
+
+    MockHttpClient mock_http_client;
 
     void SetUp() override {
         GenericRequestHeader request_header;
         request_header.data_id = 2;
         handler.db_client__ = std::unique_ptr<asapo::Database> {&mock_db};
         handler.log__ = &mock_logger;
+        handler.http_client__ = std::unique_ptr<asapo::HttpClient> {&mock_http_client};
         mock_request.reset(new NiceMock<MockRequest> {request_header, 1, ""});
         ON_CALL(*mock_request, GetBeamtimeId()).WillByDefault(ReturnRef(expected_beamtime_id));
         ON_CALL(*mock_request, GetStream()).WillByDefault(ReturnRef(expected_stream));
 
     }
     void TearDown() override {
+        handler.http_client__.release();
         handler.db_client__.release();
     }
-
+    void MockAuthRequest(bool error, HttpCode code = HttpCode::OK);
 
 };
 
+
+void DbHandlerTests::MockAuthRequest(bool error, HttpCode code) {
+    if (error) {
+        EXPECT_CALL(mock_http_client, Get_t(expected_discovery_server + "/mongo",  _, _)).
+        WillOnce(
+            DoAll(SetArgPointee<2>(new asapo::SimpleError("http error")),
+                  Return("")
+                 ));
+        EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("discover database server"),
+                                             HasSubstr("http error"),
+                                             HasSubstr(expected_discovery_server))));
+
+    } else {
+        EXPECT_CALL(mock_http_client, Get_t(expected_discovery_server + "/mongo", _, _)).
+        WillOnce(
+            DoAll(
+                SetArgPointee<1>(code),
+                SetArgPointee<2>(nullptr),
+                Return(expected_database_server)
+            ));
+        if (code != HttpCode::OK) {
+            EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("failure discover database server"),
+                                                 HasSubstr("http code"),
+                                                 HasSubstr(std::to_string(int(code))),
+                                                 HasSubstr(expected_discovery_server))));
+        } else {
+            EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("found database server"),
+                                                 HasSubstr(expected_database_server))));
+        }
+    }
+
+
+}
+
+
 TEST(DbHandler, Constructor) {
     RequestHandlerDb handler{""};
     ASSERT_THAT(dynamic_cast<asapo::MongoDBClient*>(handler.db_client__.get()), Ne(nullptr));
@@ -95,8 +140,48 @@ TEST_F(DbHandlerTests, CheckStatisticEntity) {
 }
 
 
+TEST_F(DbHandlerTests, ProcessRequestDiscoversMongoDbAddress) {
+    config.database_uri = "auto";
+    config.discovery_server = expected_discovery_server;
+    SetReceiverConfig(config, "none");
+
+    MockAuthRequest(false, HttpCode::OK);
+
+    EXPECT_CALL(*mock_request, GetBeamtimeId())
+    .WillOnce(ReturnRef(expected_beamtime_id))
+    ;
+
+    EXPECT_CALL(*mock_request, GetStream())
+    .WillOnce(ReturnRef(expected_stream))
+    ;
+
+
+    EXPECT_CALL(mock_db, Connect_t(expected_database_server, expected_beamtime_id + "_" + expected_stream,
+                                   expected_collection_name)).
+    WillOnce(testing::Return(nullptr));
+
+    auto err = handler.ProcessRequest(mock_request.get());
+    ASSERT_THAT(err, Eq(nullptr));
+}
+
+TEST_F(DbHandlerTests, ProcessRequestErrorDiscoversMongoDbAddress) {
+    config.database_uri = "auto";
+    config.discovery_server = expected_discovery_server;
+    SetReceiverConfig(config, "none");
+
+
+    MockAuthRequest(true, HttpCode::BadRequest);
+
+    EXPECT_CALL(mock_db, Connect_t(_, _, _)).Times(0);
+
+    auto err = handler.ProcessRequest(mock_request.get());
+    ASSERT_THAT(err, Eq(asapo::ReceiverErrorTemplates::kCannotConnectToDatabase));
+}
+
+
+
 TEST_F(DbHandlerTests, ProcessRequestCallsConnectDbWhenNotConnected) {
-    config.broker_db_uri = "127.0.0.1:27017";
+    config.database_uri = "127.0.0.1:27017";
     SetReceiverConfig(config, "none");
 
 
diff --git a/receiver/unittests/test_request_handler_db_meta_writer.cpp b/receiver/unittests/test_request_handler_db_meta_writer.cpp
index 00aaee3cec84fa5604e891b963c63fc19a58d7d1..a81ff983d526fa41ba78fcdc8ae0bf8038a89dd5 100644
--- a/receiver/unittests/test_request_handler_db_meta_writer.cpp
+++ b/receiver/unittests/test_request_handler_db_meta_writer.cpp
@@ -96,7 +96,7 @@ TEST_F(DbMetaWriterHandlerTests, CallsUpdate) {
     ;
 
 
-    EXPECT_CALL(mock_db, Connect_t(config.broker_db_uri, expected_beamtime_id + "_" + expected_stream,
+    EXPECT_CALL(mock_db, Connect_t(config.database_uri, expected_beamtime_id + "_" + expected_stream,
                                    expected_collection_name)).
     WillOnce(testing::Return(nullptr));
 
@@ -113,7 +113,7 @@ TEST_F(DbMetaWriterHandlerTests, CallsUpdate) {
     WillOnce(testing::Return(nullptr));
 
     EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("insert beamtime meta"),
-                                         HasSubstr(config.broker_db_uri),
+                                         HasSubstr(config.database_uri),
                                          HasSubstr(expected_beamtime_id),
                                          HasSubstr(expected_collection_name)
                                         )
diff --git a/receiver/unittests/test_request_handler_db_writer.cpp b/receiver/unittests/test_request_handler_db_writer.cpp
index 5f83e996463d3edd8ad836544e3f1f582c76241f..11047e9e98c819aee04be24e531d259fbb343f00 100644
--- a/receiver/unittests/test_request_handler_db_writer.cpp
+++ b/receiver/unittests/test_request_handler_db_writer.cpp
@@ -53,6 +53,13 @@ using asapo::ReceiverConfig;
 
 namespace {
 
+TEST(DbWriterHandler, Constructor) {
+    RequestHandlerDbWrite handler{""};
+    ASSERT_THAT(dynamic_cast<asapo::HttpClient*>(handler.http_client__.get()), Ne(nullptr));
+}
+
+
+
 class DbWriterHandlerTests : public Test {
   public:
     std::string expected_collection_name = asapo::kDBDataCollectionName;
@@ -81,7 +88,7 @@ class DbWriterHandlerTests : public Test {
         handler.db_client__ = std::unique_ptr<asapo::Database> {&mock_db};
         handler.log__ = &mock_logger;
         mock_request.reset(new NiceMock<MockRequest> {request_header, 1, ""});
-        config.broker_db_uri = "127.0.0.1:27017";
+        config.database_uri = "127.0.0.1:27017";
         config.source_host = expected_hostname;
         config.dataserver.listen_port = expected_port;
         SetReceiverConfig(config, "none");
@@ -127,7 +134,7 @@ void DbWriterHandlerTests::ExpectRequestParams(asapo::Opcode op_code, const std:
     std::string db_name = expected_beamtime_id;
     db_name += "_" + stream;
 
-    EXPECT_CALL(mock_db, Connect_t(config.broker_db_uri, db_name, expected_collection_name)).
+    EXPECT_CALL(mock_db, Connect_t(config.database_uri, db_name, expected_collection_name)).
     WillOnce(testing::Return(nullptr));
 
     EXPECT_CALL(*mock_request, GetDataSize())
@@ -180,7 +187,7 @@ TEST_F(DbWriterHandlerTests, CallsInsert) {
     WillOnce(testing::Return(nullptr));
 
     EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("insert record"),
-                                         HasSubstr(config.broker_db_uri),
+                                         HasSubstr(config.database_uri),
                                          HasSubstr(expected_beamtime_id),
                                          HasSubstr(expected_stream),
                                          HasSubstr(expected_collection_name)
@@ -201,7 +208,7 @@ TEST_F(DbWriterHandlerTests, CallsInsertSubset) {
     WillOnce(testing::Return(nullptr));
 
     EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("insert record"),
-                                         HasSubstr(config.broker_db_uri),
+                                         HasSubstr(config.database_uri),
                                          HasSubstr(expected_beamtime_id),
                                          HasSubstr(expected_collection_name)
                                         )
diff --git a/receiver/unittests/test_statistics_sender_fluentd.cpp b/receiver/unittests/test_statistics_sender_fluentd.cpp
index 137bb4f847897846a366972bd81959c5830938bb..3060507558cebc47f6737d363c8945c099445c4f 100644
--- a/receiver/unittests/test_statistics_sender_fluentd.cpp
+++ b/receiver/unittests/test_statistics_sender_fluentd.cpp
@@ -65,8 +65,8 @@ class SenderFluentdTests : public Test {
         statistics.tags.push_back(std::make_pair("name1", "value1"));
         statistics.tags.push_back(std::make_pair("name2", "value2"));
 
-        config.monitor_db_uri = "test_uri";
-        config.monitor_db_name = "test_name";
+        config.performance_db_uri = "test_uri";
+        config.performance_db_name = "test_name";
         SetReceiverConfig(config, "none");
 
         sender.statistics_log__.reset(&mock_logger);
diff --git a/receiver/unittests/test_statistics_sender_influx_db.cpp b/receiver/unittests/test_statistics_sender_influx_db.cpp
index 97fe930767dcd1c6f9819d6ac277fa1ccaa407a2..d4e293c0ce7641ad25c5644cbc1974d21637ae90 100644
--- a/receiver/unittests/test_statistics_sender_influx_db.cpp
+++ b/receiver/unittests/test_statistics_sender_influx_db.cpp
@@ -66,8 +66,8 @@ class SenderInfluxDbTests : public Test {
         statistics.tags.push_back(std::make_pair("name1", "value1"));
         statistics.tags.push_back(std::make_pair("name2", "value2"));
 
-        config.monitor_db_uri = "test_uri";
-        config.monitor_db_name = "test_name";
+        config.performance_db_uri = "test_uri";
+        config.performance_db_name = "test_name";
         SetReceiverConfig(config, "none");
 
         sender.httpclient__.reset(&mock_http_client);
@@ -88,7 +88,7 @@ TEST_F(SenderInfluxDbTests, SendStatisticsCallsPost) {
               Return("")
              ));
 
-    EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("sending statistics"), HasSubstr(config.monitor_db_uri))));
+    EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("sending statistics"), HasSubstr(config.performance_db_uri))));
 
 
     sender.SendStatistics(statistics);
@@ -113,8 +113,8 @@ TEST_F(SenderInfluxDbTests, LogDebugSendStatistics) {
              ));
 
     EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("sending statistics"),
-                                         HasSubstr(config.monitor_db_uri),
-                                         HasSubstr(config.monitor_db_name)
+                                         HasSubstr(config.performance_db_uri),
+                                         HasSubstr(config.performance_db_name)
                                         )
                                   )
                );
diff --git a/tests/automatic/high_avail/broker_mongo_restart/check_linux.sh b/tests/automatic/high_avail/broker_mongo_restart/check_linux.sh
index 8e7f70f9a3b18db3af8062156cbf359b7efbb60a..86ced0726a978b47a18d27b1d29650c64853474a 100644
--- a/tests/automatic/high_avail/broker_mongo_restart/check_linux.sh
+++ b/tests/automatic/high_avail/broker_mongo_restart/check_linux.sh
@@ -55,7 +55,7 @@ Cleanup() {
 influx -execute "create database ${monitor_database_name}"
 
 sed -i 's/27017/27016/g' receiver.json.tpl
-sed -i 's/27017/27016/g' broker.json.tpl
+sed -i 's/27017/27016/g' discovery.json.tpl
 sed -i 's/info/debug/g' broker.json.tpl
 
 start_mongo
diff --git a/tests/automatic/high_avail/receiver_mongo_restart/check_linux.sh b/tests/automatic/high_avail/receiver_mongo_restart/check_linux.sh
index 4a5c6de8e84b6a86ed90a40cc75805d9a1ae68aa..f8f9a763921735a77136281a3021d4b8e0b91b46 100644
--- a/tests/automatic/high_avail/receiver_mongo_restart/check_linux.sh
+++ b/tests/automatic/high_avail/receiver_mongo_restart/check_linux.sh
@@ -54,7 +54,7 @@ wait_mongo
 # create db before worker starts reading it. todo: git rid of it
 echo "db.${beamtime_id}_detector.insert({dummy:1})" | mongo --port 27016 ${beamtime_id}_detector
 
-sed -i 's/27017/27016/g' receiver.json.tpl
+sed -i 's/27017/27016/g' discovery.json.tpl
 
 
 nomad run authorizer.nmd
diff --git a/tests/automatic/settings/broker_settings.json b/tests/automatic/settings/broker_settings.json
index 9d95a7c87a13c575e3abda029018ecf5a863f6a2..dc249838039cab80bc38414e200c1911d154b591 100644
--- a/tests/automatic/settings/broker_settings.json
+++ b/tests/automatic/settings/broker_settings.json
@@ -1,7 +1,7 @@
 {
-  "BrokerDbAddress":"127.0.0.1:27017",
-  "MonitorDbAddress": "localhost:8086",
-  "MonitorDbName": "db_test",
+  "DatabaseServer":"127.0.0.1:27017",
+  "PerformanceDbServer": "localhost:8086",
+  "PerformanceDbName": "db_test",
   "port":5005,
   "LogLevel":"info",
   "SecretFile":"auth_secret.key"
diff --git a/tests/automatic/settings/broker_settings.json.tpl b/tests/automatic/settings/broker_settings.json.tpl
index c1cfd68aa0d09885da1000a7bb566b5510eec92a..0ca5866f9d51339e15ed19f6dbcd1c3a5e1c3c92 100644
--- a/tests/automatic/settings/broker_settings.json.tpl
+++ b/tests/automatic/settings/broker_settings.json.tpl
@@ -1,7 +1,8 @@
 {
-  "BrokerDbAddress":"127.0.0.1:27017",
-  "MonitorDbAddress": "localhost:8086",
-  "MonitorDbName": "db_test",
+  "DatabaseServer":"auto",
+  "DiscoveryServer": "localhost:8400/discovery",
+  "PerformanceDbServer": "localhost:8086",
+  "PerformanceDbName": "db_test",
   "port":{{ env "NOMAD_PORT_broker" }},
   "LogLevel":"info",
   "SecretFile":"auth_secret.key"
diff --git a/tests/automatic/settings/discovery_fixed_settings.json b/tests/automatic/settings/discovery_fixed_settings.json
deleted file mode 100644
index 25cba67824339d7c81a9d39b4a89d8356d4ea9bf..0000000000000000000000000000000000000000
--- a/tests/automatic/settings/discovery_fixed_settings.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-  "Mode": "consul",
-  "Receiver": {
-    "MaxConnections": 32
-  },
-  "Port": {{ env "NOMAD_PORT_discovery" }},
-  "LogLevel":"debug"
-}
-
-
diff --git a/tests/automatic/settings/discovery_settings.json.tpl b/tests/automatic/settings/discovery_settings.json.tpl
index 25cba67824339d7c81a9d39b4a89d8356d4ea9bf..2095ef388d7270cdfd17f45d0046b654c719c47f 100644
--- a/tests/automatic/settings/discovery_settings.json.tpl
+++ b/tests/automatic/settings/discovery_settings.json.tpl
@@ -3,6 +3,9 @@
   "Receiver": {
     "MaxConnections": 32
   },
+  "Mongo": {
+    "StaticEndpoint": "127.0.0.1:27017"
+  },
   "Port": {{ env "NOMAD_PORT_discovery" }},
   "LogLevel":"debug"
 }
diff --git a/tests/automatic/settings/nginx.conf.tpl b/tests/automatic/settings/nginx.conf.tpl
index 8f9608e576733aab5cdd308875774ef7aecd6e6b..7d6c84e2c31bdf1294075dda2ad2c3d6245fba88 100644
--- a/tests/automatic/settings/nginx.conf.tpl
+++ b/tests/automatic/settings/nginx.conf.tpl
@@ -22,15 +22,9 @@ http {
           set $fluentd_endpoint fluentd.service.asapo;
           set $kibana_endpoint kibana.service.asapo;
           set $grafana_endpoint grafana.service.asapo;
-          set $mongo_endpoint mongo.service.asapo;
           set $influxdb_endpoint influxdb.service.asapo;
           set $elasticsearch_endpoint elasticsearch.service.asapo;
 
-   		  location /mongo/ {
-            rewrite ^/mongo(/.*) $1 break;
-            proxy_pass http://$mongo_endpoint:27017$uri$is_args$args;
-          }
-
    		  location /influxdb/ {
             rewrite ^/influxdb(/.*) $1 break;
             proxy_pass http://$influxdb_endpoint:8086$uri$is_args$args;
diff --git a/tests/automatic/settings/receiver.json.tpl.lin.in b/tests/automatic/settings/receiver.json.tpl.lin.in
index cfe05774225cda1124632b3706cc5c65e95431c0..2f27019b74b1e721ecc52ea1a4b28c1c5e46e6a3 100644
--- a/tests/automatic/settings/receiver.json.tpl.lin.in
+++ b/tests/automatic/settings/receiver.json.tpl.lin.in
@@ -1,7 +1,8 @@
 {
-  "MonitorDbAddress":"localhost:8086",
-  "MonitorDbName": "db_test",
-  "BrokerDbAddress":"localhost:27017",
+  "PerformanceDbServer":"localhost:8086",
+  "PerformanceDbName": "db_test",
+  "DatabaseServer":"auto",
+  "DiscoveryServer": "localhost:8400/discovery",
   "DataServer": {
     "NThreads": 2,
     "ListenPort": {{ env "NOMAD_PORT_recv_ds" }}
diff --git a/tests/automatic/settings/receiver.json.tpl.win.in b/tests/automatic/settings/receiver.json.tpl.win.in
index 39ad1b7c1882fab2bda0db4700c184dcdef7fda1..7fcedb822a21daa57a82a0d3526bda85cbd26ca2 100644
--- a/tests/automatic/settings/receiver.json.tpl.win.in
+++ b/tests/automatic/settings/receiver.json.tpl.win.in
@@ -1,7 +1,8 @@
 {
-  "MonitorDbAddress":"localhost:8086",
-  "MonitorDbName": "db_test",
-  "BrokerDbAddress":"localhost:27017",
+  "PerformanceDbServer":"localhost:8086",
+  "PerformanceDbName": "db_test",
+  "DatabaseServer":"auto",
+  "DiscoveryServer": "localhost:8400/discovery",
   "AuthorizationServer": "localhost:8400/authorizer",
   "AuthorizationInterval": 10000,
   "ListenPort": {{ env "NOMAD_PORT_recv" }},
diff --git a/tests/manual/performance_broker/settings.json b/tests/manual/performance_broker/settings.json
index 4385f5a6519948de87eff1c7bce3d0ecc287aed3..72e89b21fb3a4eb09f4914c63773e0df5343324c 100644
--- a/tests/manual/performance_broker/settings.json
+++ b/tests/manual/performance_broker/settings.json
@@ -1,7 +1,7 @@
 {
-  "BrokerDbAddress":"localhost:27017",
-  "MonitorDbAddress": "localhost:8086",
-  "MonitorDbName": "db_test",
+  "DatabaseServer":"localhost:27017",
+  "PerformanceDbServer": "localhost:8086",
+  "PerformanceDbName": "db_test",
   "port":5005,
   "LogLevel":"info",
   "SecretFile":"auth_secret.key"
diff --git a/tests/manual/performance_broker/test.sh b/tests/manual/performance_broker/test.sh
index 268483698ecacf1bfa768ee4a27bc2ccdcb14682..d1685e15ec3e78b8a0265ecb03096520d87c04e5 100755
--- a/tests/manual/performance_broker/test.sh
+++ b/tests/manual/performance_broker/test.sh
@@ -22,7 +22,7 @@ worker_dir=~/broker_test
 service_dir=~/broker_test
 
 
-cat settings.json | jq ".MonitorDbAddress = \"${monitor_node}:${monitor_port}\"" > settings_tmp.json
+cat settings.json | jq ".PerformanceDbServer = \"${monitor_node}:${monitor_port}\"" > settings_tmp.json
 
 cat discovery.json | jq ".Broker.StaticEndpoint = \"${service_node}:5005\"" > discovery_tmp.json
 
diff --git a/tests/manual/performance_full_chain_simple/broker.json b/tests/manual/performance_full_chain_simple/broker.json
index 4385f5a6519948de87eff1c7bce3d0ecc287aed3..72e89b21fb3a4eb09f4914c63773e0df5343324c 100644
--- a/tests/manual/performance_full_chain_simple/broker.json
+++ b/tests/manual/performance_full_chain_simple/broker.json
@@ -1,7 +1,7 @@
 {
-  "BrokerDbAddress":"localhost:27017",
-  "MonitorDbAddress": "localhost:8086",
-  "MonitorDbName": "db_test",
+  "DatabaseServer":"localhost:27017",
+  "PerformanceDbServer": "localhost:8086",
+  "PerformanceDbName": "db_test",
   "port":5005,
   "LogLevel":"info",
   "SecretFile":"auth_secret.key"
diff --git a/tests/manual/performance_full_chain_simple/receiver.json b/tests/manual/performance_full_chain_simple/receiver.json
index b9bcf2a5817b4a61bd75645750e2c24c6d14a90d..71aea67fd544b12cd6aac8b3e9759cd6b0fae05b 100644
--- a/tests/manual/performance_full_chain_simple/receiver.json
+++ b/tests/manual/performance_full_chain_simple/receiver.json
@@ -1,8 +1,9 @@
 {
-  "MonitorDbAddress":"localhost:8086",
-  "MonitorDbName": "db_test",
-  "BrokerDbAddress":"localhost:27017",
+  "PerformanceDbServer":"localhost:8086",
+  "PerformanceDbName": "db_test",
+  "DatabaseServer":"localhost:27017",
   "AuthorizationServer": "localhost:5007",
+  "DiscoveryServer": "localhost:8400/discovery",
   "AuthorizationInterval": 10000,
   "ListenPort":4200,
   "DataServer": {
diff --git a/tests/manual/performance_full_chain_simple/test.sh b/tests/manual/performance_full_chain_simple/test.sh
index fb9f6a2af67cba539ed663e26c6cd7484a9f8f86..1786a56a408e31854b8f08190dd502e39ffc63b8 100755
--- a/tests/manual/performance_full_chain_simple/test.sh
+++ b/tests/manual/performance_full_chain_simple/test.sh
@@ -43,7 +43,7 @@ ssh ${receiver_node} mkdir -p ${receiver_dir}/files/${beamline}/${beamtime_id}
 scp ../../../cmake-build-release/receiver/receiver ${receiver_node}:${receiver_dir}
 cat receiver.json |
   jq "to_entries |
-       map(if .key == \"MonitorDbAddress\"
+       map(if .key == \"PerformanceDbServer\"
           then . + {value:\"${monitor_node}:${monitor_port}\"}
           elif .key == \"ListenPort\"
           then . + {value:${receiver_port}}
@@ -96,7 +96,7 @@ broker_dir=~/fullchain_tests
 ssh ${broker_node} mkdir -p ${broker_dir}/logs
 cat broker.json |
   jq "to_entries |
-       map(if .key == \"MonitorDbAddress\"
+       map(if .key == \"PerformanceDbServer\"
           then . + {value:\"${monitor_node}:${monitor_port}\"}
           else .
           end
diff --git a/tests/manual/performance_producer_receiver/receiver.json b/tests/manual/performance_producer_receiver/receiver.json
index b9bcf2a5817b4a61bd75645750e2c24c6d14a90d..1e1ae4d70fc29ba48bc591e12050a0e2f0a8596f 100644
--- a/tests/manual/performance_producer_receiver/receiver.json
+++ b/tests/manual/performance_producer_receiver/receiver.json
@@ -1,7 +1,8 @@
 {
-  "MonitorDbAddress":"localhost:8086",
-  "MonitorDbName": "db_test",
-  "BrokerDbAddress":"localhost:27017",
+  "PerformanceDbServer":"localhost:8086",
+  "PerformanceDbName": "db_test",
+  "DatabaseServer":"localhost:27017",
+  "DiscoveryServer": "localhost:8400/discovery",
   "AuthorizationServer": "localhost:5007",
   "AuthorizationInterval": 10000,
   "ListenPort":4200,
diff --git a/tests/manual/performance_producer_receiver/test.sh b/tests/manual/performance_producer_receiver/test.sh
index faf2493473db989acb220f61099d80c9fa0a848c..251f9d18a14bc1e304f1052f045ccadd4904782a 100755
--- a/tests/manual/performance_producer_receiver/test.sh
+++ b/tests/manual/performance_producer_receiver/test.sh
@@ -54,7 +54,7 @@ scp ../../../cmake-build-release/examples/producer/dummy-data-producer/dummy-dat
 function do_work {
 cat receiver.json |
   jq "to_entries |
-       map(if .key == \"MonitorDbAddress\"
+       map(if .key == \"PerformanceDbServer\"
           then . + {value:\"${monitor_node}:${monitor_port}\"}
           elif .key == \"ListenPort\"
           then . + {value:${receiver_port}}
diff --git a/tests/manual/python_tests/producer/receiver.json.tpl b/tests/manual/python_tests/producer/receiver.json.tpl
index b361ba8510e66deffe308481b949ed36bce309e3..88be63ae321b4974c7c440f7f479dc7682acb806 100644
--- a/tests/manual/python_tests/producer/receiver.json.tpl
+++ b/tests/manual/python_tests/producer/receiver.json.tpl
@@ -1,7 +1,8 @@
 {
-  "MonitorDbAddress":"localhost:8086",
-  "MonitorDbName": "db_test",
-  "BrokerDbAddress":"localhost:27017",
+  "PerformanceDbServer":"localhost:8086",
+  "PerformanceDbName": "db_test",
+  "DatabaseServer":"localhost:27017",
+  "DiscoveryServer": "localhost:8400/discovery",
   "DataServer": {
     "NThreads": 2,
     "ListenPort": {{ env "NOMAD_PORT_recv_ds" }}
diff --git a/tests/manual/receiver_debug_local/receiver.json b/tests/manual/receiver_debug_local/receiver.json
index 0f6bdad83dbb9875b58a0ca3990663ef9f62a9ff..abf1f8b0962a38ba261f84d8fc65c8362bf67acb 100644
--- a/tests/manual/receiver_debug_local/receiver.json
+++ b/tests/manual/receiver_debug_local/receiver.json
@@ -1,7 +1,8 @@
 {
-  "MonitorDbAddress":"localhost:8086",
-  "MonitorDbName": "db_test",
-  "BrokerDbAddress":"localhost:27017",
+  "PerformanceDbServer":"localhost:8086",
+  "PerformanceDbName": "db_test",
+  "DatabaseServer":"localhost:27017",
+  "DiscoveryServer": "localhost:8400/discovery",
   "DataServer": {
     "NThreads": 2,
     "ListenPort": 22000