diff --git a/.gitignore b/.gitignore index 38f92c2ee2bfd3265f5a3051a4210d5d48368e62..32dbba481b82ad0ce03caea4c4fc4e99270a13aa 100644 --- a/.gitignore +++ b/.gitignore @@ -122,4 +122,7 @@ doxygen #GO broker/pkg -discovery/pkg \ No newline at end of file +discovery/pkg +common/go/pkg +authorizer/pkg +asapo_tools/pkg diff --git a/CMakeLists.txt b/CMakeLists.txt index 2285e204fa4a3a56877f8b4857266d59723c070b..31dc962d885da0880e1b54fdfaabf7655074bfbc 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -68,6 +68,10 @@ add_subdirectory(receiver) add_subdirectory(discovery) +add_subdirectory(authorizer) + +add_subdirectory(asapo_tools) + if(BUILD_INTEGRATION_TESTS) add_subdirectory(tests) diff --git a/CMakeModules/prepare_asapo.cmake b/CMakeModules/prepare_asapo.cmake index 26bedee49dad2b3f7a138f5860db32fab6cdf101..86fbf0e39fbdd4633972097af471d582ee05a95b 100644 --- a/CMakeModules/prepare_asapo.cmake +++ b/CMakeModules/prepare_asapo.cmake @@ -2,6 +2,7 @@ function(prepare_asapo) get_target_property(RECEIVER_DIR receiver-bin BINARY_DIR) get_target_property(RECEIVER_NAME receiver-bin OUTPUT_NAME) get_target_property(DISCOVERY_FULLPATH asapo-discovery EXENAME) + get_target_property(AUTHORIZER_FULLPATH asapo-authorizer EXENAME) get_target_property(BROKER_FULLPATH asapo-broker EXENAME) set(WORK_DIR ${CMAKE_CURRENT_BINARY_DIR}) if (WIN32) @@ -11,9 +12,12 @@ function(prepare_asapo) endif() configure_file(${CMAKE_SOURCE_DIR}/config/nomad/receiver.nmd.in receiver.nmd @ONLY) configure_file(${CMAKE_SOURCE_DIR}/config/nomad/discovery.nmd.in discovery.nmd @ONLY) + configure_file(${CMAKE_SOURCE_DIR}/config/nomad/authorizer.nmd.in authorizer.nmd @ONLY) configure_file(${CMAKE_SOURCE_DIR}/config/nomad/broker.nmd.in broker.nmd @ONLY) configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/discovery_settings.json.tpl discovery.json.tpl COPYONLY) + configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/authorizer_settings.json.tpl authorizer.json.tpl COPYONLY) configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/broker_settings.json.tpl broker.json.tpl COPYONLY) + configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/broker_secret.key broker_secret.key COPYONLY) configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/nginx.conf.tpl nginx.conf.tpl COPYONLY) configure_file(${CMAKE_SOURCE_DIR}/config/nomad/nginx.nmd.in nginx.nmd @ONLY) diff --git a/asapo_tools/CMakeLists.txt b/asapo_tools/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..2a517df18b3ffb1417721dd797003b0fabb53fbf --- /dev/null +++ b/asapo_tools/CMakeLists.txt @@ -0,0 +1,36 @@ +set (TARGET_NAME asapo) + +if (NOT "$ENV{GOPATH}" STREQUAL "") + set(GOPATH $ENV{GOPATH}) +endif() + +if (NOT GOPATH) + message (FATAL_ERROR "GOPATH not set") +endif() + +message(STATUS "global gopath ${GOPATH}") + +IF(WIN32) + set (gopath "${GOPATH}\;${CMAKE_CURRENT_SOURCE_DIR}\;${CMAKE_SOURCE_DIR}/common/go") + set (exe_name "${TARGET_NAME}.exe") +ELSE() + set (gopath ${GOPATH}:${CMAKE_CURRENT_SOURCE_DIR}:${CMAKE_SOURCE_DIR}/common/go) + set (exe_name "${TARGET_NAME}") +ENDIF() + +include(testing_go) + +add_custom_target(asapo ALL + COMMAND ${CMAKE_COMMAND} -E env GOPATH=${gopath} + go build ${GO_OPTS} -o ${exe_name} asapo_tools/main + VERBATIM) +define_property(TARGET PROPERTY EXENAME + BRIEF_DOCS <executable name> + FULL_DOCS <full-doc>) + +set_target_properties(${TARGET_NAME} PROPERTIES EXENAME ${CMAKE_CURRENT_BINARY_DIR}/${exe_name}) + + +install(PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/${exe_name} DESTINATION bin) + 
+gotest(${TARGET_NAME} "./...") diff --git a/asapo_tools/src/asapo_tools/cli/cli.go b/asapo_tools/src/asapo_tools/cli/cli.go new file mode 100644 index 0000000000000000000000000000000000000000..031d25285cc96bae4d6dca8067cb2c4e8fd87728 --- /dev/null +++ b/asapo_tools/src/asapo_tools/cli/cli.go @@ -0,0 +1,59 @@ +// Package contains asapo commands that can be executed from command line. +// Every CommandXxxx function that is a member of a cmd struct processes asapo xxxx command +package cli + +import ( + "errors" + "flag" + "fmt" + "io" + "os" + "reflect" + "strings" +) + +var flHelp bool + +var outBuf io.Writer = os.Stdout + +func printHelp(f *flag.FlagSet) bool { + if flHelp { + f.Usage() + return true + } else { + return false + } +} + +// DoCommand takes command name as a parameter and executes corresponding to this name cmd method +func DoCommand(name string, args []string) error { + commandName := "Command" + strings.ToUpper(name[:1]) + strings.ToLower(name[1:]) + cmd := new(command) + + methodVal := reflect.ValueOf(cmd).MethodByName(commandName) + if !methodVal.IsValid() { + return errors.New("wrong asapo command: " + name + "\nType 'asapo --help'") + } + cmd.name = name + cmd.args = args + + method := methodVal.Interface().(func() error) + + return method() +} + +// PrintAllCommands prints all available commands (found wihtin methods of cmd) +func PrintAllCommands() { + fmt.Fprintln(outBuf, "\nCommands:") + cmd := new(command) + CmdType := reflect.TypeOf(cmd) + for i := 0; i < CmdType.NumMethod(); i++ { + methodVal := CmdType.Method(i) + if strings.HasPrefix(methodVal.Name, "Command") { + method := methodVal.Func.Interface().(func(*command) error) + cmd.name = strings.ToLower(methodVal.Name)[7:] + cmd.args = []string{"description"} + method(cmd) + } + } +} diff --git a/asapo_tools/src/asapo_tools/cli/command.go b/asapo_tools/src/asapo_tools/cli/command.go new file mode 100644 index 0000000000000000000000000000000000000000..b61171a1a94dc23e97eebd89f41d7d8bd635198b --- /dev/null +++ b/asapo_tools/src/asapo_tools/cli/command.go @@ -0,0 +1,40 @@ +package cli + +import ( + "errors" + "flag" + "fmt" +) + +// A command consists of a command name and arguments, passed to this command (all after asapo name ...) +type command struct { + name string + args []string +} + +// description prints description line and returns true if first command argument is "description". +func (cmd *command) description(d string) bool { + if len(cmd.args) == 1 && cmd.args[0] == "description" { + fmt.Fprintf(outBuf, " %-10s %s\n", cmd.name, d) + return true + } + return false +} + +func (cmd *command) errBadOptions(err string) error { + return errors.New("asapo " + cmd.name + ": " + err + "\nType 'asapo " + cmd.name + " --help'") +} + +// createDefaultFlagset creates new flagset and adds default help behaviour. 
+func (cmd *command) createDefaultFlagset(description, args string) *flag.FlagSet { + + flags := flag.NewFlagSet(cmd.name, flag.ExitOnError) + flags.BoolVar(&flHelp, "help", false, "Print usage") + flags.Usage = func() { + fmt.Fprintf(outBuf, "Usage:\t\nasapo %s "+args, cmd.name) + fmt.Fprintf(outBuf, "\n\n%s\n", description) + flags.PrintDefaults() + } + + return flags +} diff --git a/asapo_tools/src/asapo_tools/cli/command_test.go b/asapo_tools/src/asapo_tools/cli/command_test.go new file mode 100644 index 0000000000000000000000000000000000000000..c4e89f2239f69728ba190d4db6d7aa59011550b0 --- /dev/null +++ b/asapo_tools/src/asapo_tools/cli/command_test.go @@ -0,0 +1,34 @@ +package cli + +import ( + "bytes" + "testing" + "github.com/stretchr/testify/assert" +) + +var CommandTests = []struct { + cmd command + answer string +}{ + {command{"token", []string{"-secret", "secret_file", "beamtime"}}, "secret"}, + {command{"dummy", []string{"description"}}, "wrong"}, +} + +func TestCommand(t *testing.T) { + outBuf = new(bytes.Buffer) + + for _, test := range CommandTests { + outBuf.(*bytes.Buffer).Reset() + err := DoCommand(test.cmd.name, test.cmd.args) + assert.Contains(t, err.Error(), test.answer, "") + assert.NotNil(t, err, "Should be error") + + } + +} + +func TestPrintAllCommands(t *testing.T) { + outBuf = new(bytes.Buffer) + PrintAllCommands() + assert.Contains(t, outBuf.(*bytes.Buffer).String(), "token", "all commands must have token") +} diff --git a/asapo_tools/src/asapo_tools/cli/token.go b/asapo_tools/src/asapo_tools/cli/token.go new file mode 100644 index 0000000000000000000000000000000000000000..7fdb749d21ebd6c1fae66a1ad629b29b5c00a0a5 --- /dev/null +++ b/asapo_tools/src/asapo_tools/cli/token.go @@ -0,0 +1,76 @@ +package cli + +import ( + "errors" + "os" + "fmt" + "asapo_common/utils" +) + +type tokenFlags struct { + BeamtimeID string + SecretFile string +} + +func generateToken(id string,secret string) string { + hmac := utils.NewHMACAuth(secret) + token,err := hmac.GenerateToken(&id) + + if (err!=nil) { + fmt.Println(err.Error()) + } + return token +} + + +// GenerateToken generates token for workers +func (cmd *command) CommandToken() error { + + message_string := "Generate token" + + if cmd.description(message_string) { + return nil + } + + flags, err := cmd.parseTokenFlags(message_string) + if err != nil { + return err + } + + secret, err := utils.ReadFirstStringFromFile(flags.SecretFile) + if err !=nil { + return err + } + + fmt.Fprintf(outBuf, "%s\n", generateToken(flags.BeamtimeID,secret)) + + return nil +} + + +func (cmd *command) parseTokenFlags(message_string string) (tokenFlags, error) { + + var flags tokenFlags + flagset := cmd.createDefaultFlagset(message_string, "<beamtime id>") + flagset.StringVar(&flags.SecretFile, "secret", "", "path to file with secret") + + flagset.Parse(cmd.args) + + if printHelp(flagset) { + os.Exit(0) + } + + flags.BeamtimeID = flagset.Arg(0) + + if flags.BeamtimeID == "" { + return flags, errors.New("beamtime id missed ") + } + + if flags.SecretFile == "" { + return flags, errors.New("secret file missed ") + } + + + return flags, nil + +} diff --git a/asapo_tools/src/asapo_tools/cli/token_test.go b/asapo_tools/src/asapo_tools/cli/token_test.go new file mode 100644 index 0000000000000000000000000000000000000000..4d443f3c493f86aceb6156e5638aeb2f802a71bb --- /dev/null +++ b/asapo_tools/src/asapo_tools/cli/token_test.go @@ -0,0 +1,38 @@ +package cli + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "bytes" + "io/ioutil" + "os" +) + 
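
For reference, the token emitted by the `asapo token` command above is a plain HMAC over the beamtime id. Below is a minimal, self-contained sketch of the same computation using only the standard library; it mirrors generateHMACToken from asapo_common/utils/authorization.go introduced further down in this change, and with secret "secret" and beamtime id "beamtime_id" it should print the token value asserted in token_test.go.

package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/base64"
	"fmt"
)

// generateHMACToken reproduces the token scheme used by utils.NewHMACAuth /
// GenerateToken: base64url( HMAC-SHA256(key=secret, msg=beamtime_id) ).
func generateHMACToken(value, key string) string {
	mac := hmac.New(sha256.New, []byte(key))
	mac.Write([]byte(value))
	return base64.URLEncoding.EncodeToString(mac.Sum(nil))
}

func main() {
	fmt.Println(generateHMACToken("beamtime_id", "secret"))
}
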
+var tokenTests = []struct { + cmd command + answer string + msg string +}{ + {command{args: []string{"beamtime_id"}}, "secret", "no secret parameter"}, + {command{args: []string{"-secret","secret.tmp"}}, "beamtime id", "no file"}, + {command{args: []string{"-secret","not_existing_file","beamtime_id"}}, "not_existing_file", "no file"}, + {command{args: []string{"-secret","secret.tmp","beamtime_id"}}, "eodk3s5ZXwACLGyVA63MZYcOTWuWE4bceI9Vxl9zejI=", "no file"}, +} + +func TestParseTokenFlags(t *testing.T) { + + ioutil.WriteFile("secret.tmp", []byte("secret"), 0644) + outBuf = new(bytes.Buffer) + for _, test := range tokenTests { + err := test.cmd.CommandToken() + if err == nil { + assert.Contains(t, outBuf.(*bytes.Buffer).String(), test.answer, test.msg) + } else { + assert.Contains(t, err.Error(), test.answer, test.msg) + } + + } + os.Remove("secret.tmp") + +} diff --git a/asapo_tools/src/asapo_tools/main/asapo.go b/asapo_tools/src/asapo_tools/main/asapo.go new file mode 100644 index 0000000000000000000000000000000000000000..e326889315ffd4f4c324a83bc646b79aa09acb92 --- /dev/null +++ b/asapo_tools/src/asapo_tools/main/asapo.go @@ -0,0 +1,33 @@ +package main + +import ( + "flag" + "fmt" + "os" + "asapo_tools/version" + "asapo_tools/cli" +) + +var ( + flHelp = flag.Bool("help", false, "Print usage") +) + +func main() { + + if ret := version.ShowVersion(os.Stdout, "asapo"); ret { + return + } + + flag.Parse() + + if *flHelp || flag.NArg() == 0 { + flag.Usage() + cli.PrintAllCommands() + return + } + + if err := cli.DoCommand(flag.Arg(0), flag.Args()[1:]); err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(1) + } +} diff --git a/asapo_tools/src/asapo_tools/version/version.go b/asapo_tools/src/asapo_tools/version/version.go new file mode 100644 index 0000000000000000000000000000000000000000..e2427a17c6038e422fb6c7b102562dd254043f33 --- /dev/null +++ b/asapo_tools/src/asapo_tools/version/version.go @@ -0,0 +1,23 @@ +package version + +import ( + "flag" + "fmt" + "io" + "os" +) + +var version, buildTime, gitCommit, shortVersion string + +func ShowVersion(w io.Writer, name string) bool { + flags := flag.NewFlagSet("version", flag.ExitOnError) + flag.Bool("version", false, "Print version information") // to have it in main help + flVersion := flags.Bool("version", false, "Print version information") + flags.Bool("help", false, "Print usage") // define help flag but ignore it + flags.Parse(os.Args[1:]) + if *flVersion { + fmt.Fprintf(w, "%s version %s, build time %s\n", name, version, buildTime) + return true + } + return false +} diff --git a/asapo_tools/src/asapo_tools/version/version_lib.go.in b/asapo_tools/src/asapo_tools/version/version_lib.go.in new file mode 100644 index 0000000000000000000000000000000000000000..3fe1989dbbc62d3515284e43bd2c3e415e874471 --- /dev/null +++ b/asapo_tools/src/asapo_tools/version/version_lib.go.in @@ -0,0 +1,10 @@ +package version + +// Default build-time variable for library-import. +// This file is overridden on build with build-time informations. 
+func init(){ + gitCommit = "@VERSION_SHA1@" + version = "@VERSION@" + shortVersion = "@VERSION_SHORT@" + buildTime = "@TIMESTAMP@" +} diff --git a/authorizer/CMakeLists.txt b/authorizer/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..1b4b8165a9541757d1087ae9a41b2f4ea205d2b8 --- /dev/null +++ b/authorizer/CMakeLists.txt @@ -0,0 +1,38 @@ +set (TARGET_NAME asapo-authorizer) + +if (NOT "$ENV{GOPATH}" STREQUAL "") + set(GOPATH $ENV{GOPATH}) +endif() + +if (NOT GOPATH) + message (FATAL_ERROR "GOPATH not set") +endif() + +message(STATUS "global gopath ${GOPATH}") + +IF(WIN32) + set (gopath "${GOPATH}\;${CMAKE_CURRENT_SOURCE_DIR}\;${CMAKE_SOURCE_DIR}/common/go") + set (exe_name "${TARGET_NAME}.exe") +ELSE() + set (gopath ${GOPATH}:${CMAKE_CURRENT_SOURCE_DIR}:${CMAKE_SOURCE_DIR}/common/go) + set (exe_name "${TARGET_NAME}") +ENDIF() + +include(testing_go) + +add_custom_target(asapo-authorizer ALL + COMMAND ${CMAKE_COMMAND} -E env GOPATH=${gopath} + go build ${GO_OPTS} -o ${exe_name} asapo_authorizer/main + VERBATIM) +define_property(TARGET PROPERTY EXENAME + BRIEF_DOCS <executable name> + FULL_DOCS <full-doc>) + +set_target_properties(asapo-authorizer PROPERTIES EXENAME ${CMAKE_CURRENT_BINARY_DIR}/${exe_name}) + + +install(PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/${exe_name} DESTINATION bin) + +gotest(${TARGET_NAME} "./...") +#go_integration_test(${TARGET_NAME}-connectdb "./..." "MongoDBConnect") +#go_integration_test(${TARGET_NAME}-nextrecord "./..." "MongoDBNext") diff --git a/authorizer/src/asapo_authorizer/main/authorizer.go b/authorizer/src/asapo_authorizer/main/authorizer.go new file mode 100644 index 0000000000000000000000000000000000000000..1a0f39f42406a0412735911b9a717d01fba70bd6 --- /dev/null +++ b/authorizer/src/asapo_authorizer/main/authorizer.go @@ -0,0 +1,32 @@ +//+build !test + +package main + +import ( + log "asapo_common/logger" + "asapo_authorizer/server" + "flag" + "os" +) + +func PrintUsage() { + log.Fatal("Usage: " + os.Args[0] + " -config <config file>") +} + +func main() { + var fname = flag.String("config", "", "config file path") + + flag.Parse() + if *fname == "" { + PrintUsage() + } + + logLevel, err := server.ReadConfig(*fname) + if err != nil { + log.Fatal(err.Error()) + } + + log.SetLevel(logLevel) + + server.Start() +} diff --git a/authorizer/src/asapo_authorizer/server/authorize.go b/authorizer/src/asapo_authorizer/server/authorize.go new file mode 100644 index 0000000000000000000000000000000000000000..1326496071884dd7be2057a880379b500583aa25 --- /dev/null +++ b/authorizer/src/asapo_authorizer/server/authorize.go @@ -0,0 +1,112 @@ +package server + +import ( + "net/http" + "encoding/json" + "asapo_common/utils" + "path/filepath" + "strings" + log "asapo_common/logger" + "errors" +) + +type authorizationRequest struct { + BeamtimeId string + OriginHost string +} + +func extractRequest(r *http.Request) (request authorizationRequest, err error) { + decoder := json.NewDecoder(r.Body) + err = decoder.Decode(&request) + return +} + +func splitHost(hostPort string) string { + s := strings.Split(hostPort, ":") + return s[0] +} + +func getBeamlineFromIP(ip string) (string, error) { + host := splitHost(ip) + lines, err := utils.ReadStringsFromFile(settings.IpBeamlineMappingFolder + string(filepath.Separator) + host) + if err != nil { + return "", err + } + + if len(lines) < 1 || len(lines[0]) == 0 { + return "", errors.New("file is empty") + } + + return lines[0], nil +} + +func checkBeamtimeExistsInStrings(info beamtimeInfo, lines []string) 
bool { + for _, line := range lines { + words := strings.Fields(line) + if len(words) < 3 { + continue + } + if words[1] == info.Beamline && words[2] == info.BeamtimeId { + return true + } + } + return false +} + +func beamtimeExists(info beamtimeInfo) bool { + lines, err := utils.ReadStringsFromFile(settings.BeamtimeBeamlineMappingFile) + + if err != nil || len(lines) < 3 { + return false + } + lines = lines[2:] + return checkBeamtimeExistsInStrings(info, lines) +} + +func authorize(request authorizationRequest) (bool, beamtimeInfo) { + for _, pair := range settings.AlwaysAllowedBeamtimes { + if pair.BeamtimeId == request.BeamtimeId { + return true, pair + } + } + var answer beamtimeInfo + + beamline, err := getBeamlineFromIP(request.OriginHost) + if err != nil { + log.Error("cannot find beamline for " + request.OriginHost + " - " + err.Error()) + return false, beamtimeInfo{} + } + + answer.Beamline = beamline + answer.BeamtimeId = request.BeamtimeId + if (!beamtimeExists(answer)) { + log.Error("cannot authorize beamtime " + answer.BeamtimeId + " for " + request.OriginHost + " in " + answer.Beamline) + return false, beamtimeInfo{} + } + log.Debug("authorized beamtime " + answer.BeamtimeId + " for " + request.OriginHost + " in " + answer.Beamline) + + return true, answer + +} + +func routeAuthorize(w http.ResponseWriter, r *http.Request) { + request, err := extractRequest(r) + if err != nil { + w.WriteHeader(http.StatusBadRequest) + return + } + ok, beamtimeInfo := authorize(request) + if (!ok) { + w.WriteHeader(http.StatusUnauthorized) + return + } + + res, err := utils.MapToJson(&beamtimeInfo) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + return + + } + w.WriteHeader(http.StatusOK) + w.Write([]byte(res)) +} diff --git a/authorizer/src/asapo_authorizer/server/authorize_test.go b/authorizer/src/asapo_authorizer/server/authorize_test.go new file mode 100644 index 0000000000000000000000000000000000000000..1c1b39b34d524afc951e03fbdd71d981bceba919 --- /dev/null +++ b/authorizer/src/asapo_authorizer/server/authorize_test.go @@ -0,0 +1,162 @@ +package server + +import ( + "asapo_common/utils" + "github.com/stretchr/testify/assert" + "net/http" + "net/http/httptest" + "strings" + "testing" + "io/ioutil" + "os" +) + +type request struct { + path string + cmd string + answer int + message string +} + +func allowBeamlines(beamlines []beamtimeInfo) { + settings.AlwaysAllowedBeamtimes=beamlines +} + + +func containsMatcher(substr string) func(str string) bool { + return func(str string) bool { return strings.Contains(str, substr) } +} + +func makeRequest(request authorizationRequest) string { + buf, _ := utils.MapToJson(request) + return string(buf) +} + +func doAuthorizeRequest(path string,buf string) *httptest.ResponseRecorder { + mux := utils.NewRouter(listRoutes) + req, _ := http.NewRequest("POST", path, strings.NewReader(buf)) + w := httptest.NewRecorder() + mux.ServeHTTP(w, req) + return w +} + +func TestAuthorizeOK(t *testing.T) { + allowBeamlines([]beamtimeInfo{{"asapo_test","beamline"}}) + request := makeRequest(authorizationRequest{"asapo_test","host"}) + w := doAuthorizeRequest("/authorize",request) + + body, _ := ioutil.ReadAll(w.Body) + + assert.Contains(t, string(body), "asapo_test", "") + assert.Contains(t, string(body), "beamline", "") + assert.Equal(t, http.StatusOK, w.Code, "") +} + +func TestNotAuthorized(t *testing.T) { + request := makeRequest(authorizationRequest{"any_id","host"}) + w := doAuthorizeRequest("/authorize",request) + assert.Equal(t, 
http.StatusUnauthorized, w.Code, "") +} + + +func TestAuthorizeWrongRequest(t *testing.T) { + w := doAuthorizeRequest("/authorize","babla") + assert.Equal(t, http.StatusBadRequest, w.Code, "") +} + + +func TestAuthorizeWrongPath(t *testing.T) { + w := doAuthorizeRequest("/authorized","") + assert.Equal(t, http.StatusNotFound, w.Code, "") +} + +func TestAlwaysAuthorizeAllowed(t *testing.T) { + allowBeamlines([]beamtimeInfo{{"test","beamline"}}) + request := authorizationRequest{"asapo_test","host"} + ok,_ := authorize(request) + assert.Equal(t,false, ok, "") +} + +func TestSplitHost(t *testing.T) { + host := splitHost("127.0.0.1:112") + assert.Equal(t,"127.0.0.1", host, "") +} + + +func TestSplitHostNoPort(t *testing.T) { + host := splitHost("127.0.0.1") + assert.Equal(t,"127.0.0.1", host, "") +} + +func TestGetBeamlineFromIP(t *testing.T) { + beamline, err := getBeamlineFromIP("127.0.0.1:112") + assert.NotNil(t,err, "") + assert.Empty(t,beamline, "") + +} +func TestCheckBeamtimeExistsInStringsFalse(t *testing.T) { + beamInfo := beamtimeInfo{"123","bl"} + lines:=[]string{"111","flash pg2 11003932 beamtime start: 2018-06-11","petra3 p01 c20180508-000-COM20181 commissioning"} + ok := checkBeamtimeExistsInStrings(beamInfo,lines) + assert.False(t,ok, "") +} + + +func TestCheckBeamtimeExistsInStringsOk(t *testing.T) { + beamInfo := beamtimeInfo{"11003932","pg2"} + lines:=[]string{"111","flash pg2 11003932 beamtime start: 2018-06-11","petra3 p01 c20180508-000-COM20181 commissioning"} + ok := checkBeamtimeExistsInStrings(beamInfo,lines) + assert.True(t,ok, "") +} + +func TestAuthorizeWithFile(t *testing.T) { + settings.IpBeamlineMappingFolder="." + settings.BeamtimeBeamlineMappingFile="file.tmp" + + lines :=` +Open beam times as of Thursday, 2018/06/21 11:32 +Faclty BL BeamTime Id kind +flash bl1 11003924 beamtime start: 2018-04-24 +flash bl2 11003921 beamtime start: 2018-06-08 +flash fl24 11001734 beamtime start: 2018-06-13 +flash pg2 11003932 beamtime start: 2018-06-11 +flash thz 11005667 beamtime start: 2018-05-24 +petra3 ext 50000181 beamtime start: 2017-04-12 +petra3 ext 50000193 beamtime start: 2017-10-12 +petra3 ext 50000202 beamtime start: 2017-12-06 +petra3 ext 50000209 beamtime start: 2018-02-19 +petra3 ext 50000211 beamtime start: 2018-02-19 +petra3 ext 50000214 beamtime start: 2018-04-23 +petra3 ext 50000215 beamtime start: 2018-03-23 +petra3 ext 50000216 beamtime start: 2018-03-23 +petra3 ext 50000217 beamtime start: 2018-03-23 +petra3 ext 50000218 beamtime start: 2018-03-23 +petra3 ext 50000219 beamtime start: 2018-04-24 +petra3 ext 50000221 beamtime start: 2018-06-14 +petra3 p01 11004172 beamtime start: 2018-06-20 +petra3 p01 c20180508-000-COM20181 commissioning +petra3 p02.1 11004341 beamtime start: 2018-06-18 +` + + ioutil.WriteFile("file.tmp", []byte(lines), 0644) + ioutil.WriteFile("127.0.0.1", []byte("bl1"), 0644) + + + request := authorizationRequest{"11003924","127.0.0.1"} + w := doAuthorizeRequest("/authorize",makeRequest(request)) + + body, _ := ioutil.ReadAll(w.Body) + assert.Contains(t, string(body), request.BeamtimeId, "") + assert.Contains(t, string(body), "bl1", "") + assert.Equal(t, http.StatusOK, w.Code, "") + + request = authorizationRequest{"wrong","127.0.0.1"} + w = doAuthorizeRequest("/authorize",makeRequest(request)) + assert.Equal(t, http.StatusUnauthorized, w.Code, "") + + os.Remove("127.0.0.1") + os.Remove("file.tmp") + +} + + diff --git a/authorizer/src/asapo_authorizer/server/get_health.go b/authorizer/src/asapo_authorizer/server/get_health.go new file mode 
100644 index 0000000000000000000000000000000000000000..b7d9f2446fb62c2c3e7d353172978d4a9682e832 --- /dev/null +++ b/authorizer/src/asapo_authorizer/server/get_health.go @@ -0,0 +1,11 @@ +package server + +import ( + "net/http" +) + + +func routeGetHealth(w http.ResponseWriter, r *http.Request) { + r.Header.Set("Content-type", "application/json") + w.WriteHeader(http.StatusNoContent) +} diff --git a/authorizer/src/asapo_authorizer/server/get_health_test.go b/authorizer/src/asapo_authorizer/server/get_health_test.go new file mode 100644 index 0000000000000000000000000000000000000000..fc8d6c2fad3f5fb5e09c5ee74d799d34d9bf8a30 --- /dev/null +++ b/authorizer/src/asapo_authorizer/server/get_health_test.go @@ -0,0 +1,18 @@ +package server + +import ( + "github.com/stretchr/testify/assert" + "net/http" + "testing" + "net/http/httptest" + "asapo_common/utils" +) + + +func TestGetNext(t *testing.T) { + mux := utils.NewRouter(listRoutes) + req, _ := http.NewRequest("GET", "/health-check", nil) + w := httptest.NewRecorder() + mux.ServeHTTP(w, req) + assert.Equal(t, http.StatusNoContent, w.Code) +} diff --git a/authorizer/src/asapo_authorizer/server/listroutes.go b/authorizer/src/asapo_authorizer/server/listroutes.go new file mode 100644 index 0000000000000000000000000000000000000000..f32c5c8303ab09a9be2f4b2951f52deecd51bf24 --- /dev/null +++ b/authorizer/src/asapo_authorizer/server/listroutes.go @@ -0,0 +1,21 @@ +package server + +import ( + "asapo_common/utils" +) + +var listRoutes = utils.Routes{ + utils.Route{ + "Authorize", + "POST", + "/authorize", + routeAuthorize, + }, + utils.Route{ + "HealthCheck", + "Get", + "/health-check", + routeGetHealth, + }, + +} diff --git a/authorizer/src/asapo_authorizer/server/server.go b/authorizer/src/asapo_authorizer/server/server.go new file mode 100644 index 0000000000000000000000000000000000000000..4215eecec92af09406642566126b4ca964893e22 --- /dev/null +++ b/authorizer/src/asapo_authorizer/server/server.go @@ -0,0 +1,18 @@ +package server + + +type beamtimeInfo struct { + BeamtimeId string + Beamline string +} + +type serverSettings struct { + Port int + LogLevel string + IpBeamlineMappingFolder string + BeamtimeBeamlineMappingFile string + AlwaysAllowedBeamtimes []beamtimeInfo +} + +var settings serverSettings + diff --git a/authorizer/src/asapo_authorizer/server/server_nottested.go b/authorizer/src/asapo_authorizer/server/server_nottested.go new file mode 100644 index 0000000000000000000000000000000000000000..6ad0c98f35e819a7c6d4f491dff4829702fcc5d1 --- /dev/null +++ b/authorizer/src/asapo_authorizer/server/server_nottested.go @@ -0,0 +1,31 @@ +//+build !test + +package server + +import ( + log "asapo_common/logger" + "asapo_common/utils" + "errors" + "net/http" + "strconv" +) + +func Start() { + mux := utils.NewRouter(listRoutes) + log.Info("Listening on port: " + strconv.Itoa(settings.Port)) + log.Fatal(http.ListenAndServe(":"+strconv.Itoa(settings.Port), http.HandlerFunc(mux.ServeHTTP))) +} + +func ReadConfig(fname string) (log.Level, error) { + if err := utils.ReadJsonFromFile(fname, &settings); err != nil { + return log.FatalLevel, err + } + + if settings.Port == 0 { + return log.FatalLevel, errors.New("Server port not set") + } + + level, err := log.LevelFromString(settings.LogLevel) + + return level, err +} diff --git a/broker/CMakeLists.txt b/broker/CMakeLists.txt index 4b464e6d1498b6727d7cf57ee0e1e274e136dc9a..49014ff5d141594888f0e713310d1b6ece1942fe 100644 --- a/broker/CMakeLists.txt +++ b/broker/CMakeLists.txt @@ -11,10 +11,10 @@ endif() 
message(STATUS "global gopath ${GOPATH}") IF(WIN32) - set (gopath "${GOPATH}\;${CMAKE_CURRENT_SOURCE_DIR}") + set (gopath "${GOPATH}\;${CMAKE_CURRENT_SOURCE_DIR}\;${CMAKE_SOURCE_DIR}/common/go") set (exe_name "${TARGET_NAME}.exe") ELSE() - set (gopath ${GOPATH}:${CMAKE_CURRENT_SOURCE_DIR}) + set (gopath ${GOPATH}:${CMAKE_CURRENT_SOURCE_DIR}:${CMAKE_SOURCE_DIR}/common/go) set (exe_name "${TARGET_NAME}") ENDIF() diff --git a/broker/src/asapo_broker/database/mongodb.go b/broker/src/asapo_broker/database/mongodb.go index 55d9b8371a85320c6754aad974f6af1d3b45346b..de41e4f90a062beec6aea811e6fd436c41a7beb2 100644 --- a/broker/src/asapo_broker/database/mongodb.go +++ b/broker/src/asapo_broker/database/mongodb.go @@ -3,7 +3,7 @@ package database import ( - "asapo_broker/utils" + "asapo_common/utils" "encoding/json" "errors" "gopkg.in/mgo.v2" diff --git a/broker/src/asapo_broker/database/mongodb_test.go b/broker/src/asapo_broker/database/mongodb_test.go index cf17a38123464f59ff19dfc63dfcc1dafaa73e45..178346cfc1dfe82ed2a9b0a6b3920b034611f0a3 100644 --- a/broker/src/asapo_broker/database/mongodb_test.go +++ b/broker/src/asapo_broker/database/mongodb_test.go @@ -3,7 +3,7 @@ package database import ( - "asapo_broker/utils" + "asapo_common/utils" "encoding/json" "github.com/stretchr/testify/assert" "sync" diff --git a/broker/src/asapo_broker/logger/logrus_logger.go b/broker/src/asapo_broker/logger/logrus_logger.go deleted file mode 100644 index 875a05c04704efbed0e3c7887ad2aa29aa74a3d0..0000000000000000000000000000000000000000 --- a/broker/src/asapo_broker/logger/logrus_logger.go +++ /dev/null @@ -1,75 +0,0 @@ -package logger - -import ( - log "github.com/sirupsen/logrus" -) - -type logRusLogger struct { - logger_entry *log.Entry -} - -func (l *logRusLogger) entry() *log.Entry { - if l.logger_entry != nil { - return l.logger_entry - } - - formatter := &log.JSONFormatter{ - FieldMap: log.FieldMap{ - log.FieldKeyMsg: "message", - }, - TimestampFormat: "2006-01-02 15:04:05.000", - } - - log.SetFormatter(formatter) - - l.logger_entry = log.WithFields(log.Fields{ - "source": "broker", - }) - - return l.logger_entry - -} - -func (l *logRusLogger) Info(args ...interface{}) { - l.entry().Info(args...) - return -} - -func (l *logRusLogger) Debug(args ...interface{}) { - l.entry().Debug(args...) - return -} - -func (l *logRusLogger) Error(args ...interface{}) { - l.entry().Error(args...) - return -} - -func (l *logRusLogger) Warning(args ...interface{}) { - l.entry().Warning(args...) - return -} - -func (l *logRusLogger) Fatal(args ...interface{}) { - l.entry().Fatal(args...) 
- return -} - -func (l *logRusLogger) SetLevel(level Level) { - logrusLevel := log.InfoLevel - switch level { - case DebugLevel: - logrusLevel = log.DebugLevel - case InfoLevel: - logrusLevel = log.InfoLevel - case WarnLevel: - logrusLevel = log.WarnLevel - case ErrorLevel: - logrusLevel = log.ErrorLevel - case FatalLevel: - logrusLevel = log.FatalLevel - } - - log.SetLevel(logrusLevel) - return -} diff --git a/broker/src/asapo_broker/main/broker.go b/broker/src/asapo_broker/main/broker.go index b4bb3779f05f7c8074f726a9b3937c26abfc08cd..30a017728a8aaeede6519cdd6b525e5de47e1f7f 100644 --- a/broker/src/asapo_broker/main/broker.go +++ b/broker/src/asapo_broker/main/broker.go @@ -3,10 +3,10 @@ package main import ( - "flag" "asapo_broker/database" - log "asapo_broker/logger" "asapo_broker/server" + log "asapo_common/logger" + "flag" "os" ) diff --git a/broker/src/asapo_broker/server/get_id.go b/broker/src/asapo_broker/server/get_id.go index 50624ba0f442c00e0ea2d2937ee3aaf8234eb90c..f66d61970091d8429d6cc6640315563917322d1c 100644 --- a/broker/src/asapo_broker/server/get_id.go +++ b/broker/src/asapo_broker/server/get_id.go @@ -1,8 +1,8 @@ package server import ( - "asapo_broker/logger" - "asapo_broker/utils" + "asapo_common/logger" + "asapo_common/utils" "github.com/gorilla/mux" "net/http" "strconv" @@ -31,6 +31,11 @@ func routeGetByID(w http.ResponseWriter, r *http.Request) { return } + if err := testAuth(r, db_name); err != nil { + writeAuthAnswer(w, "get id", db_name, err.Error()) + return + } + answer, code := getRecordByID(db_name, id) w.WriteHeader(code) w.Write(answer) diff --git a/broker/src/asapo_broker/server/get_id_test.go b/broker/src/asapo_broker/server/get_id_test.go index b85aa898956ea7e12c5fb45f41248fc1f7be2d85..0e18c932ad0f605c613dce336a1236c9dce1c6f3 100644 --- a/broker/src/asapo_broker/server/get_id_test.go +++ b/broker/src/asapo_broker/server/get_id_test.go @@ -2,8 +2,8 @@ package server import ( "asapo_broker/database" - "asapo_broker/logger" - "asapo_broker/utils" + "asapo_common/logger" + "asapo_common/utils" "errors" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" @@ -28,6 +28,7 @@ type GetIDTestSuite struct { } func (suite *GetIDTestSuite) SetupTest() { + prepareTestAuth() statistics.Reset() suite.mock_db = new(database.MockedDatabase) db = suite.mock_db @@ -46,29 +47,29 @@ func TestGetIDTestSuite(t *testing.T) { } func (suite *GetIDTestSuite) TestGetIDWithWrongDatabaseName() { - suite.mock_db.On("GetRecordByID", "foo", 1).Return([]byte(""), + suite.mock_db.On("GetRecordByID", expectedBeamtimeId, 1).Return([]byte(""), &database.DBError{utils.StatusWrongInput, ""}) - logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("get id request in foo"))) + logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("get id request in"))) - w := doRequest("/database/foo/1") + w := doRequest("/database/" + expectedBeamtimeId + "/1" + correctTokenSuffix) suite.Equal(http.StatusBadRequest, w.Code, "wrong database name") } func (suite *GetIDTestSuite) TestGetIDWithInternalDBError() { - suite.mock_db.On("GetRecordByID", "foo", 1).Return([]byte(""), errors.New("")) - logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("get id request in foo"))) + suite.mock_db.On("GetRecordByID", expectedBeamtimeId, 1).Return([]byte(""), errors.New("")) + logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("get id request in"))) - w := doRequest("/database/foo/1") + w := doRequest("/database/" + expectedBeamtimeId + "/1" + correctTokenSuffix) 
suite.Equal(http.StatusInternalServerError, w.Code, "internal error") } func (suite *GetIDTestSuite) TestGetIDOK() { - suite.mock_db.On("GetRecordByID", "dbname", 1).Return([]byte("Hello"), nil) - logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("get id request in dbname"))) + suite.mock_db.On("GetRecordByID", expectedBeamtimeId, 1).Return([]byte("Hello"), nil) + logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("get id request in"))) - w := doRequest("/database/dbname/1") + w := doRequest("/database/" + expectedBeamtimeId + "/1" + correctTokenSuffix) suite.Equal(http.StatusOK, w.Code, "GetID OK") suite.Equal("Hello", string(w.Body.Bytes()), "GetID sends data") } diff --git a/broker/src/asapo_broker/server/get_next.go b/broker/src/asapo_broker/server/get_next.go index 3cc2a826e4c4e649c5543042e9eba4ee609e9108..5a42c3044238600655979d9919491293595c5c1a 100644 --- a/broker/src/asapo_broker/server/get_next.go +++ b/broker/src/asapo_broker/server/get_next.go @@ -2,8 +2,8 @@ package server import ( "asapo_broker/database" - "asapo_broker/logger" - "asapo_broker/utils" + "asapo_common/logger" + "asapo_common/utils" "github.com/gorilla/mux" "net/http" ) @@ -21,6 +21,12 @@ func routeGetNext(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusBadRequest) return } + + if err := testAuth(r, db_name); err != nil { + writeAuthAnswer(w, "get next", db_name, err.Error()) + return + } + answer, code := getNextRecord(db_name) w.WriteHeader(code) w.Write(answer) diff --git a/broker/src/asapo_broker/server/get_next_test.go b/broker/src/asapo_broker/server/get_next_test.go index 97f88e55651086f5a58b1d655ef0977bd143001b..268390973765572c78bcc5dd1de4c2b7a7b87f36 100644 --- a/broker/src/asapo_broker/server/get_next_test.go +++ b/broker/src/asapo_broker/server/get_next_test.go @@ -1,19 +1,33 @@ package server import ( + "asapo_broker/database" + "asapo_common/logger" + "asapo_common/utils" "errors" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/suite" - "asapo_broker/database" - "asapo_broker/logger" - "asapo_broker/utils" "net/http" "net/http/httptest" "strings" "testing" ) +var correctTokenSuffix, wrongTokenSuffix, suffixWithWrongToken, expectedBeamtimeId string + +func prepareTestAuth() { + expectedBeamtimeId = "beamtime_id" + auth = utils.NewHMACAuth("secret") + token, err := auth.GenerateToken(&expectedBeamtimeId) + if err != nil { + panic(err) + } + correctTokenSuffix = "?token=" + token + wrongTokenSuffix = "?blablabla=aa" + suffixWithWrongToken = "?token=blabla" +} + type request struct { path string cmd string @@ -52,8 +66,8 @@ func (suite *GetNextTestSuite) SetupTest() { statistics.Reset() suite.mock_db = new(database.MockedDatabase) db = suite.mock_db + prepareTestAuth() logger.SetMockLog() - ExpectCopyClose(suite.mock_db) } func (suite *GetNextTestSuite) TearDownTest() { @@ -66,38 +80,58 @@ func TestGetNextTestSuite(t *testing.T) { suite.Run(t, new(GetNextTestSuite)) } +func (suite *GetNextTestSuite) TestGetNextWithWrongToken() { + logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("wrong token"))) + + w := doRequest("/database/" + expectedBeamtimeId + "/next" + suffixWithWrongToken) + + suite.Equal(http.StatusUnauthorized, w.Code, "wrong token") +} + +func (suite *GetNextTestSuite) TestGetNextWithNoToken() { + logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("cannot extract"))) + + w := doRequest("/database/" + expectedBeamtimeId + "/next" + wrongTokenSuffix) + + suite.Equal(http.StatusUnauthorized, 
w.Code, "no token") +} + func (suite *GetNextTestSuite) TestGetNextWithWrongDatabaseName() { - suite.mock_db.On("GetNextRecord", "foo").Return([]byte(""), + suite.mock_db.On("GetNextRecord", expectedBeamtimeId).Return([]byte(""), &database.DBError{utils.StatusWrongInput, ""}) - logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("get next request in foo"))) + logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("get next request"))) + ExpectCopyClose(suite.mock_db) - w := doRequest("/database/foo/next") + w := doRequest("/database/" + expectedBeamtimeId + "/next" + correctTokenSuffix) suite.Equal(http.StatusBadRequest, w.Code, "wrong database name") } func (suite *GetNextTestSuite) TestGetNextWithInternalDBError() { - suite.mock_db.On("GetNextRecord", "foo").Return([]byte(""), errors.New("")) - logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("get next request in foo"))) + suite.mock_db.On("GetNextRecord", expectedBeamtimeId).Return([]byte(""), errors.New("")) + logger.MockLog.On("Error", mock.MatchedBy(containsMatcher("get next request"))) + ExpectCopyClose(suite.mock_db) - w := doRequest("/database/foo/next") + w := doRequest("/database/" + expectedBeamtimeId + "/next" + correctTokenSuffix) suite.Equal(http.StatusInternalServerError, w.Code, "internal error") } func (suite *GetNextTestSuite) TestGetNextWithGoodDatabaseName() { - suite.mock_db.On("GetNextRecord", "dbname").Return([]byte("Hello"), nil) - logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("get next request in dbname"))) + suite.mock_db.On("GetNextRecord", expectedBeamtimeId).Return([]byte("Hello"), nil) + logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("get next request"))) + ExpectCopyClose(suite.mock_db) - w := doRequest("/database/dbname/next") + w := doRequest("/database/" + expectedBeamtimeId + "/next" + correctTokenSuffix) suite.Equal(http.StatusOK, w.Code, "GetNext OK") suite.Equal("Hello", string(w.Body.Bytes()), "GetNext sends data") } func (suite *GetNextTestSuite) TestGetNextAddsCounter() { - suite.mock_db.On("GetNextRecord", "dbname").Return([]byte("Hello"), nil) - logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("get next request in dbname"))) + suite.mock_db.On("GetNextRecord", expectedBeamtimeId).Return([]byte("Hello"), nil) + logger.MockLog.On("Debug", mock.MatchedBy(containsMatcher("get next request in "+expectedBeamtimeId))) + ExpectCopyClose(suite.mock_db) - doRequest("/database/dbname/next") + doRequest("/database/" + expectedBeamtimeId + "/next" + correctTokenSuffix) suite.Equal(1, statistics.GetCounter(), "GetNext increases counter") } diff --git a/broker/src/asapo_broker/server/listroutes.go b/broker/src/asapo_broker/server/listroutes.go index 2a6f70ecc1de6e6dc6517d540d6609ca6e00d7ac..ceee311d46043ccbaca28e037b07c4aa97457d3e 100644 --- a/broker/src/asapo_broker/server/listroutes.go +++ b/broker/src/asapo_broker/server/listroutes.go @@ -1,7 +1,7 @@ package server import ( - "asapo_broker/utils" + "asapo_common/utils" ) var listRoutes = utils.Routes{ diff --git a/broker/src/asapo_broker/server/request_common.go b/broker/src/asapo_broker/server/request_common.go new file mode 100644 index 0000000000000000000000000000000000000000..775ddd9bdebd8343e62b5c6c9729510f7ebbd084 --- /dev/null +++ b/broker/src/asapo_broker/server/request_common.go @@ -0,0 +1,30 @@ +package server + +import ( + "asapo_common/logger" + "errors" + "net/http" +) + +func writeAuthAnswer(w http.ResponseWriter, requestName string, db_name string, err string) { + log_str := "processing " + 
requestName + " request in " + db_name + " at " + settings.BrokerDbAddress + logger.Error(log_str + " - " + err) + w.WriteHeader(http.StatusUnauthorized) + w.Write([]byte(err)) +} + +func testAuth(r *http.Request, beamtime_id string) error { + token_got := r.URL.Query().Get("token") + + if len(token_got) == 0 { + return errors.New("cannot extract token from request") + } + + token_expect, _ := auth.GenerateToken(&beamtime_id) + + if token_got != token_expect { + return errors.New("wrong token") + } + + return nil +} diff --git a/broker/src/asapo_broker/server/server.go b/broker/src/asapo_broker/server/server.go index d1073b6322c865046e8a447f00c98b14e16471ec..39213b7b1c3edd8e8edcb7e5bebf9d98ed5802d0 100644 --- a/broker/src/asapo_broker/server/server.go +++ b/broker/src/asapo_broker/server/server.go @@ -2,6 +2,7 @@ package server import ( "asapo_broker/database" + "asapo_common/utils" ) var db database.Agent @@ -10,12 +11,14 @@ type serverSettings struct { BrokerDbAddress string MonitorDbAddress string MonitorDbName string + SecretFile string Port int LogLevel string } var settings serverSettings var statistics serverStatistics +var auth utils.Auth func InitDB(dbAgent database.Agent) error { db = dbAgent diff --git a/broker/src/asapo_broker/server/server_nottested.go b/broker/src/asapo_broker/server/server_nottested.go index 7c050275a3d806a998709f780646f2760d8eebcb..c9febf74b9fe37065d7f49387c912f67cf5f786e 100644 --- a/broker/src/asapo_broker/server/server_nottested.go +++ b/broker/src/asapo_broker/server/server_nottested.go @@ -3,9 +3,9 @@ package server import ( + log "asapo_common/logger" + "asapo_common/utils" "errors" - log "asapo_broker/logger" - "asapo_broker/utils" "net/http" "strconv" ) @@ -23,6 +23,14 @@ func Start() { log.Fatal(http.ListenAndServe(":"+strconv.Itoa(settings.Port), http.HandlerFunc(mux.ServeHTTP))) } +func createAuth() (utils.Auth, error) { + secret, err := utils.ReadFirstStringFromFile(settings.SecretFile) + if err != nil { + return nil, err + } + return utils.NewHMACAuth(secret), nil +} + func ReadConfig(fname string) (log.Level, error) { if err := utils.ReadJsonFromFile(fname, &settings); err != nil { return log.FatalLevel, err @@ -44,6 +52,16 @@ func ReadConfig(fname string) (log.Level, error) { return log.FatalLevel, errors.New("MonitorDbName not set") } + if settings.SecretFile == "" { + return log.FatalLevel, errors.New("Secret file not set") + } + + var err error + auth, err = createAuth() + if err != nil { + return log.FatalLevel, err + } + level, err := log.LevelFromString(settings.LogLevel) return level, err diff --git a/broker/src/asapo_broker/server/server_test.go b/broker/src/asapo_broker/server/server_test.go index 4a8ae87db086a1bbb8423ece8b62040e5d3288c0..6654b67159137c566cfcc7e97dc3bfd42070e0d8 100644 --- a/broker/src/asapo_broker/server/server_test.go +++ b/broker/src/asapo_broker/server/server_test.go @@ -1,11 +1,11 @@ package server import ( + "asapo_broker/database" + "asapo_common/logger" "errors" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" - "asapo_broker/database" - "asapo_broker/logger" "testing" ) diff --git a/broker/src/asapo_broker/server/statistics.go b/broker/src/asapo_broker/server/statistics.go index 7cad4056cea3c0a02f93f13c9b3c22b8b29f342e..97c6896f79252fb8f732bf6c1b6b5ba4f6bd8ac9 100644 --- a/broker/src/asapo_broker/server/statistics.go +++ b/broker/src/asapo_broker/server/statistics.go @@ -1,8 +1,8 @@ package server import ( + log "asapo_common/logger" "fmt" - log "asapo_broker/logger" "sync" "time" ) 
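
A worker-side sketch of the broker authorization added above: the client derives the token from the shared secret and passes it as a ?token= query parameter, which testAuth in request_common.go compares against the token it computes from the database name in the URL. The broker address here is an assumption (it comes from the broker settings, not shown); the secret file is the broker_secret.key deployed alongside the broker.

package main

import (
	"fmt"
	"io/ioutil"
	"net/http"

	"asapo_common/utils"
)

func main() {
	// Same secret the broker loads via settings.SecretFile.
	secret, err := utils.ReadFirstStringFromFile("broker_secret.key")
	if err != nil {
		panic(err)
	}

	beamtimeID := "11003924" // database name in the URL and beamtime id must match
	token, err := utils.NewHMACAuth(secret).GenerateToken(&beamtimeID)
	if err != nil {
		panic(err)
	}

	// Assumed address; use the host/port from the broker configuration.
	brokerAddress := "localhost:5005"
	resp, err := http.Get("http://" + brokerAddress + "/database/" + beamtimeID + "/next?token=" + token)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := ioutil.ReadAll(resp.Body)
	// A missing or wrong token is answered with 401 Unauthorized by writeAuthAnswer.
	fmt.Println(resp.StatusCode, string(body))
}
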
diff --git a/broker/src/asapo_broker/utils/helpers.go b/broker/src/asapo_broker/utils/helpers.go deleted file mode 100644 index f1d0da4dc985d0231cbe34602f1942ce01fde362..0000000000000000000000000000000000000000 --- a/broker/src/asapo_broker/utils/helpers.go +++ /dev/null @@ -1,38 +0,0 @@ -package utils - -import ( - json "encoding/json" - "io/ioutil" -) - -func StringInSlice(a string, list []string) bool { - for _, b := range list { - if b == a { - return true - } - } - return false -} - -func MapToJson(res interface{}) ([]byte, error) { - answer, err := json.Marshal(res) - if err == nil { - return answer, nil - } else { - return nil, err - } -} - -func ReadJsonFromFile(fname string, config interface{}) error { - content, err := ioutil.ReadFile(fname) - if err != nil { - return err - } - - err = json.Unmarshal(content, config) - if err != nil { - return err - } - - return nil -} diff --git a/broker/src/asapo_broker/utils/routes.go b/broker/src/asapo_broker/utils/routes.go deleted file mode 100644 index 8c8d88b270a1b9d4a13cc4d59a5b431e82fd3787..0000000000000000000000000000000000000000 --- a/broker/src/asapo_broker/utils/routes.go +++ /dev/null @@ -1,37 +0,0 @@ -package utils - -import ( - "net/http" - - "github.com/gorilla/mux" - "strings" -) - -type Routes []Route - -type Route struct { - Name string - Method string - Pattern string - HandlerFunc http.HandlerFunc -} - -func NewRouter(listRoutes Routes) *mux.Router { - router := mux.NewRouter() - for _, route := range listRoutes { - router. - Methods(route.Method). - Path(route.Pattern). - Name(route.Name). - Handler(route.HandlerFunc) - // allow routes without trailing slash - if strings.HasSuffix(route.Pattern, "/") { - router. - Methods(route.Method). - Path(strings.TrimSuffix(route.Pattern, "/")). - Name(route.Name + "_noslash"). 
- Handler(route.HandlerFunc) - } - } - return router -} diff --git a/common/cpp/include/common/networking.h b/common/cpp/include/common/networking.h index 7f59eb7cfd74afba30a696ec53e78da272a61df5..77814fc9831fa3df423749b0cfedad58e1d8b042 100644 --- a/common/cpp/include/common/networking.h +++ b/common/cpp/include/common/networking.h @@ -11,13 +11,15 @@ namespace asapo { typedef uint64_t NetworkRequestId; enum Opcode : uint8_t { - kOpcodeUnknownOp, + kOpcodeUnknownOp = 1, kOpcodeTransferData, + kOpcodeAuthorize, kOpcodeCount, }; enum NetworkErrorCode : uint16_t { kNetErrorNoError, + kNetAuthorizationError, kNetErrorFileIdAlreadyInUse, kNetErrorAllocateStorageFailed, kNetErrorInternalServerError = 65535, @@ -25,40 +27,30 @@ enum NetworkErrorCode : uint16_t { //TODO need to use an serialization framework to ensure struct consistency on different computers -/** - * @defgroup RPC - * RPC always return a response to a corresponding request - * @{ - */ +const std::size_t kMaxMessageSize = 1024; -const std::size_t kMaxFileNameSize = 1024; struct GenericRequestHeader { GenericRequestHeader(Opcode i_op_code = kOpcodeUnknownOp, uint64_t i_data_id = 0, - uint64_t i_data_size = 0, const std::string& i_file_name = ""): + uint64_t i_data_size = 0, const std::string& i_message = ""): op_code{i_op_code}, data_id{i_data_id}, data_size{i_data_size} { - auto size = std::min(i_file_name.size() + 1, kMaxFileNameSize); - memcpy(file_name, i_file_name.c_str(), size); + strncpy(message, i_message.c_str(), kMaxMessageSize); } Opcode op_code; uint64_t data_id; uint64_t data_size; - char file_name[kMaxFileNameSize]; + char message[kMaxMessageSize]; }; struct GenericNetworkResponse { Opcode op_code; NetworkRequestId request_id; NetworkErrorCode error_code; + char message[kMaxMessageSize]; }; -/** - * Possible error codes: - * - ::NET_ERR__FILENAME_ALREADY_IN_USE - * - ::NET_ERR__ALLOCATE_STORAGE_FAILED - */ + struct SendDataResponse : GenericNetworkResponse { }; -/** @} */ } diff --git a/broker/src/asapo_broker/logger/logger.go b/common/go/src/asapo_common/logger/logger.go similarity index 100% rename from broker/src/asapo_broker/logger/logger.go rename to common/go/src/asapo_common/logger/logger.go diff --git a/discovery/src/asapo_discovery/logger/logrus_logger.go b/common/go/src/asapo_common/logger/logrus_logger.go similarity index 100% rename from discovery/src/asapo_discovery/logger/logrus_logger.go rename to common/go/src/asapo_common/logger/logrus_logger.go diff --git a/broker/src/asapo_broker/logger/mock_logger.go b/common/go/src/asapo_common/logger/mock_logger.go similarity index 100% rename from broker/src/asapo_broker/logger/mock_logger.go rename to common/go/src/asapo_common/logger/mock_logger.go diff --git a/common/go/src/asapo_common/utils/authorization.go b/common/go/src/asapo_common/utils/authorization.go new file mode 100644 index 0000000000000000000000000000000000000000..48ba8e4b8505ea1c502b7c34709796f889a07a68 --- /dev/null +++ b/common/go/src/asapo_common/utils/authorization.go @@ -0,0 +1,242 @@ +package utils + +import ( + "errors" + "net/http" + "net/url" + "strings" + "context" + "github.com/dgrijalva/jwt-go" + "crypto/hmac" + "crypto/sha256" + "encoding/base64" +) + +type AuthorizationRequest struct { + Token string + Command string + URL string +} + +type AuthorizationResponce struct { + Status int + StatusText string + UserName string + Token string + ValidityTime int +} + +type Auth interface { + GenerateToken(...interface{}) (string, error) + Name() string +} + + +func (a *JWTAuth) Name() 
string { + return "Bearer" +} + + +func stripURL(u *url.URL) string { + s := u.Path + u.RawQuery + s = strings.Replace(s, "/", "", -1) + s = strings.Replace(s, "?", "", -1) + return s + +} + +func SplitAuthToken(s string) (authType, token string, err error) { + keys := strings.Split(s, " ") + + if len(keys) != 2 { + err = errors.New("authorization error - wrong token") + return + } + + authType = keys[0] + token = keys[1] + return +} + +func ExtractAuthInfo(r *http.Request) (authType, token string, err error) { + + t := r.Header.Get("Authorization") + + if t != "" { + return SplitAuthToken(t) + } + + cookie, err := r.Cookie("Authorization") + if err == nil { + return SplitAuthToken(cookie.Value) + } + + err = errors.New("no authorization info") + return + +} + +type CustomClaims struct { + jwt.StandardClaims + ExtraClaims interface{} +} + +type JobClaim struct { + BeamtimeId string +} + +type JWTAuth struct { + Key string +} + +func NewJWTAuth(key string) *JWTAuth { + a := JWTAuth{key} + return &a +} + +func (t JWTAuth) GenerateToken(val ...interface{}) (string, error) { + if len(val) != 1 { + return "", errors.New("No claims") + } + claims, ok := val[0].(*CustomClaims) + if !ok { + return "", errors.New("Wrong claims") + } + +// if claims.Duration > 0 { +// claims.ExpiresAt = time.Now().Add(claims.Duration).Unix() +// } + + token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) + tokenString, err := token.SignedString([]byte(t.Key)) + + if err != nil { + return "", err + } + + return tokenString, nil +} + +func ProcessJWTAuth(fn http.HandlerFunc, key string) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + + authType, token, err := ExtractAuthInfo(r) + + if err != nil { + http.Error(w, err.Error(), http.StatusUnauthorized) + return + } + + ctx := r.Context() + + if authType == "Bearer" { + if claims, ok := CheckJWTToken(token, key); !ok { + http.Error(w, "Internal authorization error - tocken does not match", http.StatusUnauthorized) + return + } else { + ctx = context.WithValue(ctx, "JobClaim", claims) + } + } else { + http.Error(w, "Internal authorization error - wrong auth type", http.StatusUnauthorized) + return + } + fn(w, r.WithContext(ctx)) + } +} + +func CheckJWTToken(token, key string) (jwt.Claims, bool) { + + if token == "" { + return nil, false + } + + t, err := jwt.ParseWithClaims(token, &CustomClaims{}, func(token *jwt.Token) (interface{}, error) { + return []byte(key), nil + }) + + if err == nil && t.Valid { + return t.Claims, true + } + + return nil, false +} + +func JobClaimFromContext(r *http.Request, val interface{}) error { + c := r.Context().Value("JobClaim") + + if c == nil { + return errors.New("Empty context") + } + + claim := c.(*CustomClaims) + + return MapToStruct(claim.ExtraClaims.(map[string]interface{}), val) +} + +type HMACAuth struct { + Key string +} + +func NewHMACAuth(key string) *HMACAuth { + a := HMACAuth{key} + return &a +} + +func (a *HMACAuth) Name() string { + return "Bearer" +} + + +func generateHMACToken(value string, key string) string { + mac := hmac.New(sha256.New, []byte(key)) + mac.Write([]byte(value)) + + return base64.URLEncoding.EncodeToString(mac.Sum(nil)) +} + +func (h HMACAuth) GenerateToken(val ...interface{}) (string, error) { + if len(val) != 1 { + return "", errors.New("Wrong claims") + } + value, ok := val[0].(*string) + if !ok { + return "", errors.New("Wrong claims") + } + + sha := generateHMACToken(*value, h.Key) + return sha, nil +} + +func ProcessHMACAuth(fn http.HandlerFunc, key string) 
http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + + authType, token, err := ExtractAuthInfo(r) + + if err != nil { + http.Error(w, err.Error(), http.StatusUnauthorized) + return + } + // todo extract beamline from request + value := "beamline" + if authType == "HMAC-SHA-256" { + if !checkHMACToken(value, token, key) { + http.Error(w, "Internal authorization error - tocken does not match", http.StatusUnauthorized) + return + } + } else { + http.Error(w, "Internal authorization error - wrong auth type", http.StatusUnauthorized) + return + } + fn(w, r) + } +} + +func checkHMACToken(value string, token, key string) bool { + + if token == "" { + return false + } + + generated_token := generateHMACToken(value, key) + return token == generated_token +} diff --git a/common/go/src/asapo_common/utils/helpers.go b/common/go/src/asapo_common/utils/helpers.go new file mode 100644 index 0000000000000000000000000000000000000000..94f0fdfa695044120d6c7c19ef334d0c102a1fa8 --- /dev/null +++ b/common/go/src/asapo_common/utils/helpers.go @@ -0,0 +1,78 @@ +package utils + +import ( + json "encoding/json" + "io/ioutil" + "strings" + "errors" +) + +func StringInSlice(a string, list []string) bool { + for _, b := range list { + if b == a { + return true + } + } + return false +} + +func MapToJson(res interface{}) ([]byte, error) { + answer, err := json.Marshal(res) + if err == nil { + return answer, nil + } else { + return nil, err + } +} + +func ReadJsonFromFile(fname string, config interface{}) error { + content, err := ioutil.ReadFile(fname) + if err != nil { + return err + } + + err = json.Unmarshal(content, config) + if err != nil { + return err + } + + return nil +} + + +func ReadStringsFromFile(fname string) ([]string, error) { + content, err := ioutil.ReadFile(fname) + if err != nil { + return []string{},err + } + lines := strings.Split(string(content), "\n") + + return lines,nil +} + + +func ReadFirstStringFromFile(fname string) (string, error) { + lines,err := ReadStringsFromFile(fname) + if err != nil { + return "",err + } + + if len(lines)==0 { + return "",errors.New("empty file") + } + + return lines[0],nil +} + + +func MapToStruct(m map[string]interface{}, val interface{}) error { + tmp, err := json.Marshal(m) + if err != nil { + return err + } + err = json.Unmarshal(tmp, val) + if err != nil { + return err + } + return nil +} diff --git a/discovery/src/asapo_discovery/utils/routes.go b/common/go/src/asapo_common/utils/routes.go similarity index 100% rename from discovery/src/asapo_discovery/utils/routes.go rename to common/go/src/asapo_common/utils/routes.go diff --git a/broker/src/asapo_broker/utils/status_codes.go b/common/go/src/asapo_common/utils/status_codes.go similarity index 100% rename from broker/src/asapo_broker/utils/status_codes.go rename to common/go/src/asapo_common/utils/status_codes.go diff --git a/discovery/src/asapo_discovery/utils/stucts.go b/common/go/src/asapo_common/utils/stucts.go similarity index 100% rename from discovery/src/asapo_discovery/utils/stucts.go rename to common/go/src/asapo_common/utils/stucts.go diff --git a/config/nomad/authorizer.nmd.in b/config/nomad/authorizer.nmd.in new file mode 100644 index 0000000000000000000000000000000000000000..e75992113d54a93c17de3d20b28d84d6289e509e --- /dev/null +++ b/config/nomad/authorizer.nmd.in @@ -0,0 +1,49 @@ +job "authorizer" { + datacenters = ["dc1"] + + type = "service" + + group "group" { + count = 1 + + task "service" { + driver = "raw_exec" + + config { + command = "@AUTHORIZER_FULLPATH@", + 
args = ["-config","${NOMAD_TASK_DIR}/authorizer.json"] + } + + resources { + cpu = 500 # 500 MHz + memory = 256 # 256MB + network { + port "authorizer" { + static = "5007" + } + } + } + + service { + name = "authorizer" + port = "authorizer" + check { + name = "alive" + type = "http" + path = "/health-check" + interval = "10s" + timeout = "2s" + initial_status = "passing" + } + } + + template { + source = "@WORK_DIR@/authorizer.json.tpl" + destination = "local/authorizer.json" + change_mode = "signal" + change_signal = "SIGHUP" + } + + } + } +} diff --git a/config/nomad/broker.nmd.in b/config/nomad/broker.nmd.in index 1d968aa427fc8816fa75ba1d82ffb491870b3432..211c71e21a640256e1b4aea1f334eed4bed3fa45 100644 --- a/config/nomad/broker.nmd.in +++ b/config/nomad/broker.nmd.in @@ -44,6 +44,12 @@ job "broker" { change_signal = "SIGHUP" } + template { + source = "@WORK_DIR@/broker_secret.key" + destination = "broker_secret.key" + change_mode = "signal" + change_signal = "SIGHUP" + } } } } diff --git a/config/nomad/nginx.nmd.in b/config/nomad/nginx.nmd.in index da6826764f1c7f2ce9979246c2440aee482d7891..76d628eaa2d65d3b6b88c9c08bdeae6f71c0dfda 100644 --- a/config/nomad/nginx.nmd.in +++ b/config/nomad/nginx.nmd.in @@ -45,7 +45,7 @@ job "nginx" { check { name = "alive" type = "http" - path = "/nginx_health" + path = "/nginx-health" timeout = "2s" interval = "10s" } diff --git a/discovery/CMakeLists.txt b/discovery/CMakeLists.txt index f17018ce17f5374d00b4e096708fe82660cb99e1..871e6e93faf61f313071790d6c72d206d756cdf6 100644 --- a/discovery/CMakeLists.txt +++ b/discovery/CMakeLists.txt @@ -11,10 +11,10 @@ endif() message(STATUS "global gopath ${GOPATH}") IF(WIN32) - set (gopath "${GOPATH}\;${CMAKE_CURRENT_SOURCE_DIR}") + set (gopath "${GOPATH}\;${CMAKE_CURRENT_SOURCE_DIR}\;${CMAKE_SOURCE_DIR}/common/go") set (exe_name "${TARGET_NAME}.exe") ELSE() - set (gopath ${GOPATH}:${CMAKE_CURRENT_SOURCE_DIR}) + set (gopath ${GOPATH}:${CMAKE_CURRENT_SOURCE_DIR}:${CMAKE_SOURCE_DIR}/common/go) set (exe_name "${TARGET_NAME}") ENDIF() diff --git a/discovery/src/asapo_discovery/logger/logger.go b/discovery/src/asapo_discovery/logger/logger.go deleted file mode 100644 index e198cd23d555819c34ddb9da020909e987653bc0..0000000000000000000000000000000000000000 --- a/discovery/src/asapo_discovery/logger/logger.go +++ /dev/null @@ -1,69 +0,0 @@ -package logger - -import ( - "errors" - "strings" -) - -type Level uint32 - -//log levels -const ( - InfoLevel = iota - DebugLevel - ErrorLevel - WarnLevel - FatalLevel -) - -type Logger interface { - Info(args ...interface{}) - Debug(args ...interface{}) - Fatal(args ...interface{}) - Warning(args ...interface{}) - Error(args ...interface{}) - SetLevel(level Level) -} - -var my_logger Logger = &logRusLogger{} - -func Info(args ...interface{}) { - my_logger.Info(args...) -} - -func Debug(args ...interface{}) { - my_logger.Debug(args...) -} - -func Warning(args ...interface{}) { - my_logger.Warning(args...) -} - -func Error(args ...interface{}) { - my_logger.Error(args...) -} - -func Fatal(args ...interface{}) { - my_logger.Fatal(args...) 
-} - -func SetLevel(level Level) { - my_logger.SetLevel(level) -} - -func LevelFromString(str string) (Level, error) { - switch strings.ToLower(str) { - case "debug": - return DebugLevel, nil - case "info": - return InfoLevel, nil - case "warning": - return WarnLevel, nil - case "error": - return ErrorLevel, nil - case "fatal", "none": - return FatalLevel, nil - } - return FatalLevel, errors.New("wrong log level") - -} diff --git a/discovery/src/asapo_discovery/logger/mock_logger.go b/discovery/src/asapo_discovery/logger/mock_logger.go deleted file mode 100644 index 0e597978bcb3be1739f6beb7eb9620e3d636b9a5..0000000000000000000000000000000000000000 --- a/discovery/src/asapo_discovery/logger/mock_logger.go +++ /dev/null @@ -1,51 +0,0 @@ -//+build !release - -package logger - -import ( - "github.com/stretchr/testify/mock" -) - -type MockLogger struct { - mock.Mock -} - -var MockLog MockLogger - -func SetMockLog() { - my_logger = &MockLog -} - -func UnsetMockLog() { - my_logger = &logRusLogger{} -} - -func (l *MockLogger) Info(args ...interface{}) { - l.Called(args...) - return -} - -func (l *MockLogger) Debug(args ...interface{}) { - l.Called(args...) - return -} - -func (l *MockLogger) Error(args ...interface{}) { - l.Called(args...) - return -} - -func (l *MockLogger) Warning(args ...interface{}) { - l.Called(args...) - return -} - -func (l *MockLogger) Fatal(args ...interface{}) { - l.Called(args...) - return -} - -func (l *MockLogger) SetLevel(level Level) { - l.Called(level) - return -} diff --git a/discovery/src/asapo_discovery/main/discovery.go b/discovery/src/asapo_discovery/main/discovery.go index cf9ed15818093991a31916b6fca2d8817f499442..683b80472cd936661dee73b890952db860be7bcb 100644 --- a/discovery/src/asapo_discovery/main/discovery.go +++ b/discovery/src/asapo_discovery/main/discovery.go @@ -4,7 +4,7 @@ package main import ( "flag" - log "asapo_discovery/logger" + log "asapo_common/logger" "asapo_discovery/server" "os" "asapo_discovery/request_handler" diff --git a/discovery/src/asapo_discovery/request_handler/request_handler.go b/discovery/src/asapo_discovery/request_handler/request_handler.go index b2ce9b5601af3c2df091c926e6ef54e6acc5ab11..7c33af3f8c210c95ef73e37b523b8cfcdc9d253f 100644 --- a/discovery/src/asapo_discovery/request_handler/request_handler.go +++ b/discovery/src/asapo_discovery/request_handler/request_handler.go @@ -1,6 +1,6 @@ package request_handler -import "asapo_discovery/utils" +import "asapo_common/utils" type Agent interface { GetReceivers() ([]byte, error) diff --git a/discovery/src/asapo_discovery/request_handler/request_handler_consul.go b/discovery/src/asapo_discovery/request_handler/request_handler_consul.go index c4a948a4b26aaea7608e7b29188966c376b9550e..12f3dc13c98f64a0255a2ab05dd2d3194a5affe1 100644 --- a/discovery/src/asapo_discovery/request_handler/request_handler_consul.go +++ b/discovery/src/asapo_discovery/request_handler/request_handler_consul.go @@ -1,7 +1,7 @@ package request_handler import ( - "asapo_discovery/utils" + "asapo_common/utils" "github.com/hashicorp/consul/api" "strconv" "errors" diff --git a/discovery/src/asapo_discovery/request_handler/request_handler_consul_test.go b/discovery/src/asapo_discovery/request_handler/request_handler_consul_test.go index e3e5b78c9378a0f4d0bb1271386db745b24fcd3a..716d545310d28be660ba634aa5b9a6a5680675be 100644 --- a/discovery/src/asapo_discovery/request_handler/request_handler_consul_test.go +++ b/discovery/src/asapo_discovery/request_handler/request_handler_consul_test.go @@ -5,7 +5,7 @@ import ( 
"testing" "github.com/hashicorp/consul/api" "strconv" - "asapo_discovery/utils" + "asapo_common/utils" ) type ConsulHandlerTestSuite struct { diff --git a/discovery/src/asapo_discovery/request_handler/request_handler_static.go b/discovery/src/asapo_discovery/request_handler/request_handler_static.go index 1fbe2b4a3edf2a7ef907f97ebe167ce27261983b..f9668a541b435199b18325830f482869b2a5ff4f 100644 --- a/discovery/src/asapo_discovery/request_handler/request_handler_static.go +++ b/discovery/src/asapo_discovery/request_handler/request_handler_static.go @@ -1,7 +1,7 @@ package request_handler import ( - "asapo_discovery/utils" + "asapo_common/utils" ) type StaticRequestHandler struct { diff --git a/discovery/src/asapo_discovery/request_handler/request_handler_static_test.go b/discovery/src/asapo_discovery/request_handler/request_handler_static_test.go index af9bf7d80efb0c2d3c23bea6c21d65d847194b14..eaeed6fab59cb018123a9d3e194b5147dd5d7a80 100644 --- a/discovery/src/asapo_discovery/request_handler/request_handler_static_test.go +++ b/discovery/src/asapo_discovery/request_handler/request_handler_static_test.go @@ -3,7 +3,7 @@ package request_handler import ( "github.com/stretchr/testify/assert" "testing" - "asapo_discovery/utils" + "asapo_common/utils" ) diff --git a/discovery/src/asapo_discovery/server/get_receivers.go b/discovery/src/asapo_discovery/server/get_receivers.go index 70cb2249cb4a4c0270f78ba59bb9a48c96d91c3c..8f946dd7b5781e91b3deb0cbae88735be365a42e 100644 --- a/discovery/src/asapo_discovery/server/get_receivers.go +++ b/discovery/src/asapo_discovery/server/get_receivers.go @@ -2,7 +2,7 @@ package server import ( "net/http" - "asapo_discovery/logger" + "asapo_common/logger" "errors" ) diff --git a/discovery/src/asapo_discovery/server/listroutes.go b/discovery/src/asapo_discovery/server/listroutes.go index ed068c3430ed7c61249515db7e2e05f26a785ac8..b6f36de2d9a1bb1883b468b4b4efbb1c4960c715 100644 --- a/discovery/src/asapo_discovery/server/listroutes.go +++ b/discovery/src/asapo_discovery/server/listroutes.go @@ -1,7 +1,7 @@ package server import ( - "asapo_discovery/utils" + "asapo_common/utils" ) var listRoutes = utils.Routes{ diff --git a/discovery/src/asapo_discovery/server/routes_test.go b/discovery/src/asapo_discovery/server/routes_test.go index 4d35c2c090bfd6c597ec105a7937e5038cff92a5..a983fa2b9ab58094d5d60686e2cfa9c9782eb31c 100644 --- a/discovery/src/asapo_discovery/server/routes_test.go +++ b/discovery/src/asapo_discovery/server/routes_test.go @@ -3,8 +3,8 @@ package server import ( "github.com/stretchr/testify/mock" "github.com/stretchr/testify/suite" - "asapo_discovery/logger" - "asapo_discovery/utils" + "asapo_common/logger" + "asapo_common/utils" "net/http" "net/http/httptest" "strings" diff --git a/discovery/src/asapo_discovery/server/server.go b/discovery/src/asapo_discovery/server/server.go index fb1bb5d5c629a18c517c3193bd56730ac18e9c8a..63379b876c2525776bdafe5749fc9bfd76f0455a 100644 --- a/discovery/src/asapo_discovery/server/server.go +++ b/discovery/src/asapo_discovery/server/server.go @@ -2,7 +2,7 @@ package server import ( "asapo_discovery/request_handler" - "asapo_discovery/utils" + "asapo_common/utils" ) var requestHandler request_handler.Agent diff --git a/discovery/src/asapo_discovery/server/server_nottested.go b/discovery/src/asapo_discovery/server/server_nottested.go index d193d866331875e5ec8801758b3a5745d51ba69d..3b98cf5ea9b77caef8853d806fa5a7d40539198a 100644 --- a/discovery/src/asapo_discovery/server/server_nottested.go +++ 
b/discovery/src/asapo_discovery/server/server_nottested.go @@ -3,8 +3,8 @@ package server import ( - log "asapo_discovery/logger" - "asapo_discovery/utils" + log "asapo_common/logger" + "asapo_common/utils" "net/http" "strconv" ) diff --git a/discovery/src/asapo_discovery/server/settings_test.go b/discovery/src/asapo_discovery/server/settings_test.go index 2d85beb1da0aa200187f88616d44a15be0035506..50307b06a23a089dfa88d53f5ae57d0b0a83e8e0 100644 --- a/discovery/src/asapo_discovery/server/settings_test.go +++ b/discovery/src/asapo_discovery/server/settings_test.go @@ -3,7 +3,7 @@ package server import ( "github.com/stretchr/testify/assert" "testing" - "asapo_discovery/utils" + "asapo_common/utils" ) func fillSettings(mode string) utils.Settings { diff --git a/discovery/src/asapo_discovery/utils/helpers.go b/discovery/src/asapo_discovery/utils/helpers.go deleted file mode 100644 index f1d0da4dc985d0231cbe34602f1942ce01fde362..0000000000000000000000000000000000000000 --- a/discovery/src/asapo_discovery/utils/helpers.go +++ /dev/null @@ -1,38 +0,0 @@ -package utils - -import ( - json "encoding/json" - "io/ioutil" -) - -func StringInSlice(a string, list []string) bool { - for _, b := range list { - if b == a { - return true - } - } - return false -} - -func MapToJson(res interface{}) ([]byte, error) { - answer, err := json.Marshal(res) - if err == nil { - return answer, nil - } else { - return nil, err - } -} - -func ReadJsonFromFile(fname string, config interface{}) error { - content, err := ioutil.ReadFile(fname) - if err != nil { - return err - } - - err = json.Unmarshal(content, config) - if err != nil { - return err - } - - return nil -} diff --git a/examples/producer/dummy-data-producer/check_linux.sh b/examples/producer/dummy-data-producer/check_linux.sh index 53b37762008d7b426ccb52311c0a641e3656da47..c9601daa177e35f58479f84f2f9cc4ba3c7dc35e 100644 --- a/examples/producer/dummy-data-producer/check_linux.sh +++ b/examples/producer/dummy-data-producer/check_linux.sh @@ -12,7 +12,7 @@ rm -rf files mkdir files -$@ files 11 4 4 1 2>&1 | grep Rate +$@ files beamtime_id 11 4 4 1 10 2>&1 | grep Rate ls -ln files/0.bin | awk '{ print $5 }'| grep 11264 diff --git a/examples/producer/dummy-data-producer/check_windows.bat b/examples/producer/dummy-data-producer/check_windows.bat index da2b59829a4f1ca31fb394839fd93be41419425b..6270913bc8fc6118a3c7cc144ebf1467643dcb46 100644 --- a/examples/producer/dummy-data-producer/check_windows.bat +++ b/examples/producer/dummy-data-producer/check_windows.bat @@ -2,7 +2,7 @@ SET folder=files mkdir %folder% -"%1" %folder% 11 4 4 1 2>&1 | findstr "Rate" || goto :error +"%1" %folder% beamtime_id 11 4 4 1 10 2>&1 | findstr "Rate" || goto :error FOR /F "usebackq" %%A IN ('%folder%\0.bin') DO set size=%%~zA if %size% NEQ 11264 goto :error diff --git a/examples/producer/dummy-data-producer/dummy_data_producer.cpp b/examples/producer/dummy-data-producer/dummy_data_producer.cpp index 1d55b247bbeef7e7b6b6c2ed4fb43353bbd73459..49769c2f73483957366cae0691991216effa94fc 100644 --- a/examples/producer/dummy-data-producer/dummy_data_producer.cpp +++ b/examples/producer/dummy-data-producer/dummy_data_producer.cpp @@ -14,35 +14,43 @@ int iterations_remained; struct Args { std::string receiver_address; + std::string beamtime_id; size_t number_of_bytes; uint64_t iterations; uint64_t nthreads; uint64_t mode; + uint64_t timeout_sec; }; void PrintCommandArguments(const Args& args) { std::cout << "receiver_address: " << args.receiver_address << std::endl + << "beamtime_id: " << 
args.beamtime_id << std::endl << "Package size: " << args.number_of_bytes / 1024 << "k" << std::endl << "iterations: " << args.iterations << std::endl << "nthreads: " << args.nthreads << std::endl << "mode: " << args.mode << std::endl + << "timeout: " << args.timeout_sec << std::endl << std::endl; } void ProcessCommandArguments(int argc, char* argv[], Args* args) { - if (argc != 6) { + if (argc != 8) { std::cout << - "Usage: " << argv[0] << " <destination> <number_of_byte> <iterations> <nthreads> <mode 0 -t tcp, 1 - filesystem>" + "Usage: " << argv[0] << + " <destination> <beamtime_id> <number_of_byte> <iterations> <nthreads>" + " <mode 0 -t tcp, 1 - filesystem> <timeout (sec)>" << std::endl; exit(EXIT_FAILURE); } try { args->receiver_address = argv[1]; - args->number_of_bytes = std::stoull(argv[2]) * 1024; - args->iterations = std::stoull(argv[3]); - args->nthreads = std::stoull(argv[4]); - args->mode = std::stoull(argv[5]); + args->beamtime_id = argv[2]; + args->number_of_bytes = std::stoull(argv[3]) * 1024; + args->iterations = std::stoull(argv[4]); + args->nthreads = std::stoull(argv[5]); + args->mode = std::stoull(argv[6]); + args->timeout_sec = std::stoull(argv[7]); PrintCommandArguments(*args); return; } catch(std::exception& e) { @@ -78,7 +86,8 @@ bool SendDummyData(asapo::Producer* producer, uint8_t* data, size_t number_of_by std::unique_ptr<asapo::Producer> CreateProducer(const Args& args) { asapo::Error err; auto producer = asapo::Producer::Create(args.receiver_address, args.nthreads, - args.mode == 0 ? asapo::RequestHandlerType::kTcp : asapo::RequestHandlerType::kFilesystem, &err); + args.mode == 0 ? asapo::RequestHandlerType::kTcp : asapo::RequestHandlerType::kFilesystem, + args.beamtime_id, &err); if(err) { std::cerr << "Cannot start producer. 
ProducerError: " << err << std::endl; exit(EXIT_FAILURE); @@ -91,7 +100,6 @@ std::unique_ptr<asapo::Producer> CreateProducer(const Args& args) { void WaitThreadsFinished(const Args& args) { uint64_t elapsed_ms = 0; - uint64_t timeout_sec = 3000; while (true) { mutex.lock(); if (iterations_remained <= 0) { @@ -101,7 +109,7 @@ void WaitThreadsFinished(const Args& args) { mutex.unlock(); std::this_thread::sleep_for(std::chrono::milliseconds(100)); elapsed_ms += 100; - if (elapsed_ms > timeout_sec * 1000) { + if (elapsed_ms > args.timeout_sec * 1000) { std::cerr << "Exit on timeout " << std::endl; exit(EXIT_FAILURE); } diff --git a/examples/worker/getnext_broker/check_linux.sh b/examples/worker/getnext_broker/check_linux.sh index 18fe844f51ae3e6f40943fe0678f0cb0e7252d0e..8455966d010beea4c21e85df5d25753e8f8ff15a 100644 --- a/examples/worker/getnext_broker/check_linux.sh +++ b/examples/worker/getnext_broker/check_linux.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash database_name=test_run +token_test_run=K38Mqc90iRv8fC7prcFHd994mF_wfUiJnWBfIjIzieo= set -e @@ -23,5 +24,7 @@ do echo 'db.data.insert({"_id":'$i',"size":100,"name":"'$i'","lastchange":1})' | mongo ${database_name} done -$@ 127.0.0.1:8400 $database_name 2 | grep "Processed 3 file(s)" +$@ 127.0.0.1:8400 $database_name 2 $token_test_run | grep "Processed 3 file(s)" + + diff --git a/examples/worker/getnext_broker/check_windows.bat b/examples/worker/getnext_broker/check_windows.bat index 891e876adee4f71d609df9bb626e90b339aa1d2f..d96b652d8e7993e7bff84a30842a492ca3a5760c 100644 --- a/examples/worker/getnext_broker/check_windows.bat +++ b/examples/worker/getnext_broker/check_windows.bat @@ -1,5 +1,6 @@ SET database_name=test_run SET mongo_exe="c:\Program Files\MongoDB\Server\3.6\bin\mongo.exe" +set token_test_run=K38Mqc90iRv8fC7prcFHd994mF_wfUiJnWBfIjIzieo= c:\opt\consul\nomad run discovery.nmd c:\opt\consul\nomad run broker.nmd @@ -10,7 +11,7 @@ ping 1.0.0.0 -n 10 -w 100 > nul for /l %%x in (1, 1, 3) do echo db.data.insert({"_id":%%x,"size":100,"name":"%%x","lastchange":1}) | %mongo_exe% %database_name% || goto :error -"%1" 127.0.0.1:8400 %database_name% 1 | findstr /c:"Processed 3 file" || goto :error +"%1" 127.0.0.1:8400 %database_name% 1 %token_test_run% | findstr /c:"Processed 3 file" || goto :error goto :clean :error diff --git a/examples/worker/getnext_broker/getnext_broker.cpp b/examples/worker/getnext_broker/getnext_broker.cpp index 8655fdbb454a2f3ed04dbd871bbe5e3baffd5f94..46648a2db695c8f0d77ae76f51c859561a3b91b0 100644 --- a/examples/worker/getnext_broker/getnext_broker.cpp +++ b/examples/worker/getnext_broker/getnext_broker.cpp @@ -12,6 +12,13 @@ using std::chrono::high_resolution_clock; using asapo::Error; +struct Params { + std::string server; + std::string beamtime_id; + std::string token; + int nthreads; +}; + void WaitThreads(std::vector<std::thread>* threads) { for (auto& thread : *threads) { thread.join(); @@ -28,12 +35,11 @@ int ProcessError(const Error& err) { return 0; } -std::vector<std::thread> StartThreads(const std::string& server, const std::string& run_name, int nthreads, - std::vector<int>* nfiles, std::vector<int>* errors) { - auto exec_next = [server, run_name, nfiles, errors](int i) { +std::vector<std::thread> StartThreads(const Params& params, std::vector<int>* nfiles, std::vector<int>* errors) { + auto exec_next = [¶ms, nfiles, errors](int i) { asapo::FileInfo fi; Error err; - auto broker = asapo::DataBrokerFactory::CreateServerBroker(server, run_name, &err); + auto broker = 
asapo::DataBrokerFactory::CreateServerBroker(params.server, params.beamtime_id, params.token, &err); broker->SetTimeout(10000); while ((err = broker->GetNext(&fi, nullptr)) == nullptr) { (*nfiles)[i] ++; @@ -42,24 +48,25 @@ std::vector<std::thread> StartThreads(const std::string& server, const std::stri }; std::vector<std::thread> threads; - for (int i = 0; i < nthreads; i++) { + for (int i = 0; i < params.nthreads; i++) { threads.emplace_back(std::thread(exec_next, i)); } return threads; } -int ReadAllData(const std::string& server, const std::string& run_name, int nthreads, uint64_t* duration_ms) { +int ReadAllData(const Params& params, uint64_t* duration_ms) { asapo::FileInfo fi; high_resolution_clock::time_point t1 = high_resolution_clock::now(); - std::vector<int>nfiles(nthreads, 0); - std::vector<int>errors(nthreads, 0); + std::vector<int>nfiles(params.nthreads, 0); + std::vector<int>errors(params.nthreads, 0); - auto threads = StartThreads(server, run_name, nthreads, &nfiles, &errors); + auto threads = StartThreads(params, &nfiles, &errors); WaitThreads(&threads); int n_total = std::accumulate(nfiles.begin(), nfiles.end(), 0); int errors_total = std::accumulate(errors.begin(), errors.end(), 0); + if (errors_total) { exit(EXIT_FAILURE); } @@ -71,17 +78,18 @@ int ReadAllData(const std::string& server, const std::string& run_name, int nthr } int main(int argc, char* argv[]) { - if (argc != 4) { - std::cout << "Usage: " + std::string{argv[0]} +" <server> <run_name> <nthreads>" << std::endl; + if (argc != 5) { + std::cout << "Usage: " + std::string{argv[0]} +" <server> <run_name> <nthreads> <token>" << std::endl; exit(EXIT_FAILURE); } - std::string server = std::string{argv[1]}; - std::string run_name = std::string{argv[2]}; - int nthreads = atoi(argv[3]); - + Params params; + params.server = std::string{argv[1]}; + params.beamtime_id = std::string{argv[2]}; + params.nthreads = atoi(argv[3]); + params.token = std::string{argv[4]}; uint64_t duration_ms; - auto nfiles = ReadAllData(server, run_name, nthreads, &duration_ms); + auto nfiles = ReadAllData(params, &duration_ms); std::cout << "Processed " << nfiles << " file(s)" << std::endl; std::cout << "Elapsed : " << duration_ms << "ms" << std::endl; diff --git a/producer/api/include/producer/producer.h b/producer/api/include/producer/producer.h index 303f79d295d519ae0a93aa75979da2beef3cec19..8b9cc16d6b984353b0aae3ec9568ba4dc5f526fd 100644 --- a/producer/api/include/producer/producer.h +++ b/producer/api/include/producer/producer.h @@ -17,7 +17,7 @@ class Producer { * @return A unique_ptr to a new producer instance */ static std::unique_ptr<Producer> Create(const std::string& endpoint, uint8_t n_processing_threads, - asapo::RequestHandlerType type, + asapo::RequestHandlerType type, std::string beamtime_id, Error* err); virtual ~Producer() = default; @@ -37,6 +37,8 @@ class Producer { virtual void EnableLocalLog(bool enable) = 0; //! Enables/Disables sending logs to the central server virtual void EnableRemoteLog(bool enable) = 0; + //! 
Set beamtime id which producer will use to send data + virtual Error SetBeamtimeId(std::string beamtime_id) = 0; }; } diff --git a/producer/api/include/producer/producer_error.h b/producer/api/include/producer/producer_error.h index 5a0641b7dd8c7ad1ee1159fee85fee954ebbfa5e..5ed0cdc1e8c9f4cbde03d4fde62af41a041b2927 100644 --- a/producer/api/include/producer/producer_error.h +++ b/producer/api/include/producer/producer_error.h @@ -9,7 +9,11 @@ enum class ProducerErrorType { kAlreadyConnected, kConnectionNotReady, kFileTooLarge, + kFileNameTooLong, + kBeamtimeIdTooLong, + kBeamtimeAlreadySet, kFileIdAlreadyInUse, + kAuthorizationFailed, kInternalServerError, kCannotSendDataToReceivers, kRequestPoolIsFull @@ -70,10 +74,28 @@ auto const kFileTooLarge = ProducerErrorTemplate { "File too large", ProducerErrorType::kFileTooLarge }; +auto const kFileNameTooLong = ProducerErrorTemplate { + "filename too long", ProducerErrorType::kFileNameTooLong +}; + +auto const kBeamtimeIdTooLong = ProducerErrorTemplate { + "beamtime id too long", ProducerErrorType::kBeamtimeIdTooLong +}; + + +auto const kBeamtimeAlreadySet = ProducerErrorTemplate { + "beamtime id already set", ProducerErrorType::kBeamtimeAlreadySet +}; + + auto const kFileIdAlreadyInUse = ProducerErrorTemplate { "File already in use", ProducerErrorType::kFileIdAlreadyInUse }; +auto const kAuthorizationFailed = ProducerErrorTemplate { + "Authorization failed:", ProducerErrorType::kAuthorizationFailed +}; + auto const kInternalServerError = ProducerErrorTemplate { "Internal server error", ProducerErrorType::kInternalServerError }; diff --git a/producer/api/src/producer.cpp b/producer/api/src/producer.cpp index a8e3e38b4ab8d124dc84f17558f122ac079b54ac..ac81876460a84a53313fef72596cdd545f86cdfe 100644 --- a/producer/api/src/producer.cpp +++ b/producer/api/src/producer.cpp @@ -2,15 +2,15 @@ #include "producer_impl.h" std::unique_ptr<asapo::Producer> asapo::Producer::Create(const std::string& endpoint, uint8_t n_processing_threads, - asapo::RequestHandlerType type, Error* err) { + asapo::RequestHandlerType type, std::string beamtime_id, Error* err) { if (n_processing_threads > kMaxProcessingThreads) { *err = TextError("Too many processing threads: " + std::to_string(n_processing_threads)); return nullptr; } + std::unique_ptr<asapo::Producer> producer; try { - *err = nullptr; - return std::unique_ptr<asapo::Producer>(new ProducerImpl(endpoint, n_processing_threads, type)); + producer.reset(new ProducerImpl(endpoint, n_processing_threads, type)); } catch (const std::exception& ex) { *err = TextError(ex.what()); return nullptr; @@ -18,4 +18,10 @@ std::unique_ptr<asapo::Producer> asapo::Producer::Create(const std::string& endp *err = TextError("Unknown exception in producer_api "); return nullptr; } + + *err = producer->SetBeamtimeId(beamtime_id); + if (*err) { + return nullptr; + } + return producer; } diff --git a/producer/api/src/producer_impl.cpp b/producer/api/src/producer_impl.cpp index 59ddc768640373c0c63d2ce6661724feff7ef2fc..fb592648069422d9d914fc587b087d26a4cb6d45 100644 --- a/producer/api/src/producer_impl.cpp +++ b/producer/api/src/producer_impl.cpp @@ -32,25 +32,33 @@ GenericRequestHeader ProducerImpl::GenerateNextSendRequest(uint64_t file_id, siz return request; } -Error CheckProducerRequest(const GenericRequestHeader header) { - if (header.data_size > ProducerImpl::kMaxChunkSize) { +Error CheckProducerRequest(size_t file_size, size_t filename_size) { + if (file_size > ProducerImpl::kMaxChunkSize) { return 
ProducerErrorTemplates::kFileTooLarge.Generate(); } + if (filename_size > kMaxMessageSize) { + return ProducerErrorTemplates::kFileNameTooLong.Generate(); + } + return nullptr; } Error ProducerImpl::Send(uint64_t file_id, const void* data, size_t file_size, std::string file_name, RequestCallback callback) { - auto request_header = GenerateNextSendRequest(file_id, file_size, std::move(file_name)); - auto err = CheckProducerRequest(request_header); + auto err = CheckProducerRequest(file_size, file_name.size()); if (err) { + log__->Error("error checking request - " + err->Explain()); return err; } - return request_pool__->AddRequest(std::unique_ptr<Request> {new Request{request_header, data, callback}}); + + auto request_header = GenerateNextSendRequest(file_id, file_size, std::move(file_name)); + + + return request_pool__->AddRequest(std::unique_ptr<Request> {new Request{beamtime_id_, request_header, data, callback}}); } void ProducerImpl::SetLogLevel(LogLevel level) { @@ -65,4 +73,20 @@ void ProducerImpl::EnableRemoteLog(bool enable) { log__->EnableRemoteLog(enable); } +Error ProducerImpl::SetBeamtimeId(std::string beamtime_id) { + + if (!beamtime_id_.empty()) { + log__->Error("beamtime_id already set"); + return ProducerErrorTemplates::kBeamtimeAlreadySet.Generate(); + } + + if (beamtime_id.size() > kMaxMessageSize) { + log__->Error("beamtime_id is too long - " + beamtime_id); + return ProducerErrorTemplates::kBeamtimeIdTooLong.Generate(); + } + + beamtime_id_ = std::move(beamtime_id); + return nullptr; +} + } \ No newline at end of file diff --git a/producer/api/src/producer_impl.h b/producer/api/src/producer_impl.h index b675c11adae980f08277a92511221370f9c56c89..e76b93c27b65eb8a3f8fb5595c1a78bbe08a4162 100644 --- a/producer/api/src/producer_impl.h +++ b/producer/api/src/producer_impl.h @@ -32,8 +32,11 @@ class ProducerImpl : public Producer { RequestCallback callback) override; AbstractLogger* log__; std::unique_ptr<RequestPool> request_pool__; + Error SetBeamtimeId(std::string beamtime_id) override; + private: GenericRequestHeader GenerateNextSendRequest(uint64_t file_id, size_t file_size, std::string file_name); + std::string beamtime_id_; }; Error CheckProducerRequest(const GenericRequestHeader header); diff --git a/producer/api/src/request.h b/producer/api/src/request.h index 4c0a7ccdd4867bdf8d019e43634adaf5c69e01e1..59c7589f9c93011d85a730b46bf3594ef6868c74 100644 --- a/producer/api/src/request.h +++ b/producer/api/src/request.h @@ -7,6 +7,7 @@ namespace asapo { struct Request { + std::string beamtime_id; GenericRequestHeader header; const void* data; RequestCallback callback; diff --git a/producer/api/src/request_handler_filesystem.cpp b/producer/api/src/request_handler_filesystem.cpp index 93ac0b49945030e02dbee22feec49ccaebb3406c..e43acd01fc0ab9efe18b22b40674bf9cb9775c3d 100644 --- a/producer/api/src/request_handler_filesystem.cpp +++ b/producer/api/src/request_handler_filesystem.cpp @@ -14,7 +14,7 @@ RequestHandlerFilesystem::RequestHandlerFilesystem(std::string destination_folde } Error RequestHandlerFilesystem::ProcessRequestUnlocked(const Request* request) { - std::string fullpath = destination_folder_ + "/" + request->header.file_name + ".bin"; + std::string fullpath = destination_folder_ + "/" + request->header.message + ".bin"; auto err = io__->WriteDataToFile(fullpath, (uint8_t*)request->data, request->header.data_size); if (request->callback) { request->callback(request->header, std::move(err)); diff --git a/producer/api/src/request_handler_tcp.cpp 
b/producer/api/src/request_handler_tcp.cpp index e85d51645444fa3b9e81912e554853a0d356800d..043b1881a1cc1d5b4b1b4ad741d4d00c11da3dd0 100644 --- a/producer/api/src/request_handler_tcp.cpp +++ b/producer/api/src/request_handler_tcp.cpp @@ -14,15 +14,37 @@ RequestHandlerTcp::RequestHandlerTcp(ReceiverDiscoveryService* discovery_service } -Error RequestHandlerTcp::ConnectToReceiver(const std::string& receiver_address) { +Error RequestHandlerTcp::Authorize(const std::string& beamtime_id) { + GenericRequestHeader header{kOpcodeAuthorize, 0, 0, beamtime_id.c_str()}; Error err; + io__->Send(sd_, &header, sizeof(header), &err); + if(err) { + return err; + } + return ReceiveResponse(); +} + + +Error RequestHandlerTcp::ConnectToReceiver(const std::string& beamtime_id, const std::string& receiver_address) { + Error err; + sd_ = io__->CreateAndConnectIPTCPSocket(receiver_address, &err); if(err != nullptr) { log__->Debug("cannot connect to receiver at " + receiver_address + " - " + err->Explain()); return err; } log__->Info("connected to receiver at " + receiver_address); + connected_receiver_uri_ = receiver_address; + err = Authorize(beamtime_id); + if (err != nullptr) { + log__->Error("authorization failed at " + receiver_address + " - " + err->Explain()); + Disconnect(); + return err; + } + + log__->Debug("authorized at " + receiver_address); + return nullptr; } @@ -48,14 +70,19 @@ Error RequestHandlerTcp::ReceiveResponse() { if(err != nullptr) { return err; } - - if(sendDataResponse.error_code) { - if(sendDataResponse.error_code == kNetErrorFileIdAlreadyInUse) { - return ProducerErrorTemplates::kFileIdAlreadyInUse.Generate(); - } + switch (sendDataResponse.error_code) { + case kNetErrorFileIdAlreadyInUse : + return ProducerErrorTemplates::kFileIdAlreadyInUse.Generate(); + case kNetAuthorizationError : { + auto res_err = ProducerErrorTemplates::kAuthorizationFailed.Generate(); + res_err->Append(sendDataResponse.message); + return res_err; + } + case kNetErrorNoError : + return nullptr; + default: return ProducerErrorTemplates::kInternalServerError.Generate(); } - return nullptr; } Error RequestHandlerTcp::TrySendToReceiver(const Request* request) { @@ -137,7 +164,7 @@ Error RequestHandlerTcp::ProcessRequestUnlocked(const Request* request) { } for (auto receiver_uri : receivers_list_) { if (Disconnected()) { - auto err = ConnectToReceiver(receiver_uri); + auto err = ConnectToReceiver(request->beamtime_id, receiver_uri); if (err != nullptr ) continue; } diff --git a/producer/api/src/request_handler_tcp.h b/producer/api/src/request_handler_tcp.h index 08a02132c6aae4ac2f7d38eee9abf8aef7646013..c89b282a775351f50152468a9fe9b475fd1692e8 100644 --- a/producer/api/src/request_handler_tcp.h +++ b/producer/api/src/request_handler_tcp.h @@ -29,7 +29,8 @@ class RequestHandlerTcp: public RequestHandler { const AbstractLogger* log__; ReceiverDiscoveryService* discovery_service__; private: - Error ConnectToReceiver(const std::string& receiver_address); + Error Authorize(const std::string& beamtime_id); + Error ConnectToReceiver(const std::string& beamtime_id, const std::string& receiver_address); Error SendHeaderAndData(const Request*); Error ReceiveResponse(); Error TrySendToReceiver(const Request* request); diff --git a/producer/api/unittests/test_producer.cpp b/producer/api/unittests/test_producer.cpp index 941456e79f54b9901e2a4f2b6280b255554bf205..f15706983f5c6a8e83ca5bcc096cfab3427b93d5 100644 --- a/producer/api/unittests/test_producer.cpp +++ b/producer/api/unittests/test_producer.cpp @@ -3,6 +3,7 @@ #include 
"producer/producer.h" #include "../src/producer_impl.h" +#include "producer/producer_error.h" using ::testing::Ne; using ::testing::Eq; @@ -12,15 +13,25 @@ namespace { TEST(CreateProducer, TcpProducer) { asapo::Error err; std::unique_ptr<asapo::Producer> producer = asapo::Producer::Create("endpoint", 4, asapo::RequestHandlerType::kTcp, - &err); + "bt", &err); ASSERT_THAT(dynamic_cast<asapo::ProducerImpl*>(producer.get()), Ne(nullptr)); ASSERT_THAT(err, Eq(nullptr)); } +TEST(CreateProducer, ErrorBeamtime) { + asapo::Error err; + std::string expected_beamtimeid(asapo::kMaxMessageSize * 10, 'a'); + std::unique_ptr<asapo::Producer> producer = asapo::Producer::Create("endpoint", 4, asapo::RequestHandlerType::kTcp, + expected_beamtimeid, &err); + ASSERT_THAT(producer, Eq(nullptr)); + ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kBeamtimeIdTooLong)); +} + + TEST(CreateProducer, FileSystemProducer) { asapo::Error err; std::unique_ptr<asapo::Producer> producer = asapo::Producer::Create("endpoint", 4, - asapo::RequestHandlerType::kFilesystem, &err); + asapo::RequestHandlerType::kFilesystem, "bt", &err); ASSERT_THAT(dynamic_cast<asapo::ProducerImpl*>(producer.get()), Ne(nullptr)); ASSERT_THAT(err, Eq(nullptr)); } @@ -29,14 +40,15 @@ TEST(CreateProducer, FileSystemProducer) { TEST(CreateProducer, TooManyThreads) { asapo::Error err; std::unique_ptr<asapo::Producer> producer = asapo::Producer::Create("", asapo::kMaxProcessingThreads + 1, - asapo::RequestHandlerType::kTcp, &err); + asapo::RequestHandlerType::kTcp, "bt", &err); ASSERT_THAT(producer, Eq(nullptr)); ASSERT_THAT(err, Ne(nullptr)); } TEST(Producer, SimpleWorkflowWihoutConnection) { asapo::Error err; - std::unique_ptr<asapo::Producer> producer = asapo::Producer::Create("hello", 5, asapo::RequestHandlerType::kTcp, &err); + std::unique_ptr<asapo::Producer> producer = asapo::Producer::Create("hello", 5, asapo::RequestHandlerType::kTcp, "bt", + &err); auto err_send = producer->Send(1, nullptr, 1, "", nullptr); std::this_thread::sleep_for(std::chrono::milliseconds(100)); ASSERT_THAT(producer, Ne(nullptr)); diff --git a/producer/api/unittests/test_producer_impl.cpp b/producer/api/unittests/test_producer_impl.cpp index 084557fb3b41619621ead0daf8323e9905a6ad2f..10e0d5f37056950486c057258e9cd7331ba436d6 100644 --- a/producer/api/unittests/test_producer_impl.cpp +++ b/producer/api/unittests/test_producer_impl.cpp @@ -30,12 +30,13 @@ using asapo::RequestPool; using asapo::Request; -MATCHER_P3(M_CheckSendDataRequest, file_id, file_size, file_name, +MATCHER_P5(M_CheckSendDataRequest, op_code, beamtime_id, file_id, file_size, message, "Checks if a valid GenericRequestHeader was Send") { - return ((asapo::GenericRequestHeader*)arg)->op_code == asapo::kOpcodeTransferData - && ((asapo::GenericRequestHeader*)arg)->data_id == file_id - && std::string(((asapo::GenericRequestHeader*)arg)->file_name) == file_name - && ((asapo::GenericRequestHeader*)arg)->data_size == file_size; + return ((asapo::GenericRequestHeader)(arg->header)).op_code == op_code + && ((asapo::GenericRequestHeader)(arg->header)).data_id == file_id + && ((asapo::GenericRequestHeader)(arg->header)).data_size == file_size + && arg->beamtime_id == beamtime_id + && strcmp(((asapo::GenericRequestHeader)(arg->header)).message, message) == 0; } @@ -68,8 +69,18 @@ TEST_F(ProducerImplTests, SendReturnsError) { ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kRequestPoolIsFull)); } +TEST_F(ProducerImplTests, ErrorIfFileNameTooLong) { + std::string long_string(asapo::kMaxMessageSize + 100, 'a'); + 
auto err = producer.Send(1, nullptr, 1, long_string, nullptr); + ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kFileNameTooLong)); +} + + TEST_F(ProducerImplTests, ErrorIfSizeTooLarge) { + EXPECT_CALL(mock_logger, Error(testing::HasSubstr("error checking"))); + auto err = producer.Send(1, nullptr, asapo::ProducerImpl::kMaxChunkSize + 1, "", nullptr); + ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kFileTooLarge)); } @@ -77,13 +88,15 @@ TEST_F(ProducerImplTests, ErrorIfSizeTooLarge) { TEST_F(ProducerImplTests, OKSendingRequest) { uint64_t expected_size = 100; uint64_t expected_id = 10; - std::string expected_name = "test_name"; - + char expected_name[asapo::kMaxMessageSize] = "test_name"; + std::string expected_beamtimeid = "beamtime_id"; - Request request{asapo::GenericRequestHeader{asapo::kOpcodeTransferData, expected_id, expected_size, expected_name}, nullptr, nullptr}; + producer.SetBeamtimeId(expected_beamtimeid); + Request request{"", asapo::GenericRequestHeader{asapo::kOpcodeTransferData, expected_id, expected_size, expected_name}, nullptr, nullptr}; - EXPECT_CALL(mock_pull, AddRequest_t(M_CheckSendDataRequest(expected_id, expected_size, expected_name))).WillOnce(Return( - nullptr)); + EXPECT_CALL(mock_pull, AddRequest_t(M_CheckSendDataRequest(asapo::kOpcodeTransferData, + expected_beamtimeid, expected_id, expected_size, expected_name))).WillOnce(Return( + nullptr)); auto err = producer.Send(expected_id, nullptr, expected_size, expected_name, nullptr); @@ -91,4 +104,24 @@ TEST_F(ProducerImplTests, OKSendingRequest) { } +TEST_F(ProducerImplTests, ErrorSettingBeamtime) { + std::string expected_beamtimeid(asapo::kMaxMessageSize * 10, 'a'); + EXPECT_CALL(mock_logger, Error(testing::HasSubstr("too long"))); + + auto err = producer.SetBeamtimeId(expected_beamtimeid); + + ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kBeamtimeIdTooLong)); +} + +TEST_F(ProducerImplTests, ErrorSettingSecondTime) { + EXPECT_CALL(mock_logger, Error(testing::HasSubstr("already"))); + + producer.SetBeamtimeId("1"); + auto err = producer.SetBeamtimeId("2"); + + ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kBeamtimeAlreadySet)); +} + + + } diff --git a/producer/api/unittests/test_request_handler_filesystem.cpp b/producer/api/unittests/test_request_handler_filesystem.cpp index 7cee0ad2410d6dcb5ee78b9783c9aee690433d0e..eea5ab2b11340e5a7611ed6f48d9b43a1a4aa494 100644 --- a/producer/api/unittests/test_request_handler_filesystem.cpp +++ b/producer/api/unittests/test_request_handler_filesystem.cpp @@ -52,13 +52,13 @@ class RequestHandlerFilesystemTests : public testing::Test { asapo::GenericRequestHeader header{expected_op_code, expected_file_id, expected_file_size, expected_file_name}; bool called = false; asapo::GenericRequestHeader callback_header; - asapo::Request request{header, expected_data_pointer, [this](asapo::GenericRequestHeader header, asapo::Error err) { + asapo::Request request{"", header, expected_data_pointer, [this](asapo::GenericRequestHeader header, asapo::Error err) { called = true; callback_err = std::move(err); callback_header = header; }}; - asapo::Request request_nocallback{header, expected_data_pointer, nullptr}; + asapo::Request request_nocallback{"", header, expected_data_pointer, nullptr}; testing::NiceMock<asapo::MockLogger> mock_logger; asapo::RequestHandlerFilesystem request_handler{expected_destination, expected_thread_id}; @@ -131,7 +131,7 @@ TEST_F(RequestHandlerFilesystemTests, TransferOK) { ASSERT_THAT(callback_header.data_size, Eq(header.data_size)); 
ASSERT_THAT(callback_header.op_code, Eq(header.op_code)); ASSERT_THAT(callback_header.data_id, Eq(header.data_id)); - ASSERT_THAT(std::string{callback_header.file_name}, Eq(std::string{header.file_name})); + ASSERT_THAT(std::string{callback_header.message}, Eq(std::string{header.message})); } diff --git a/producer/api/unittests/test_request_handler_tcp.cpp b/producer/api/unittests/test_request_handler_tcp.cpp index 7e8b3acb693525d21ee6b80724f1606218ec2bca..9a9d404abbba2df9de9ccdd99c9b242e5fe29355 100644 --- a/producer/api/unittests/test_request_handler_tcp.cpp +++ b/producer/api/unittests/test_request_handler_tcp.cpp @@ -30,6 +30,7 @@ using testing::NiceMock; using ::testing::InSequence; using ::testing::HasSubstr; +using ::testing::Sequence; TEST(RequestHandlerTcp, Constructor) { @@ -41,6 +42,7 @@ TEST(RequestHandlerTcp, Constructor) { ASSERT_THAT(request.discovery_service__, Eq(&ds)); } +std::string expected_auth_message = {"12345"}; class RequestHandlerTcpTests : public testing::Test { public: @@ -49,7 +51,9 @@ class RequestHandlerTcpTests : public testing::Test { uint64_t expected_file_id = 42; uint64_t expected_file_size = 1337; - std::string expected_file_name = "test_name"; + char expected_file_name[asapo::kMaxMessageSize] = "test_name"; + char expected_beamtime_id[asapo::kMaxMessageSize] = "test_beamtime_id"; + uint64_t expected_thread_id = 2; asapo::Opcode expected_op_code = asapo::kOpcodeTransferData; @@ -58,13 +62,13 @@ class RequestHandlerTcpTests : public testing::Test { asapo::GenericRequestHeader header{expected_op_code, expected_file_id, expected_file_size, expected_file_name}; bool called = false; asapo::GenericRequestHeader callback_header; - asapo::Request request{header, expected_file_pointer, [this](asapo::GenericRequestHeader header, asapo::Error err) { + asapo::Request request{expected_beamtime_id, header, expected_file_pointer, [this](asapo::GenericRequestHeader header, asapo::Error err) { called = true; callback_err = std::move(err); callback_header = header; }}; - asapo::Request request_nocallback{header, expected_file_pointer, nullptr}; + asapo::Request request_nocallback{expected_beamtime_id, header, expected_file_pointer, nullptr}; testing::NiceMock<asapo::MockLogger> mock_logger; uint64_t n_connections{0}; asapo::RequestHandlerTcp request_handler{&mock_discovery_service, expected_thread_id, &n_connections}; @@ -78,7 +82,10 @@ class RequestHandlerTcpTests : public testing::Test { std::vector<asapo::SocketDescriptor> expected_sds{83942, 83943}; + Sequence seq_receive; void ExpectFailConnect(bool only_once = false); + void ExpectFailAuthorize(bool only_once = false); + void ExpectOKAuthorize(bool only_once = false); void ExpectFailSendHeader(bool only_once = false); void ExpectFailSendData(bool only_once = false); void ExpectOKConnect(bool only_once = false); @@ -103,13 +110,15 @@ class RequestHandlerTcpTests : public testing::Test { ACTION_P(A_WriteSendDataResponse, error_code) { ((asapo::SendDataResponse*)arg1)->op_code = asapo::kOpcodeTransferData; ((asapo::SendDataResponse*)arg1)->error_code = error_code; + strcpy(((asapo::SendDataResponse*)arg1)->message, expected_auth_message.c_str()); } -MATCHER_P2(M_CheckSendDataRequest, file_id, file_size, +MATCHER_P4(M_CheckSendDataRequest, op_code, file_id, file_size, message, "Checks if a valid GenericRequestHeader was Send") { - return ((asapo::GenericRequestHeader*)arg)->op_code == asapo::kOpcodeTransferData + return ((asapo::GenericRequestHeader*)arg)->op_code == op_code && 
((asapo::GenericRequestHeader*)arg)->data_id == file_id - && ((asapo::GenericRequestHeader*)arg)->data_size == file_size; + && ((asapo::GenericRequestHeader*)arg)->data_size == file_size + && strcmp(((asapo::GenericRequestHeader*)arg)->message, message) == 0; } @@ -132,11 +141,76 @@ void RequestHandlerTcpTests::ExpectFailConnect(bool only_once) { } + +void RequestHandlerTcpTests::ExpectFailAuthorize(bool only_once) { + int i = 0; + for (auto expected_sd : expected_sds) { + EXPECT_CALL(mock_io, Send_t(expected_sd, M_CheckSendDataRequest(asapo::kOpcodeAuthorize, 0, 0, expected_beamtime_id), + sizeof(asapo::GenericRequestHeader), _)) + .WillOnce( + DoAll( + testing::SetArgPointee<3>(nullptr), + Return(sizeof(asapo::GenericRequestHeader)) + )); + + EXPECT_CALL(mock_io, Receive_t(expected_sd, _, sizeof(asapo::SendDataResponse), _)) + .InSequence(seq_receive) + .WillOnce( + DoAll( + testing::SetArgPointee<3>(nullptr), + A_WriteSendDataResponse(asapo::kNetAuthorizationError), + testing::ReturnArg<2>() + )); + EXPECT_CALL(mock_io, CloseSocket_t(expected_sd, _)); + EXPECT_CALL(mock_logger, Error(AllOf( + HasSubstr("authorization"), + HasSubstr(expected_auth_message), + HasSubstr(receivers_list[i]) + ) + )); + if (only_once) break; + i++; + } +} + +void RequestHandlerTcpTests::ExpectOKAuthorize(bool only_once) { + int i = 0; + for (auto expected_sd : expected_sds) { + EXPECT_CALL(mock_io, Send_t(expected_sd, M_CheckSendDataRequest(asapo::kOpcodeAuthorize, 0, 0, expected_beamtime_id), + sizeof(asapo::GenericRequestHeader), _)) + .WillOnce( + DoAll( + testing::SetArgPointee<3>(nullptr), + Return(sizeof(asapo::GenericRequestHeader)) + )); + + + EXPECT_CALL(mock_io, Receive_t(expected_sd, _, sizeof(asapo::SendDataResponse), _)) + .InSequence(seq_receive) + .WillOnce( + DoAll( + testing::SetArgPointee<3>(nullptr), + A_WriteSendDataResponse(asapo::kNetErrorNoError), + testing::ReturnArg<2>() + )); + EXPECT_CALL(mock_logger, Debug(AllOf( + HasSubstr("authorized"), + HasSubstr(receivers_list[i]) + ) + )); + if (only_once) break; + i++; + } + +} + + + void RequestHandlerTcpTests::ExpectFailSendHeader(bool only_once) { int i = 0; for (auto expected_sd : expected_sds) { - EXPECT_CALL(mock_io, Send_t(expected_sd, M_CheckSendDataRequest(expected_file_id, - expected_file_size), + EXPECT_CALL(mock_io, Send_t(expected_sd, M_CheckSendDataRequest(asapo::kOpcodeTransferData, expected_file_id, + expected_file_size, expected_file_name), sizeof(asapo::GenericRequestHeader), _)) .WillOnce( DoAll( @@ -194,7 +268,7 @@ void RequestHandlerTcpTests::ExpectFailReceive(bool only_once) { int i = 0; for (auto expected_sd : expected_sds) { EXPECT_CALL(mock_io, Receive_t(expected_sd, _, sizeof(asapo::SendDataResponse), _)) - .Times(1) + .InSequence(seq_receive) .WillOnce( DoAll( testing::SetArgPointee<3>(asapo::IOErrorTemplates::kBadFileNumber.Generate().release()), @@ -238,8 +312,8 @@ void RequestHandlerTcpTests::ExpectOKSendData(bool only_once) { void RequestHandlerTcpTests::ExpectOKSendHeader(bool only_once) { for (auto expected_sd : expected_sds) { - EXPECT_CALL(mock_io, Send_t(expected_sd, M_CheckSendDataRequest(expected_file_id, - expected_file_size), + EXPECT_CALL(mock_io, Send_t(expected_sd, M_CheckSendDataRequest(asapo::kOpcodeTransferData, expected_file_id, + expected_file_size, expected_file_name), sizeof(asapo::GenericRequestHeader), _)) .WillOnce( DoAll( @@ -276,6 +350,7 @@ void RequestHandlerTcpTests::ExpectOKReceive(bool only_once) { int i = 0; for (auto expected_sd : expected_sds) { EXPECT_CALL(mock_io, 
Receive_t(expected_sd, _, sizeof(asapo::SendDataResponse), _)) + .InSequence(seq_receive) .WillOnce( DoAll( testing::SetArgPointee<3>(nullptr), @@ -293,7 +368,11 @@ void RequestHandlerTcpTests::ExpectOKReceive(bool only_once) { } void RequestHandlerTcpTests::DoSingleSend(bool connect, bool success) { - if (connect) ExpectOKConnect(true); + if (connect) { + ExpectOKConnect(true); + ExpectOKAuthorize(true); + } + ExpectOKSendHeader(true); ExpectOKSendData(true); if (success) { @@ -370,6 +449,19 @@ TEST_F(RequestHandlerTcpTests, TriesConnectWhenNotConnected) { ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kCannotSendDataToReceivers)); } +TEST_F(RequestHandlerTcpTests, FailsWhenCannotAuthorize) { + ExpectOKConnect(); + ExpectFailAuthorize(); + + request_handler.PrepareProcessingRequestLocked(); + auto err = request_handler.ProcessRequestUnlocked(&request); + request_handler.TearDownProcessingRequestLocked(err); + + ASSERT_THAT(err, Eq(asapo::ProducerErrorTemplates::kCannotSendDataToReceivers)); + ASSERT_THAT(n_connections, Eq(0)); +} + + TEST_F(RequestHandlerTcpTests, DoesNotTryConnectWhenConnected) { DoSingleSend(); @@ -392,6 +484,7 @@ TEST_F(RequestHandlerTcpTests, DoesNotTryConnectWhenConnected) { TEST_F(RequestHandlerTcpTests, DoNotCloseWhenNotConnected) { EXPECT_CALL(mock_io, CloseSocket_t(_, _)).Times(0); ExpectOKConnect(); + ExpectOKAuthorize(); ExpectFailSendHeader(); request_handler.PrepareProcessingRequestLocked(); @@ -419,6 +512,7 @@ TEST_F(RequestHandlerTcpTests, CloseConnectionWhenRebalance) { TEST_F(RequestHandlerTcpTests, ErrorWhenCannotSendHeader) { ExpectOKConnect(); + ExpectOKAuthorize(); ExpectFailSendHeader(); request_handler.PrepareProcessingRequestLocked(); @@ -430,6 +524,7 @@ TEST_F(RequestHandlerTcpTests, ErrorWhenCannotSendHeader) { TEST_F(RequestHandlerTcpTests, ErrorWhenCannotSendData) { ExpectOKConnect(); + ExpectOKAuthorize(); ExpectOKSendHeader(); ExpectFailSendData(); @@ -440,10 +535,14 @@ TEST_F(RequestHandlerTcpTests, ErrorWhenCannotSendData) { } TEST_F(RequestHandlerTcpTests, ErrorWhenCannotReceiveData) { - ExpectOKConnect(); - ExpectOKSendHeader(); - ExpectOKSendData(); - ExpectFailReceive(); + EXPECT_CALL(mock_discovery_service, RotatedUriList(_)). 
+ WillOnce(Return(receivers_list_single)); + + ExpectOKConnect(true); + ExpectOKAuthorize(true); + ExpectOKSendHeader(true); + ExpectOKSendData(true); + ExpectFailReceive(true); request_handler.PrepareProcessingRequestLocked(); auto err = request_handler.ProcessRequestUnlocked(&request); @@ -453,10 +552,12 @@ TEST_F(RequestHandlerTcpTests, ErrorWhenCannotReceiveData) { TEST_F(RequestHandlerTcpTests, ImmediatelyCallBackErrorIfFileAlreadyInUse) { ExpectOKConnect(true); + ExpectOKAuthorize(true); ExpectOKSendHeader(true); ExpectOKSendData(true); EXPECT_CALL(mock_io, Receive_t(expected_sds[0], _, sizeof(asapo::SendDataResponse), _)) + .InSequence(seq_receive) .WillOnce( DoAll( testing::SetArgPointee<3>(nullptr), @@ -476,6 +577,7 @@ TEST_F(RequestHandlerTcpTests, ImmediatelyCallBackErrorIfFileAlreadyInUse) { TEST_F(RequestHandlerTcpTests, SendEmptyCallBack) { ExpectOKConnect(true); + ExpectOKAuthorize(true); ExpectOKSendHeader(true); ExpectOKSendData(true); ExpectOKReceive(); @@ -489,6 +591,7 @@ TEST_F(RequestHandlerTcpTests, SendEmptyCallBack) { TEST_F(RequestHandlerTcpTests, SendOK) { ExpectOKConnect(true); + ExpectOKAuthorize(true); ExpectOKSendHeader(true); ExpectOKSendData(true); ExpectOKReceive(); @@ -502,7 +605,7 @@ TEST_F(RequestHandlerTcpTests, SendOK) { ASSERT_THAT(callback_header.data_size, Eq(header.data_size)); ASSERT_THAT(callback_header.op_code, Eq(header.op_code)); ASSERT_THAT(callback_header.data_id, Eq(header.data_id)); - ASSERT_THAT(std::string{callback_header.file_name}, Eq(std::string{header.file_name})); + ASSERT_THAT(std::string{callback_header.message}, Eq(std::string{header.message})); } diff --git a/producer/api/unittests/test_request_pool.cpp b/producer/api/unittests/test_request_pool.cpp index 64a0b70e6d0b1066e01e586ba58ccefb5fc97535..6ec589c3d949d5fddc5a288b92c37cad683f8d04 100644 --- a/producer/api/unittests/test_request_pool.cpp +++ b/producer/api/unittests/test_request_pool.cpp @@ -62,7 +62,7 @@ class RequestPoolTests : public testing::Test { MockRequestHandlerFactory request_handler_factory{mock_request_handler}; const uint8_t nthreads = 1; asapo::RequestPool pool {nthreads, &request_handler_factory}; - std::unique_ptr<Request> request{new Request{GenericRequestHeader{}, nullptr, nullptr}}; + std::unique_ptr<Request> request{new Request{"", GenericRequestHeader{}, nullptr, nullptr}}; void SetUp() override { pool.log__ = &mock_logger; } @@ -118,7 +118,7 @@ TEST_F(RequestPoolTests, AddRequestCallsSend) { TEST_F(RequestPoolTests, AddRequestCallsSendTwoRequests) { - Request* request2 = new Request{GenericRequestHeader{}, nullptr, nullptr}; + Request* request2 = new Request{"", GenericRequestHeader{}, nullptr, nullptr}; ExpectSend(mock_request_handler, 2); diff --git a/receiver/CMakeLists.txt b/receiver/CMakeLists.txt index 13c0004060ca8f68396d58bed7ab2003f032abc2..c4b327d95e07c6c1eccdda2ed298abfe85f9b001 100644 --- a/receiver/CMakeLists.txt +++ b/receiver/CMakeLists.txt @@ -1,15 +1,18 @@ set(TARGET_NAME receiver) set(SOURCE_FILES - src/receiver.h src/receiver.cpp - src/connection.h src/connection.cpp - src/receiver_error.h + src/receiver.cpp + src/connection.cpp src/request.cpp src/request_handler_file_write.cpp src/statistics.cpp src/statistics_sender_influx_db.cpp src/receiver_config.cpp src/producer_logger.cpp - src/request_handler_db_write.cpp src/statistics_sender_fluentd.cpp src/statistics_sender_fluentd.h) + src/request_handler_db_write.cpp + src/request_handler_authorize.cpp + src/statistics_sender_fluentd.cpp + src/requests_dispatcher.cpp +) 
################################ @@ -47,9 +50,11 @@ set(TEST_SOURCE_FILES unittests/test_request_factory.cpp unittests/test_request_handler_file_write.cpp unittests/test_request_handler_db_writer.cpp + unittests/test_request_handler_authorizer.cpp unittests/test_statistics_sender_influx_db.cpp unittests/test_statistics_sender_fluentd.cpp unittests/mock_receiver_config.cpp + unittests/test_requests_dispatcher.cpp ) # set(TEST_LIBRARIES "${TARGET_NAME};system_io") diff --git a/receiver/src/connection.cpp b/receiver/src/connection.cpp index 4c40b1742051f7be157c0997845441d8fa4156ea..0e30d5463fa6e698dba1911270334949a36973b2 100644 --- a/receiver/src/connection.cpp +++ b/receiver/src/connection.cpp @@ -8,49 +8,18 @@ namespace asapo { -size_t Connection::kRequestHandlerMaxBufferSize; -std::atomic<uint32_t> Connection::kNetworkProducerPeerImplGlobalCounter(0); - Connection::Connection(SocketDescriptor socket_fd, const std::string& address, - std::string receiver_tag): request_factory__{new RequestFactory}, -io__{GenerateDefaultIO()}, statistics__{new Statistics}, log__{GetDefaultReceiverLogger()} { + std::string receiver_tag) : + io__{GenerateDefaultIO()}, + statistics__{new Statistics}, + log__{GetDefaultReceiverLogger()}, +requests_dispatcher__{new RequestsDispatcher{socket_fd, address, statistics__.get()}} { socket_fd_ = socket_fd; - connection_id_ = kNetworkProducerPeerImplGlobalCounter++; address_ = address; statistics__->AddTag("connection_from", address); statistics__->AddTag("receiver_tag", std::move(receiver_tag)); - } -uint64_t Connection::GetId() const noexcept { - return connection_id_; -} - -NetworkErrorCode GetNetworkCodeFromError(const Error& err) { - if(err) { - if(err == IOErrorTemplates::kFileAlreadyExists) { - return NetworkErrorCode::kNetErrorFileIdAlreadyInUse; - } else { - return NetworkErrorCode::kNetErrorInternalServerError; - } - } - return NetworkErrorCode::kNetErrorNoError; -} - -Error Connection::ProcessRequest(const std::unique_ptr<Request>& request) const noexcept { - Error err; - err = request->Handle(&statistics__); - GenericNetworkResponse generic_response; - generic_response.error_code = GetNetworkCodeFromError(err); - if(err) { - log__->Error("error while processing request from " + address_ + " - " + err->Explain()); - } - io__->Send(socket_fd_, &generic_response, sizeof(GenericNetworkResponse), &err); - if(err) { - log__->Error("error sending response to " + address_ + " - " + err->Explain()); - } - return err; -} void Connection::ProcessStatisticsAfterRequest(const std::unique_ptr<Request>& request) const noexcept { @@ -61,18 +30,13 @@ void Connection::ProcessStatisticsAfterRequest(const std::unique_ptr<Request>& r } void Connection::Listen() const noexcept { - while(true) { + while (true) { Error err; - auto request = WaitForNewRequest(&err); - if(err) { - if (err != ErrorTemplates::kEndOfFile) { - log__->Error("error while waiting for request from " + address_ + " - " + err->Explain()); - } + auto request = requests_dispatcher__->GetNextRequest(&err); + if (err) { break; } - if (!request) continue; //no error, but timeout - log__->Debug("processing request from " + address_); - err = ProcessRequest(request); + err = requests_dispatcher__->ProcessRequest(request); if (err) { break; } @@ -84,20 +48,6 @@ void Connection::Listen() const noexcept { } -std::unique_ptr<Request> Connection::WaitForNewRequest(Error* err) const noexcept { - //TODO: to be overwritten with MessagePack (or similar) - GenericRequestHeader generic_request_header; - 
statistics__->StartTimer(StatisticEntity::kNetwork); - io__->ReceiveWithTimeout(socket_fd_, &generic_request_header, sizeof(GenericRequestHeader), 50, err); - if(*err) { - if(*err == IOErrorTemplates::kTimeout) { - *err = nullptr;//Not an error in this case - } - return nullptr; - } - statistics__->StopTimer(); - return request_factory__->GenerateRequest(generic_request_header, socket_fd_, err); } -} diff --git a/receiver/src/connection.h b/receiver/src/connection.h index 3cb73a789bec8055e997028fe37d18e384bd3efe..edeb7f9840f13d3408a2cde6250015d88c72f37a 100644 --- a/receiver/src/connection.h +++ b/receiver/src/connection.h @@ -16,32 +16,27 @@ #include "request.h" #include "statistics.h" #include "logger/logger.h" +#include "requests_dispatcher.h" namespace asapo { class Connection { public: private: - uint32_t connection_id_; std::string address_; int socket_fd_; public: - static size_t kRequestHandlerMaxBufferSize; - static std::atomic<uint32_t> kNetworkProducerPeerImplGlobalCounter; Connection(SocketDescriptor socket_fd, const std::string& address, std::string receiver_tag); ~Connection() = default; void Listen() const noexcept; - uint64_t GetId() const noexcept; - std::unique_ptr<RequestFactory> request_factory__; std::unique_ptr<IO> io__; mutable std::unique_ptr<Statistics> statistics__; const AbstractLogger* log__; + std::unique_ptr<RequestsDispatcher> requests_dispatcher__; private: - std::unique_ptr<Request> WaitForNewRequest(Error* err) const noexcept; - Error ProcessRequest(const std::unique_ptr<Request>& request) const noexcept; void ProcessStatisticsAfterRequest(const std::unique_ptr<Request>& request) const noexcept; }; diff --git a/receiver/src/receiver_config.cpp b/receiver/src/receiver_config.cpp index 98c5700c3b22b2abaaf68f5aab4687213ec3c6d7..d2eb57eb7e716b0305ec61762b1453998fac35fd 100644 --- a/receiver/src/receiver_config.cpp +++ b/receiver/src/receiver_config.cpp @@ -20,8 +20,9 @@ Error ReceiverConfigFactory::SetConfigFromFile(std::string file_name) { (err = parser.GetBool("WriteToDisk", &config.write_to_disk)) || (err = parser.GetBool("WriteToDb", &config.write_to_db)) || (err = parser.GetString("BrokerDbAddress", &config.broker_db_uri)) || - (err = parser.GetString("BrokerDbName", &config.broker_db_name)) || (err = parser.GetString("Tag", &config.tag)) || + (err = parser.GetString("AuthorizationServer", &config.authorization_server)) || + (err = parser.GetUInt64("AuthorizationInterval", &config.authorization_interval_ms)) || (err = parser.GetString("RootFolder", &config.root_folder)) || (err = parser.GetString("MonitorDbName", &config.monitor_db_name)); (err = parser.GetString("LogLevel", &log_level)); diff --git a/receiver/src/receiver_config.h b/receiver/src/receiver_config.h index bb5ed448e5fa0d7d3aa4a4d3e701267c5c2879af..d99d07617b84de3a53d5e0edbff06dec75a1e5cc 100644 --- a/receiver/src/receiver_config.h +++ b/receiver/src/receiver_config.h @@ -11,9 +11,10 @@ struct ReceiverConfig { std::string monitor_db_uri; std::string monitor_db_name; std::string broker_db_uri; - std::string broker_db_name; std::string root_folder; uint64_t listen_port = 0; + std::string authorization_server; + uint64_t authorization_interval_ms = 0; bool write_to_disk = false; bool write_to_db = false; LogLevel log_level = LogLevel::Info; diff --git a/receiver/src/receiver_error.h b/receiver/src/receiver_error.h index 6af4287ebd29b981de49d42df436fa1ea7c95f6f..c90a0d55531c754800df049b26dadd91e60979ea 100644 --- a/receiver/src/receiver_error.h +++ b/receiver/src/receiver_error.h @@ -7,7 +7,8 @@ 
namespace asapo { enum class ReceiverErrorType { kInvalidOpCode, - kBadRequest + kBadRequest, + kAuthorizationFailure }; //TODO Make a marco to create error class and error template class @@ -61,6 +62,10 @@ auto const kBadRequest = ReceiverErrorTemplate { "Bad request", ReceiverErrorType::kBadRequest }; +auto const kAuthorizationFailure = ReceiverErrorTemplate { + "authorization failure", ReceiverErrorType::kAuthorizationFailure +}; + }; } diff --git a/receiver/src/request.cpp b/receiver/src/request.cpp index 1a836e9f67e2f2907a0b194f30eb693d81900e78..8c10e991f71b4590f3c700ce4b31bf92dd50c4b5 100644 --- a/receiver/src/request.cpp +++ b/receiver/src/request.cpp @@ -5,7 +5,8 @@ namespace asapo { Request::Request(const GenericRequestHeader& header, - SocketDescriptor socket_fd) : io__{GenerateDefaultIO()}, request_header_(header), socket_fd_{socket_fd} { + SocketDescriptor socket_fd, std::string origin_uri) : io__{GenerateDefaultIO()}, request_header_(header), + socket_fd_{socket_fd}, origin_uri_{std::move(origin_uri)} { } Error Request::AllocateDataBuffer() { @@ -29,23 +30,23 @@ Error Request::ReceiveData() { } -Error Request::Handle(std::unique_ptr<Statistics>* statistics) { +Error Request::Handle(Statistics* statistics) { Error err; if (request_header_.data_size != 0) { - (*statistics)->StartTimer(StatisticEntity::kNetwork); + statistics->StartTimer(StatisticEntity::kNetwork); auto err = ReceiveData(); if (err) { return err; } - (*statistics)->StopTimer(); + statistics->StopTimer(); } for (auto handler : handlers_) { - (*statistics)->StartTimer(handler->GetStatisticEntity()); - auto err = handler->ProcessRequest(*this); + statistics->StartTimer(handler->GetStatisticEntity()); + auto err = handler->ProcessRequest(this); if (err) { return err; } - (*statistics)->StopTimer(); + statistics->StopTimer(); } return nullptr; } @@ -77,14 +78,39 @@ std::string Request::GetFileName() const { return std::to_string(request_header_.data_id) + ".bin"; } +const std::string& Request::GetOriginUri() const { + return origin_uri_; +} +const std::string& Request::GetBeamtimeId() const { + return beamtime_id_; +} +void Request::SetBeamtimeId(std::string beamtime_id) { + beamtime_id_ = std::move(beamtime_id); +} + +Opcode Request::GetOpCode() const { + return request_header_.op_code; +} +const char* Request::GetMessage() const { + return request_header_.message; +} + +void Request::SetBeamline(std::string beamline) { + beamline_ = std::move(beamline); +} + +const std::string& Request::GetBeamline() const { + return beamline_; +} + std::unique_ptr<Request> RequestFactory::GenerateRequest(const GenericRequestHeader& - request_header, SocketDescriptor socket_fd, + request_header, SocketDescriptor socket_fd, std::string origin_uri, Error* err) const noexcept { *err = nullptr; + auto request = std::unique_ptr<Request> {new Request{request_header, socket_fd, std::move(origin_uri)}}; switch (request_header.op_code) { case Opcode::kOpcodeTransferData: { - auto request = std::unique_ptr<Request> {new Request{request_header, socket_fd}}; - + request->AddHandler(&request_handler_authorize_); if (GetReceiverConfig()->write_to_disk) { request->AddHandler(&request_handler_filewrite_); } @@ -95,6 +121,10 @@ std::unique_ptr<Request> RequestFactory::GenerateRequest(const GenericRequestHea return request; } + case Opcode::kOpcodeAuthorize: { + request->AddHandler(&request_handler_authorize_); + return request; + } default: *err = ReceiverErrorTemplates::kInvalidOpCode.Generate(); return nullptr; diff --git a/receiver/src/request.h 
b/receiver/src/request.h index 3b2f05698d8d7482e5f4c5127165ebd267a6edd9..b9350a9b6226bbf468031ccc614589d8fc12646e 100644 --- a/receiver/src/request.h +++ b/receiver/src/request.h @@ -1,30 +1,42 @@ #ifndef ASAPO_REQUEST_H #define ASAPO_REQUEST_H +#include <string> + #include "receiver_error.h" #include "common/networking.h" #include "io/io.h" #include "request_handler.h" #include "request_handler_file_write.h" #include "request_handler_db_write.h" +#include "request_handler_authorize.h" + #include "statistics.h" +#include "preprocessor/definitions.h" namespace asapo { using RequestHandlerList = std::vector<const RequestHandler*>; class Request { public: - virtual Error Handle(std::unique_ptr<Statistics>*); - virtual ~Request() = default; - Request(const GenericRequestHeader& request_header, SocketDescriptor socket_fd); - void AddHandler(const RequestHandler*); - const RequestHandlerList& GetListHandlers() const; - virtual uint64_t GetDataSize() const; - virtual uint64_t GetDataID() const; - virtual std::string GetFileName() const; - - virtual const FileData& GetData() const; + VIRTUAL Error Handle(Statistics*); + ~Request() = default; + Request(const GenericRequestHeader& request_header, SocketDescriptor socket_fd, std::string origin_uri); + VIRTUAL void AddHandler(const RequestHandler*); + VIRTUAL const RequestHandlerList& GetListHandlers() const; + VIRTUAL uint64_t GetDataSize() const; + VIRTUAL uint64_t GetDataID() const; + VIRTUAL std::string GetFileName() const; + VIRTUAL const FileData& GetData() const; + VIRTUAL Opcode GetOpCode() const; + VIRTUAL const char* GetMessage() const; + + const std::string& GetOriginUri() const; + VIRTUAL const std::string& GetBeamtimeId() const; + VIRTUAL void SetBeamtimeId(std::string beamtime_id); + VIRTUAL void SetBeamline(std::string beamline); + VIRTUAL const std::string& GetBeamline() const; std::unique_ptr<IO> io__; private: Error AllocateDataBuffer(); @@ -33,15 +45,19 @@ class Request { const SocketDescriptor socket_fd_; FileData data_buffer_; RequestHandlerList handlers_; + std::string origin_uri_; + std::string beamtime_id_; + std::string beamline_; }; class RequestFactory { public: virtual std::unique_ptr<Request> GenerateRequest(const GenericRequestHeader& request_header, - SocketDescriptor socket_fd, Error* err) const noexcept; + SocketDescriptor socket_fd, std::string origin_uri, Error* err) const noexcept; private: RequestHandlerFileWrite request_handler_filewrite_; RequestHandlerDbWrite request_handler_dbwrite_; + RequestHandlerAuthorize request_handler_authorize_; }; } diff --git a/receiver/src/request_handler.h b/receiver/src/request_handler.h index b4a0dc1f39724047876847dbcbe5f8cc7e5d8002..49d4da20a8cc3b555d3d3f3a875cf3434d5553f0 100644 --- a/receiver/src/request_handler.h +++ b/receiver/src/request_handler.h @@ -10,7 +10,7 @@ class Request; class RequestHandler { public: - virtual Error ProcessRequest(const Request& request) const = 0; + virtual Error ProcessRequest(Request* request) const = 0; virtual StatisticEntity GetStatisticEntity() const = 0; virtual ~RequestHandler() = default; private: diff --git a/receiver/src/request_handler_authorize.cpp b/receiver/src/request_handler_authorize.cpp new file mode 100644 index 0000000000000000000000000000000000000000..597584601c01f0646920239b85a55574c2418418 --- /dev/null +++ b/receiver/src/request_handler_authorize.cpp @@ -0,0 +1,104 @@ +#include "request_handler_authorize.h" +#include "receiver_config.h" +#include "receiver_logger.h" +#include "request.h" + +#include 
"json_parser/json_parser.h" + +using std::chrono::high_resolution_clock; + +namespace asapo { + +std::string RequestHandlerAuthorize::GetRequestString(const Request* request, const char* beamtime_id) const { + std::string request_string = std::string("{\"BeamtimeId\":\"") + + beamtime_id + "\",\"OriginHost\":\"" + request->GetOriginUri() + "\"}"; + return request_string; +} + +Error RequestHandlerAuthorize::ErrorFromServerResponse(const Error& err, HttpCode code) const { + Error auth_error = asapo::ReceiverErrorTemplates::kAuthorizationFailure.Generate(); + if (err) { + auth_error->Append(err->Explain()); + return auth_error; + } else { + auth_error->Append("return code " + std::to_string(int(code))); + return auth_error; + } +} + +Error RequestHandlerAuthorize::Authorize(Request* request, const char* beamtime_id) const { + HttpCode code; + Error err; + std::string request_string = GetRequestString(request, beamtime_id); + + auto response = http_client__->Post(GetReceiverConfig()->authorization_server + "/authorize", request_string, &code, + &err); + if (err || code != HttpCode::OK) { + auto auth_error = ErrorFromServerResponse(err, code); + log__->Error("failure authorizing at " + GetReceiverConfig()->authorization_server + " request: " + request_string + + " - " + + auth_error->Explain()); + return auth_error; + } + + last_updated_ = high_resolution_clock::now(); + + JsonStringParser parser{response}; + (err = parser.GetString("BeamtimeId", &beamtime_id_)) || + (err = parser.GetString("Beamline", &beamline_)); + if (err) { + return ErrorFromServerResponse(err, code); + } + + return nullptr; +} + +Error RequestHandlerAuthorize::ProcessAuthorizationRequest(Request* request) const { + if (!beamtime_id_.empty()) { + Error auth_error = asapo::ReceiverErrorTemplates::kAuthorizationFailure.Generate(); + auth_error->Append("already authorized"); + log__->Error("failure authorizing at " + GetReceiverConfig()->authorization_server + " - " + + "already authorized"); + return auth_error; + } + + return Authorize(request, request->GetMessage()); +} + +Error RequestHandlerAuthorize::ProcessOtherRequest(Request* request) const { + if (beamtime_id_.empty()) { + return ReceiverErrorTemplates::kAuthorizationFailure.Generate(); + } + + auto elapsed_ms = std::chrono::duration_cast<std::chrono::milliseconds> + (high_resolution_clock::now() - last_updated_).count(); + if (elapsed_ms >= GetReceiverConfig()->authorization_interval_ms) { + auto err = Authorize(request, beamtime_id_.c_str()); + if (err) { + return err; + } + } + request->SetBeamtimeId(beamtime_id_); + request->SetBeamline(beamline_); + return nullptr; +} + + +Error RequestHandlerAuthorize::ProcessRequest(Request* request) const { + if (request->GetOpCode() == kOpcodeAuthorize) { + return ProcessAuthorizationRequest(request); + } else { + return ProcessOtherRequest(request); + } +} + +RequestHandlerAuthorize::RequestHandlerAuthorize(): log__{GetDefaultReceiverLogger()}, + http_client__{DefaultHttpClient()} { +} + +StatisticEntity RequestHandlerAuthorize::GetStatisticEntity() const { + return StatisticEntity::kAuthorizer; +} + + +} \ No newline at end of file diff --git a/receiver/src/request_handler_authorize.h b/receiver/src/request_handler_authorize.h new file mode 100644 index 0000000000000000000000000000000000000000..59069240cbe2988e48bed21c7d78672f75aa62f2 --- /dev/null +++ b/receiver/src/request_handler_authorize.h @@ -0,0 +1,36 @@ +#ifndef ASAPO_REQUEST_HANDLER_AUTHORIZE_H +#define ASAPO_REQUEST_HANDLER_AUTHORIZE_H + +#include <chrono> + 
+#include "request_handler.h" +#include "logger/logger.h" +#include "http_client/http_client.h" + + +#include "io/io.h" + +namespace asapo { + +class RequestHandlerAuthorize final: public RequestHandler { + public: + RequestHandlerAuthorize(); + StatisticEntity GetStatisticEntity() const override; + Error ProcessRequest(Request* request) const override; + const AbstractLogger* log__; + std::unique_ptr<HttpClient>http_client__; + private: + mutable std::string beamtime_id_; + mutable std::string beamline_; + mutable std::chrono::high_resolution_clock::time_point last_updated_; + Error ProcessAuthorizationRequest(Request* request) const; + Error ProcessOtherRequest(Request* request) const; + Error Authorize(Request* request, const char* beamtime_id) const; + Error ErrorFromServerResponse(const Error& err, HttpCode code) const; + + std::string GetRequestString(const Request* request, const char* beamtime_id) const; +}; + +} + +#endif //ASAPO_REQUEST_HANDLER_AUTHORIZE_H diff --git a/receiver/src/request_handler_db_write.cpp b/receiver/src/request_handler_db_write.cpp index 9461205a9144e785cf7edb07764effedcc2a0f7c..7da85f40d12bd794abe320df6e4b52a7738fbf50 100644 --- a/receiver/src/request_handler_db_write.cpp +++ b/receiver/src/request_handler_db_write.cpp @@ -5,18 +5,22 @@ namespace asapo { -Error RequestHandlerDbWrite::ProcessRequest(const Request& request) const { +Error RequestHandlerDbWrite::ProcessRequest(Request* request) const { + if (db_name_.empty()) { + db_name_ = request->GetBeamtimeId(); + } + if (Error err = ConnectToDbIfNeeded() ) { return err; } FileInfo file_info; - file_info.name = request.GetFileName(); - file_info.size = request.GetDataSize(); - file_info.id = request.GetDataID(); + file_info.name = request->GetFileName(); + file_info.size = request->GetDataSize(); + file_info.id = request->GetDataID(); auto err = db_client__->Insert(file_info, false); if (!err) { - log__->Debug(std::string{"insert record to "} + kDBCollectionName + " in " + GetReceiverConfig()->broker_db_name + + log__->Debug(std::string{"insert record to "} + kDBCollectionName + " in " + db_name_ + " at " + GetReceiverConfig()->broker_db_uri); } return err; @@ -34,7 +38,7 @@ StatisticEntity RequestHandlerDbWrite::GetStatisticEntity() const { Error RequestHandlerDbWrite::ConnectToDbIfNeeded() const { if (!connected_to_db) { - Error err = db_client__->Connect(GetReceiverConfig()->broker_db_uri, GetReceiverConfig()->broker_db_name, + Error err = db_client__->Connect(GetReceiverConfig()->broker_db_uri, db_name_, kDBCollectionName); if (err) { return err; diff --git a/receiver/src/request_handler_db_write.h b/receiver/src/request_handler_db_write.h index 8fbd671dcdef29dd62aed51bda16e19657330b39..dc0152f848739da55470df9dea7b828c70d6b942 100644 --- a/receiver/src/request_handler_db_write.h +++ b/receiver/src/request_handler_db_write.h @@ -13,12 +13,13 @@ class RequestHandlerDbWrite final: public RequestHandler { public: RequestHandlerDbWrite(); StatisticEntity GetStatisticEntity() const override; - Error ProcessRequest(const Request& request) const override; + Error ProcessRequest(Request* request) const override; std::unique_ptr<Database> db_client__; const AbstractLogger* log__; private: Error ConnectToDbIfNeeded() const; mutable bool connected_to_db = false; + mutable std::string db_name_; }; } diff --git a/receiver/src/request_handler_file_write.cpp b/receiver/src/request_handler_file_write.cpp index 62f0c6395885b17a812892e80a126abbe1ed7400..552393f76dbb4ad0cec55feb6435f163614a9c5e 100644 --- 
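
RequestHandlerAuthorize talks to an external service over HTTP: each authorization POSTs a small JSON body to <AuthorizationServer>/authorize and, on HTTP OK, reads the beamtime id and beamline back; the pair is cached and refreshed only after authorization_interval_ms has elapsed. The payload sketch below uses made-up values (the authorizer service itself is outside this diff); the request body matches GetRequestString() and the reply is parsed the same way Authorize() does.

```cpp
// Payload contract sketch for the /authorize endpoint. "11111111", "p01" and the
// producer host are illustrative values, not taken from a real deployment.
#include <string>

#include "json_parser/json_parser.h"

void AuthorizerPayloadExample() {
    // body POSTed to <AuthorizationServer>/authorize (built like GetRequestString()):
    std::string request_body =
        R"({"BeamtimeId":"11111111","OriginHost":"producer_host:4200"})";

    // reply expected on HttpCode::OK:
    std::string reply = R"({"BeamtimeId":"11111111","Beamline":"p01"})";

    asapo::JsonStringParser parser{reply};
    std::string beamtime_id, beamline;
    auto err = parser.GetString("BeamtimeId", &beamtime_id);
    if (err == nullptr) {
        err = parser.GetString("Beamline", &beamline);
    }
    // on success the handler caches both values and applies them to every
    // subsequent request via SetBeamtimeId()/SetBeamline()
    (void)request_body;
}
```

The cached beamtime id also replaces the former BrokerDbName setting in RequestHandlerDbWrite: the database name is bound lazily from the first processed request, so each beamtime lands in its own Mongo database while the collection stays kDBCollectionName (the request_handler_file_write.cpp changes right below apply the same per-beamtime split to the on-disk path). A condensed sketch with an illustrative URI:

```cpp
// What the db-write path boils down to after this change; "localhost:27017" is illustrative.
#include "common/data_structs.h"
#include "database/database.h"
#include "request.h"

asapo::Error InsertRecordSketch(asapo::Database* db, const asapo::Request& request) {
    asapo::FileInfo file_info;
    file_info.name = request.GetFileName();
    file_info.size = request.GetDataSize();
    file_info.id = request.GetDataID();

    // database name == beamtime id set by the authorization handler
    auto err = db->Connect("localhost:27017", request.GetBeamtimeId(), asapo::kDBCollectionName);
    if (err) {
        return err;
    }
    return db->Insert(file_info, false);
}
```
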
a/receiver/src/request_handler_file_write.cpp +++ b/receiver/src/request_handler_file_write.cpp @@ -7,16 +7,18 @@ namespace asapo { -Error RequestHandlerFileWrite::ProcessRequest(const Request& request) const { - auto fsize = request.GetDataSize(); +Error RequestHandlerFileWrite::ProcessRequest(Request* request) const { + auto fsize = request->GetDataSize(); if (fsize <= 0 || fsize > kMaxFileSize) { return ReceiverErrorTemplates::kBadRequest.Generate(); } - const FileData& data = request.GetData(); + const FileData& data = request->GetData(); - auto fname = request.GetFileName(); - auto root_folder = GetReceiverConfig()->root_folder + kPathSeparator; + auto fname = request->GetFileName(); + auto root_folder = GetReceiverConfig()->root_folder + kPathSeparator + + request->GetBeamline() + kPathSeparator + + request->GetBeamtimeId() + kPathSeparator; auto err = io__->WriteDataToFile(root_folder + fname, data, fsize); if (!err) { log__->Debug("saved file of size " + std::to_string(fsize) + " to " + root_folder + fname); diff --git a/receiver/src/request_handler_file_write.h b/receiver/src/request_handler_file_write.h index 95756f6881d8d3d17e05bea9ba5ac9923c652bb5..a0d36b48a2aa8dad0a7e6c5a357668746754b2db 100644 --- a/receiver/src/request_handler_file_write.h +++ b/receiver/src/request_handler_file_write.h @@ -14,7 +14,7 @@ class RequestHandlerFileWrite final: public RequestHandler { public: RequestHandlerFileWrite(); StatisticEntity GetStatisticEntity() const override; - Error ProcessRequest(const Request& request) const override; + Error ProcessRequest(Request* request) const override; std::unique_ptr<IO> io__; const AbstractLogger* log__; }; diff --git a/receiver/src/requests_dispatcher.cpp b/receiver/src/requests_dispatcher.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ddd2ad76b415b40b65622e4c4fca241fb33798c3 --- /dev/null +++ b/receiver/src/requests_dispatcher.cpp @@ -0,0 +1,169 @@ +#include "requests_dispatcher.h" +#include "request.h" +#include "io/io_factory.h" +#include "receiver_logger.h" + +namespace asapo { + +RequestsDispatcher::RequestsDispatcher(SocketDescriptor socket_fd, std::string address, + Statistics* statistics) : statistics__{statistics}, + io__{GenerateDefaultIO()}, + log__{GetDefaultReceiverLogger()}, + request_factory__{new RequestFactory{}}, + socket_fd_{socket_fd}, +producer_uri_{std::move(address)} { +} + +NetworkErrorCode GetNetworkCodeFromError(const Error& err) { + if (err) { + if (err == IOErrorTemplates::kFileAlreadyExists) { + return NetworkErrorCode::kNetErrorFileIdAlreadyInUse; + } else if (err == ReceiverErrorTemplates::kAuthorizationFailure) { + return NetworkErrorCode::kNetAuthorizationError; + } else { + return NetworkErrorCode::kNetErrorInternalServerError; + } + } + return NetworkErrorCode::kNetErrorNoError; +} + +Error RequestsDispatcher::ProcessRequest(const std::unique_ptr<Request>& request) const noexcept { + log__->Debug("processing request from " + producer_uri_ ); + Error handle_err; + handle_err = request->Handle(statistics__); + GenericNetworkResponse generic_response; + generic_response.error_code = GetNetworkCodeFromError(handle_err); + strcpy(generic_response.message, ""); + if (handle_err) { + log__->Error("error processing request from " + producer_uri_ + " - " + handle_err->Explain()); + strncpy(generic_response.message, handle_err->Explain().c_str(), kMaxMessageSize); + } + log__->Debug("sending response to " + producer_uri_ ); + Error io_err; + io__->Send(socket_fd_, &generic_response, 
sizeof(GenericNetworkResponse), &io_err); + if (io_err) { + log__->Error("error sending response to " + producer_uri_ + " - " + io_err->Explain()); + } + return handle_err == nullptr ? std::move(io_err) : std::move(handle_err); +} + +std::unique_ptr<Request> RequestsDispatcher::GetNextRequest(Error* err) const noexcept { +//TODO: to be overwritten with MessagePack (or similar) + GenericRequestHeader generic_request_header; + statistics__-> StartTimer(StatisticEntity::kNetwork); + io__-> Receive(socket_fd_, &generic_request_header, + sizeof(GenericRequestHeader), err); + if(*err) { + log__->Error("error getting next request from " + producer_uri_ + " - " + (*err)-> + Explain() + ); + return nullptr; + } + statistics__-> StopTimer(); + auto request = request_factory__->GenerateRequest(generic_request_header, socket_fd_, producer_uri_, err); + if (*err) { + log__->Error("error processing request from " + producer_uri_ + " - " + (*err)-> + Explain() + ); + } + + return request; +} + + + +/* + #include <cstring> +#include <assert.h> +#include "connection.h" +#include "receiver_error.h" +#include "io/io_factory.h" + +#include "receiver_logger.h" + +namespace asapo { + +size_t Connection::kRequestHandlerMaxBufferSize; +std::atomic<uint32_t> Connection::kNetworkProducerPeerImplGlobalCounter(0); + +Connection::Connection(SocketDescriptor socket_fd, const std::string& address, + std::string receiver_tag) : request_factory__{new RequestFactory}, + io__{GenerateDefaultIO()}, + statistics__{new Statistics}, + log__{GetDefaultReceiverLogger()}, + authorizer__{new ConnectionAuthorizer}, + requests_dispatcher__{new RequestsDispatcher}{ + socket_fd_ = socket_fd; + connection_id_ = kNetworkProducerPeerImplGlobalCounter++; + address_ = address; + statistics__->AddTag("connection_from", address); + statistics__->AddTag("receiver_tag", std::move(receiver_tag)); +} + +uint64_t Connection::GetId() const noexcept { + return connection_id_; +} + + +Error Connection::ReadAuthorizationHeaderIfNeeded() const { + if (auth_header_was_read_) return nullptr; + + Error err; + GenericRequestHeader generic_request_header; + io__->Receive(socket_fd_, &generic_request_header, sizeof(GenericRequestHeader), &err); + if (err) { + log__->Error("error receive authorization header from " + address_ + " - " + err->Explain()); + return err; + } + + if (generic_request_header.op_code != kOpcodeAuthorize) { + std::string msg= "wrong code in authorization header from " + address_; + log__->Error(msg); + return TextError(msg); + } + + beamtime_id_=std::string{generic_request_header.message}; + return nullptr; +} + +Error Connection::SendAuthorizationResponseIfNeeded(const Error& auth_err) const { + if (auth_header_was_read_) return nullptr; + + GenericNetworkResponse generic_response; + if (auth_err == nullptr) { + generic_response.error_code = kNetErrorNoError; + } else { + generic_response.error_code = kNetAuthorizationError; + strcpy(generic_response.message, auth_err->Explain().c_str()); + } + + Error send_err; + io__->Send(socket_fd_, &generic_response, sizeof(GenericNetworkResponse), &send_err); + if (send_err) { + log__->Error("error sending authorization response to " + address_ + " - " + send_err->Explain()); + return send_err; + } + auth_header_was_read_ = true; + return nullptr; +} + +Error Connection::AuthorizeIfNeeded() const { + Error err = ReadAuthorizationHeaderIfNeeded(); + if (err == nullptr) { + err = authorizer__->Authorize(beamtime_id_,address_); + } + Error err_send = SendAuthorizationResponseIfNeeded(err); + + 
return err == nullptr ? std::move(err_send) : std::move(err); +} + + + +} + + + + + */ + +} diff --git a/receiver/src/requests_dispatcher.h b/receiver/src/requests_dispatcher.h new file mode 100644 index 0000000000000000000000000000000000000000..6af25e506b00a0228ef08705c49144a85fbed6c4 --- /dev/null +++ b/receiver/src/requests_dispatcher.h @@ -0,0 +1,42 @@ +#ifndef ASAPO_REQUESTS_DISPATCHER_H +#define ASAPO_REQUESTS_DISPATCHER_H + +#include "preprocessor/definitions.h" +#include "common/error.h" +#include "request.h" +#include "io/io.h" +#include "statistics.h" +#include "logger/logger.h" + +namespace asapo { + +class RequestsDispatcher { + public: + RequestsDispatcher(SocketDescriptor socket_fd, std::string address, Statistics* statistics); + VIRTUAL Error ProcessRequest(const std::unique_ptr<Request>& request) const noexcept; + VIRTUAL std::unique_ptr<Request> GetNextRequest(Error* err) const noexcept; + Statistics* statistics__; + std::unique_ptr<IO> io__; + const AbstractLogger* log__; + std::unique_ptr<RequestFactory> request_factory__; + private: + SocketDescriptor socket_fd_; + std::string producer_uri_; +}; + +} + +#endif //ASAPO_REQUESTS_DISPATCHER_H + + +/* + mutable bool auth_header_was_read_ = false; + Error ReadAuthorizationHeaderIfNeeded() const; + Error SendAuthorizationResponseIfNeeded(const Error& auth_err) const; + Error AuthorizeIfNeeded() const; + std::unique_ptr<Request> WaitForNewRequest(Error* err) const noexcept; + Error ProcessRequest(const std::unique_ptr<Request>& request) const noexcept; + void ProcessStatisticsAfterRequest(const std::unique_ptr<Request>& request) const noexcept; + mutable std::string beamtime_id_; + + */ \ No newline at end of file diff --git a/receiver/src/statistics.h b/receiver/src/statistics.h index be72cbfe07b283f5eea5c03896ea2ed5682ca046..7d7cee7f6d264a656459f9d3101227df2fed9524 100644 --- a/receiver/src/statistics.h +++ b/receiver/src/statistics.h @@ -18,6 +18,7 @@ enum StatisticEntity : int { kDatabase = 0, kDisk, kNetwork, + kAuthorizer }; struct StatisticsToSend { diff --git a/receiver/unittests/mock_receiver_config.cpp b/receiver/unittests/mock_receiver_config.cpp index 6b945bdd5d833f46d9e5c245b4b5d264e70aca6b..ae51f8f86bcaa0e294882f2f3f53ae454aaad5b6 100644 --- a/receiver/unittests/mock_receiver_config.cpp +++ b/receiver/unittests/mock_receiver_config.cpp @@ -36,9 +36,10 @@ Error SetReceiverConfig (const ReceiverConfig& config) { auto config_string = std::string("{\"MonitorDbAddress\":") + "\"" + config.monitor_db_uri + "\""; config_string += "," + std::string("\"MonitorDbName\":") + "\"" + config.monitor_db_name + "\""; - config_string += "," + std::string("\"BrokerDbName\":") + "\"" + config.broker_db_name + "\""; config_string += "," + std::string("\"BrokerDbAddress\":") + "\"" + config.broker_db_uri + "\""; config_string += "," + std::string("\"ListenPort\":") + std::to_string(config.listen_port); + config_string += "," + std::string("\"AuthorizationInterval\":") + std::to_string(config.authorization_interval_ms); + config_string += "," + std::string("\"AuthorizationServer\":") + "\"" + config.authorization_server + "\""; config_string += "," + std::string("\"WriteToDisk\":") + (config.write_to_disk ? "true" : "false"); config_string += "," + std::string("\"WriteToDb\":") + (config.write_to_db ? 
"true" : "false"); config_string += "," + std::string("\"LogLevel\":") + "\"" + log_level + "\""; @@ -48,6 +49,7 @@ Error SetReceiverConfig (const ReceiverConfig& config) { config_string += "}"; + EXPECT_CALL(mock_io, ReadFileToString_t("fname", _)).WillOnce( testing::Return(config_string) ); diff --git a/receiver/unittests/mock_statistics.h b/receiver/unittests/receiver_mocking.h similarity index 60% rename from receiver/unittests/mock_statistics.h rename to receiver/unittests/receiver_mocking.h index f639caa0433fb57f23e900c7daafb443494cd998..6881a0b51428d83b4a4fa2ccf9ce2e7edd522334 100644 --- a/receiver/unittests/mock_statistics.h +++ b/receiver/unittests/receiver_mocking.h @@ -5,6 +5,7 @@ #include <gmock/gmock.h> #include "../src/statistics.h" +#include "../src/request.h" namespace asapo { @@ -42,6 +43,24 @@ class MockStatistics : public asapo::Statistics { }; +class MockRequest: public Request { + public: + MockRequest(const GenericRequestHeader& request_header, SocketDescriptor socket_fd, std::string origin_uri): + Request(request_header, socket_fd, std::move(origin_uri)) {}; + + MOCK_CONST_METHOD0(GetFileName, std::string()); + MOCK_CONST_METHOD0(GetDataSize, uint64_t()); + MOCK_CONST_METHOD0(GetDataID, uint64_t()); + MOCK_CONST_METHOD0(GetData, const asapo::FileData & ()); + MOCK_CONST_METHOD0(GetBeamtimeId, const std::string & ()); + MOCK_CONST_METHOD0(GetBeamline, const std::string & ()); + MOCK_CONST_METHOD0(GetOpCode, asapo::Opcode ()); + MOCK_CONST_METHOD0(GetMessage, const char* ()); + MOCK_METHOD1(SetBeamtimeId, void (std::string)); + MOCK_METHOD1(SetBeamline, void (std::string)); +}; + + } #endif //ASAPO_MOCK_STATISTICS_H diff --git a/receiver/unittests/test_config.cpp b/receiver/unittests/test_config.cpp index bc3487bad984a869e406d49c24833379f231b890..45e2a4e1a628ab745523edde35919ccd9f23ec2f 100644 --- a/receiver/unittests/test_config.cpp +++ b/receiver/unittests/test_config.cpp @@ -56,9 +56,10 @@ TEST_F(ConfigTests, ReadSettings) { test_config.write_to_disk = true; test_config.write_to_db = true; test_config.broker_db_uri = "localhost:27017"; - test_config.broker_db_name = "test"; test_config.log_level = asapo::LogLevel::Error; test_config.root_folder = "test_fodler"; + test_config.authorization_interval_ms = 10000; + test_config.authorization_server = "AuthorizationServer"; auto err = asapo::SetReceiverConfig(test_config); @@ -68,8 +69,9 @@ TEST_F(ConfigTests, ReadSettings) { ASSERT_THAT(config->monitor_db_uri, Eq("localhost:8086")); ASSERT_THAT(config->monitor_db_name, Eq("db_test")); ASSERT_THAT(config->broker_db_uri, Eq("localhost:27017")); - ASSERT_THAT(config->broker_db_name, Eq("test")); ASSERT_THAT(config->listen_port, Eq(4200)); + ASSERT_THAT(config->authorization_interval_ms, Eq(10000)); + ASSERT_THAT(config->authorization_server, Eq("AuthorizationServer")); ASSERT_THAT(config->write_to_disk, Eq(true)); ASSERT_THAT(config->write_to_db, Eq(true)); ASSERT_THAT(config->log_level, Eq(asapo::LogLevel::Error)); diff --git a/receiver/unittests/test_connection.cpp b/receiver/unittests/test_connection.cpp index af7d83535f771910921fdb508841a6ff6af86894..2ce08a8fca7e6bc8264c4508cb2651cc0302a95a 100644 --- a/receiver/unittests/test_connection.cpp +++ b/receiver/unittests/test_connection.cpp @@ -7,7 +7,13 @@ #include "../src/receiver_error.h" #include "../src/request.h" #include "../src/statistics.h" -#include "mock_statistics.h" +#include "receiver_mocking.h" +#include "../src/receiver_config.h" +#include "../src/receiver_config_factory.h" +#include 
"../src/requests_dispatcher.h" + +#include "mock_receiver_config.h" + using ::testing::Test; using ::testing::Return; @@ -23,8 +29,10 @@ using ::testing::SaveArg; using ::testing::SaveArgPointee; using ::testing::InSequence; using ::testing::HasSubstr; +using ::testing::StrEq; using ::testing::SetArgPointee; using ::testing::AllOf; +using testing::Sequence; using asapo::Error; using asapo::ErrorInterface; @@ -43,215 +51,133 @@ using asapo::Statistics; using asapo::StatisticEntity; using asapo::MockStatistics; +using asapo::ReceiverConfig; +using asapo::SetReceiverConfig; + namespace { TEST(Connection, Constructor) { Connection connection{0, "some_address", "some_tag"}; ASSERT_THAT(dynamic_cast<asapo::Statistics*>(connection.statistics__.get()), Ne(nullptr)); - ASSERT_THAT(dynamic_cast<asapo::IO*>(connection.io__.get()), Ne(nullptr)); - ASSERT_THAT(dynamic_cast<asapo::RequestFactory*>(connection.request_factory__.get()), Ne(nullptr)); ASSERT_THAT(dynamic_cast<const asapo::AbstractLogger*>(connection.log__), Ne(nullptr)); + ASSERT_THAT(dynamic_cast<asapo::RequestsDispatcher*>(connection.requests_dispatcher__.get()), Ne(nullptr)); } -class MockRequestHandler: public Request { - public: - MockRequestHandler(const GenericRequestHeader& request_header, SocketDescriptor socket_fd): - Request(request_header, socket_fd) {}; - Error Handle(std::unique_ptr<Statistics>* statistics) override { - return Error{Handle_t()}; - }; - MOCK_CONST_METHOD0(Handle_t, ErrorInterface * ()); -}; - -class MockRequestFactory: public asapo::RequestFactory { +class MockDispatcher: public asapo::RequestsDispatcher { public: - std::unique_ptr<Request> GenerateRequest(const GenericRequestHeader& request_header, - SocketDescriptor socket_fd, - Error* err) const noexcept override { + MockDispatcher(): asapo::RequestsDispatcher(0, "", nullptr) {}; + Error ProcessRequest(const std::unique_ptr<Request>& request) const noexcept override { + return Error{ProcessRequest_t(request.get())}; + } + + std::unique_ptr<Request> GetNextRequest(Error* err) const noexcept override { ErrorInterface* error = nullptr; - auto res = GenerateRequest_t(request_header, socket_fd, &error); + auto req = GetNextRequest_t(&error); err->reset(error); - return std::unique_ptr<Request> {res}; - } + return std::unique_ptr<Request> {req}; + }; - MOCK_CONST_METHOD3(GenerateRequest_t, Request * (const GenericRequestHeader&, - SocketDescriptor socket_fd, - ErrorInterface**)); + MOCK_CONST_METHOD1(ProcessRequest_t, ErrorInterface * (Request*)); + MOCK_CONST_METHOD1(GetNextRequest_t, Request * (asapo::ErrorInterface**)); }; + class ConnectionTests : public Test { public: std::string connected_uri{"some_address"}; - Connection connection{0, connected_uri, "some_tag"}; - MockIO mock_io; - MockRequestFactory mock_factory; + NiceMock<MockIO> mock_io; + MockDispatcher mock_dispatcher; NiceMock<MockStatistics> mock_statictics; NiceMock<asapo::MockLogger> mock_logger; + std::unique_ptr<Connection> connection; void SetUp() override { - connection.io__ = std::unique_ptr<asapo::IO> {&mock_io}; - connection.statistics__ = std::unique_ptr<asapo::Statistics> {&mock_statictics}; - connection.request_factory__ = std::unique_ptr<asapo::RequestFactory> {&mock_factory}; - connection.log__ = &mock_logger; - - ON_CALL(mock_io, ReceiveWithTimeout_t(_, _, _, _, _)). 
- WillByDefault(DoAll(testing::SetArgPointee<4>(nullptr), - testing::Return(0))); + connection = std::unique_ptr<Connection> {new Connection{0, connected_uri, "some_tag"}}; + connection->io__ = std::unique_ptr<asapo::IO> {&mock_io}; + connection->statistics__ = std::unique_ptr<asapo::Statistics> {&mock_statictics}; + connection->log__ = &mock_logger; + connection->requests_dispatcher__ = std::unique_ptr<asapo::RequestsDispatcher> {&mock_dispatcher}; EXPECT_CALL(mock_io, CloseSocket_t(_, _)); EXPECT_CALL(mock_statictics, Send_t()); + EXPECT_CALL(mock_logger, Info(HasSubstr("disconnected"))); } void TearDown() override { - connection.io__.release(); - connection.request_factory__.release(); - connection.statistics__.release(); + connection->io__.release(); + connection->statistics__.release(); + connection->requests_dispatcher__.release(); } -}; - - -TEST_F(ConnectionTests, ErrorWaitForNewRequest) { - - EXPECT_CALL(mock_io, ReceiveWithTimeout_t(_, _, _, _, _)).Times(2). - WillOnce( - DoAll(SetArgPointee<4>(new asapo::IOError("", asapo::IOErrorType::kTimeout)), - Return(0))) - .WillOnce( - DoAll(SetArgPointee<4>(new asapo::IOError("", asapo::IOErrorType::kUnknownIOError)), - Return(0)) - ); - - EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("waiting for request"), HasSubstr(connected_uri)))); - - - connection.Listen(); -} - -ACTION_P(SaveArg1ToGenericNetworkResponse, value) { - auto resp = *static_cast<const GenericNetworkResponse*>(arg1); - value->error_code = resp.error_code; -} - - -TEST_F(ConnectionTests, CallsHandleRequest) { - - GenericRequestHeader header; - auto request = new MockRequestHandler{header, 1}; - - EXPECT_CALL(mock_io, ReceiveWithTimeout_t(_, _, _, _, _)); - - EXPECT_CALL(mock_factory, GenerateRequest_t(_, _, _)).WillOnce( - Return(request) - ); - - EXPECT_CALL(*request, Handle_t()).WillOnce( - Return(new asapo::SimpleError{""}) - ); - - EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("processing request"), HasSubstr(connected_uri)))); - - - EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("processing request"), HasSubstr(connected_uri)))); - - - EXPECT_CALL(mock_io, Send_t(_, _, _, _)).WillOnce( - DoAll(SetArgPointee<3>(new asapo::IOError("Test Send Error", asapo::IOErrorType::kUnknownIOError)), - Return(0) - )); - - EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("sending response"), HasSubstr(connected_uri)))); - - EXPECT_CALL(mock_logger, Info(AllOf(HasSubstr("disconnected"), HasSubstr(connected_uri)))); - - connection.Listen(); -} - -TEST_F(ConnectionTests, SendsErrorToProducer) { - - GenericRequestHeader header; - auto request = new MockRequestHandler{header, 1}; - - EXPECT_CALL(mock_io, ReceiveWithTimeout_t(_, _, _, _, _)); - - EXPECT_CALL(mock_factory, GenerateRequest_t(_, _, _)).WillOnce( - Return(request) - ); - - EXPECT_CALL(*request, Handle_t()).WillOnce( - Return(new asapo::SimpleError{""}) - ); - - GenericNetworkResponse response; - EXPECT_CALL(mock_io, Send_t(_, _, sizeof(GenericNetworkResponse), _)).WillOnce( - DoAll(SetArgPointee<3>(new asapo::IOError("Test Send Error", asapo::IOErrorType::kUnknownIOError)), - SaveArg1ToGenericNetworkResponse(&response), - Return(0) - )); + Request* MockGetNext(bool error) { + if (error ) { + EXPECT_CALL(mock_dispatcher, GetNextRequest_t(_)) + .WillOnce(DoAll( + SetArgPointee<0>(new asapo::SimpleError{"error"}), + Return(nullptr) + )); + return nullptr; + } else { + auto request = new Request(GenericRequestHeader{asapo::kOpcodeUnknownOp, 0, 1, ""}, 0, connected_uri); + EXPECT_CALL(mock_dispatcher, GetNextRequest_t(_)) + 
.WillOnce(DoAll( + SetArgPointee<0>(nullptr), + Return(request) + )); + return request; + } + } - connection.Listen(); + void MockProcessRequest(Request* request, bool error) { + if (error ) { + EXPECT_CALL(mock_dispatcher, ProcessRequest_t(request)) + .WillOnce( + Return(new asapo::SimpleError{"error"}) + ); + } else { + EXPECT_CALL(mock_dispatcher, ProcessRequest_t(request)) + .WillOnce( + Return(nullptr) + ); + } + } - ASSERT_THAT(response.error_code, Eq(asapo::NetworkErrorCode::kNetErrorInternalServerError)); +}; -} -void MockExitCycle(const MockIO& mock_io, MockStatistics& mock_statictics) { - EXPECT_CALL(mock_statictics, StartTimer_t(StatisticEntity::kNetwork)); +TEST_F(ConnectionTests, ExitOnErrorsWithGetNextRequest) { + MockGetNext(true); - EXPECT_CALL(mock_io, ReceiveWithTimeout_t(_, _, _, _, _)) - .WillOnce( - DoAll(SetArgPointee<4>(new asapo::IOError("", asapo::IOErrorType::kUnknownIOError)), - Return(0)) - ); + connection->Listen(); } -MockRequestHandler* MockWaitRequest(const MockRequestFactory& mock_factory) { - GenericRequestHeader header; - header.data_size = 1; - auto request = new MockRequestHandler{header, 1}; - EXPECT_CALL(mock_factory, GenerateRequest_t(_, _, _)).WillOnce( - Return(request) - ); - return request; -} -TEST_F(ConnectionTests, FillsStatistics) { +TEST_F(ConnectionTests, ProcessStatisticsWhenOKProcessRequest) { InSequence sequence; + auto request = MockGetNext(false); - EXPECT_CALL(mock_statictics, StartTimer_t(StatisticEntity::kNetwork)); - - EXPECT_CALL(mock_io, ReceiveWithTimeout_t(_, _, _, _, _)); - - EXPECT_CALL(mock_statictics, StopTimer_t()); - - auto request = MockWaitRequest(mock_factory); - - EXPECT_CALL(*request, Handle_t()).WillOnce( - Return(nullptr) - ); - - EXPECT_CALL(mock_io, Send_t(_, _, _, _)).WillOnce( - DoAll(SetArgPointee<3>(nullptr), - Return(0) - )); - + MockProcessRequest(request, false); EXPECT_CALL(mock_statictics, IncreaseRequestCounter_t()); - EXPECT_CALL(mock_statictics, IncreaseRequestDataVolume_t(1 + sizeof(asapo::GenericRequestHeader) + sizeof(asapo::GenericNetworkResponse))); + EXPECT_CALL(mock_statictics, SendIfNeeded_t()); - EXPECT_CALL(mock_statictics, SendIfNeeded_t()); + MockGetNext(true); + + connection->Listen(); +} - MockExitCycle(mock_io, mock_statictics); - connection.Listen(); +TEST_F(ConnectionTests, ExitOnErrorsWithProcessRequest) { + auto request = MockGetNext(false); - std::this_thread::sleep_for(std::chrono::milliseconds(1)); + MockProcessRequest(request, true); + connection->Listen(); } diff --git a/receiver/unittests/test_request.cpp b/receiver/unittests/test_request.cpp index f238e9891b126f63b805d23c7268cb2971deb12f..429a5b8e5ae28bef83c7e5a24761aa81432fb829 100644 --- a/receiver/unittests/test_request.cpp +++ b/receiver/unittests/test_request.cpp @@ -9,7 +9,7 @@ #include "../src/request_handler_db_write.h" #include "database/database.h" -#include "mock_statistics.h" +#include "receiver_mocking.h" #include "mock_receiver_config.h" using ::testing::Test; @@ -48,8 +48,8 @@ namespace { class MockReqestHandler : public asapo::RequestHandler { public: - Error ProcessRequest(const Request& request) const override { - return Error{ProcessRequest_t(request)}; + Error ProcessRequest(Request* request) const override { + return Error{ProcessRequest_t(*request)}; } StatisticEntity GetStatisticEntity() const override { @@ -67,15 +67,20 @@ class RequestTests : public Test { asapo::SocketDescriptor socket_fd_{1}; uint64_t data_size_ {100}; uint64_t data_id_{15}; + std::string expected_origin_uri = "origin_uri"; + 
asapo::Opcode expected_op_code = asapo::kOpcodeTransferData; + char expected_request_message[asapo::kMaxMessageSize] = "test message"; std::unique_ptr<Request> request; NiceMock<MockIO> mock_io; NiceMock<MockStatistics> mock_statistics; - std::unique_ptr<asapo::Statistics> stat; + asapo::Statistics* stat; void SetUp() override { - stat = std::unique_ptr<asapo::Statistics> {&mock_statistics}; + stat = &mock_statistics; generic_request_header.data_size = data_size_; generic_request_header.data_id = data_id_; - request.reset(new Request{generic_request_header, socket_fd_}); + generic_request_header.op_code = expected_op_code; + strcpy(generic_request_header.message, expected_request_message); + request.reset(new Request{generic_request_header, socket_fd_, expected_origin_uri}); request->io__ = std::unique_ptr<asapo::IO> {&mock_io}; ON_CALL(mock_io, Receive_t(socket_fd_, _, data_size_, _)).WillByDefault( DoAll(SetArgPointee<3>(nullptr), @@ -84,7 +89,6 @@ class RequestTests : public Test { } void TearDown() override { request->io__.release(); - stat.release(); } }; @@ -92,12 +96,12 @@ class RequestTests : public Test { TEST_F(RequestTests, HandleDoesNotReceiveEmptyData) { generic_request_header.data_size = 0; request->io__.release(); - request.reset(new Request{generic_request_header, socket_fd_}); + request.reset(new Request{generic_request_header, socket_fd_, ""}); request->io__ = std::unique_ptr<asapo::IO> {&mock_io};; EXPECT_CALL(mock_io, Receive_t(_, _, _, _)).Times(0); - auto err = request->Handle(&stat); + auto err = request->Handle(stat); ASSERT_THAT(err, Eq(nullptr)); } @@ -108,7 +112,7 @@ TEST_F(RequestTests, HandleReturnsErrorOnDataReceive) { Return(0) )); - auto err = request->Handle(&stat); + auto err = request->Handle(stat); ASSERT_THAT(err, Eq(asapo::IOErrorTemplates::kReadError)); } @@ -125,7 +129,7 @@ TEST_F(RequestTests, HandleMeasuresTimeOnDataReceive) { EXPECT_CALL(mock_statistics, StopTimer_t()); - request->Handle(&stat); + request->Handle(stat); } @@ -151,7 +155,7 @@ TEST_F(RequestTests, HandleProcessesRequests) { EXPECT_CALL(mock_statistics, StopTimer_t()).Times(2); - auto err = request->Handle(&stat); + auto err = request->Handle(stat); ASSERT_THAT(err, Eq(asapo::IOErrorTemplates::kUnknownIOError)); } @@ -164,7 +168,7 @@ TEST_F(RequestTests, DataIsNullAtInit) { TEST_F(RequestTests, GetDataIsNotNullptr) { - request->Handle(&stat); + request->Handle(stat); auto& data = request->GetData(); @@ -179,6 +183,25 @@ TEST_F(RequestTests, GetDataID) { ASSERT_THAT(id, Eq(data_id_)); } +TEST_F(RequestTests, GetOpCode) { + auto code = request->GetOpCode(); + + ASSERT_THAT(code, Eq(expected_op_code)); +} + + +TEST_F(RequestTests, GetRequestMessage) { + auto message = request->GetMessage(); + + ASSERT_THAT(message, testing::StrEq(expected_request_message)); +} + + +TEST_F(RequestTests, OriginUriEmptyByDefault) { + auto uri = request->GetOriginUri(); + + ASSERT_THAT(uri, Eq(expected_origin_uri)); +} TEST_F(RequestTests, GetDataSize) { @@ -194,5 +217,19 @@ TEST_F(RequestTests, GetFileName) { ASSERT_THAT(fname, Eq(s)); } +TEST_F(RequestTests, SetGetBeamtimeId) { + request->SetBeamtimeId("beamtime"); + + ASSERT_THAT(request->GetBeamtimeId(), "beamtime"); +} + + +TEST_F(RequestTests, SetGetBeamline) { + request->SetBeamline("beamline"); + + ASSERT_THAT(request->GetBeamline(), "beamline"); +} + + } diff --git a/receiver/unittests/test_request_factory.cpp b/receiver/unittests/test_request_factory.cpp index f3a34e8401303efae1dfb1592e0682ac00c7a2cb..326c8011f9aad3e202deafc41ca4d49b9160a658 
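
The reworked Connection tests above no longer stub the IO layer and request factory directly; Listen() is now exercised purely through RequestsDispatcher. connection.cpp itself is not shown in this diff, so the loop below is a rough reconstruction implied by those tests; the Statistics method names are inferred from the IncreaseRequestCounter_t/SendIfNeeded_t mocks and may differ in the real class.

```cpp
// Reconstruction (not the actual implementation) of the Listen() loop that the
// Connection unit tests imply: fetch, process, account, and bail out on errors.
#include "requests_dispatcher.h"
#include "statistics.h"

void ListenLoopSketch(asapo::RequestsDispatcher* dispatcher, asapo::Statistics* statistics) {
    while (true) {
        asapo::Error err;
        auto request = dispatcher->GetNextRequest(&err);
        if (err) {
            break;                               // ExitOnErrorsWithGetNextRequest
        }
        err = dispatcher->ProcessRequest(request);
        if (err) {
            break;                               // ExitOnErrorsWithProcessRequest
        }
        statistics->IncreaseRequestCounter();    // ProcessStatisticsWhenOKProcessRequest
        statistics->SendIfNeeded();
    }
}
```
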
100644 --- a/receiver/unittests/test_request_factory.cpp +++ b/receiver/unittests/test_request_factory.cpp @@ -9,9 +9,11 @@ #include "../src/request_handler.h" #include "../src/request_handler_file_write.h" #include "../src/request_handler_db_write.h" +#include "../src/request_handler_authorize.h" + #include "database/database.h" -#include "mock_statistics.h" +#include "receiver_mocking.h" #include "mock_receiver_config.h" @@ -52,7 +54,7 @@ class FactoryTests : public Test { Error err{nullptr}; GenericRequestHeader generic_request_header; ReceiverConfig config; - + std::string origin_uri{"origin_uri"}; void SetUp() override { generic_request_header.op_code = asapo::Opcode::kOpcodeTransferData; config.write_to_disk = true; @@ -65,29 +67,42 @@ class FactoryTests : public Test { TEST_F(FactoryTests, ErrorOnWrongCode) { generic_request_header.op_code = asapo::Opcode::kOpcodeUnknownOp; - auto request = factory.GenerateRequest(generic_request_header, 1, &err); + auto request = factory.GenerateRequest(generic_request_header, 1, origin_uri, &err); ASSERT_THAT(err, Ne(nullptr)); } TEST_F(FactoryTests, ReturnsDataRequestOnkNetOpcodeSendDataCode) { generic_request_header.op_code = asapo::Opcode::kOpcodeTransferData; - auto request = factory.GenerateRequest(generic_request_header, 1, &err); + auto request = factory.GenerateRequest(generic_request_header, 1, origin_uri, &err); ASSERT_THAT(err, Eq(nullptr)); ASSERT_THAT(dynamic_cast<asapo::Request*>(request.get()), Ne(nullptr)); - ASSERT_THAT(dynamic_cast<const asapo::RequestHandlerFileWrite*>(request->GetListHandlers()[0]), Ne(nullptr)); + ASSERT_THAT(dynamic_cast<const asapo::RequestHandlerAuthorize*>(request->GetListHandlers()[0]), Ne(nullptr)); + ASSERT_THAT(dynamic_cast<const asapo::RequestHandlerFileWrite*>(request->GetListHandlers()[1]), Ne(nullptr)); ASSERT_THAT(dynamic_cast<const asapo::RequestHandlerDbWrite*>(request->GetListHandlers().back()), Ne(nullptr)); } + +TEST_F(FactoryTests, ReturnsDataRequestForAuthorizationCode) { + generic_request_header.op_code = asapo::Opcode::kOpcodeAuthorize; + auto request = factory.GenerateRequest(generic_request_header, 1, origin_uri, &err); + + ASSERT_THAT(err, Eq(nullptr)); + ASSERT_THAT(dynamic_cast<asapo::Request*>(request.get()), Ne(nullptr)); + ASSERT_THAT(dynamic_cast<const asapo::RequestHandlerAuthorize*>(request->GetListHandlers()[0]), Ne(nullptr)); +} + + TEST_F(FactoryTests, DoNotAddDiskWriterIfNotWanted) { config.write_to_disk = false; SetReceiverConfig(config); - auto request = factory.GenerateRequest(generic_request_header, 1, &err); + auto request = factory.GenerateRequest(generic_request_header, 1, origin_uri, &err); ASSERT_THAT(err, Eq(nullptr)); - ASSERT_THAT(request->GetListHandlers().size(), Eq(1)); + ASSERT_THAT(request->GetListHandlers().size(), Eq(2)); + ASSERT_THAT(dynamic_cast<const asapo::RequestHandlerAuthorize*>(request->GetListHandlers()[0]), Ne(nullptr)); ASSERT_THAT(dynamic_cast<const asapo::RequestHandlerDbWrite*>(request->GetListHandlers().back()), Ne(nullptr)); } @@ -96,10 +111,11 @@ TEST_F(FactoryTests, DoNotAddDbWriterIfNotWanted) { SetReceiverConfig(config); - auto request = factory.GenerateRequest(generic_request_header, 1, &err); + auto request = factory.GenerateRequest(generic_request_header, 1, origin_uri, &err); ASSERT_THAT(err, Eq(nullptr)); - ASSERT_THAT(request->GetListHandlers().size(), Eq(1)); - ASSERT_THAT(dynamic_cast<const asapo::RequestHandlerFileWrite*>(request->GetListHandlers()[0]), Ne(nullptr)); + ASSERT_THAT(request->GetListHandlers().size(), Eq(2)); + 
ASSERT_THAT(dynamic_cast<const asapo::RequestHandlerAuthorize*>(request->GetListHandlers()[0]), Ne(nullptr)); + ASSERT_THAT(dynamic_cast<const asapo::RequestHandlerFileWrite*>(request->GetListHandlers()[1]), Ne(nullptr)); } diff --git a/receiver/unittests/test_request_handler_authorizer.cpp b/receiver/unittests/test_request_handler_authorizer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..3fabc8000510bdcf6a569771144b35a612e29231 --- /dev/null +++ b/receiver/unittests/test_request_handler_authorizer.cpp @@ -0,0 +1,239 @@ +#include <gtest/gtest.h> +#include <gmock/gmock.h> + +#include "unittests/MockHttpClient.h" +#include "unittests/MockLogger.h" + +#include "../src/receiver_error.h" +#include "../src/request.h" +#include "../src/request_handler.h" +#include "../src/request_handler_authorize.h" +#include "common/networking.h" +#include "mock_receiver_config.h" +#include "preprocessor/definitions.h" + +#include "receiver_mocking.h" + +#include "../src/receiver_config.h" + + +using ::testing::Test; +using ::testing::Return; +using ::testing::ReturnRef; +using ::testing::_; +using ::testing::DoAll; +using ::testing::SetArgReferee; +using ::testing::Gt; +using ::testing::Eq; +using ::testing::Ne; +using ::testing::Mock; +using ::testing::NiceMock; +using ::testing::InSequence; +using ::testing::SetArgPointee; +using ::testing::AllOf; +using ::testing::HasSubstr; + +using asapo::MockRequest; +using ::asapo::Error; +using ::asapo::ErrorInterface; +using ::asapo::FileDescriptor; +using ::asapo::SocketDescriptor; +using ::asapo::MockHttpClient; +using asapo::Request; +using asapo::RequestHandlerAuthorize; +using ::asapo::GenericRequestHeader; +using asapo::ReceiverConfig; +using asapo::SetReceiverConfig; +using asapo::HttpCode; + +namespace { + +TEST(Authorizer, Constructor) { + RequestHandlerAuthorize handler; + ASSERT_THAT(dynamic_cast<asapo::HttpClient*>(handler.http_client__.get()), Ne(nullptr)); + ASSERT_THAT(dynamic_cast<const asapo::AbstractLogger*>(handler.log__), Ne(nullptr)); +} + + +class AuthorizerHandlerTests : public Test { + public: + RequestHandlerAuthorize handler; + MockHttpClient mock_http_client; + std::unique_ptr<MockRequest> mock_request; + ReceiverConfig config; + + NiceMock<asapo::MockLogger> mock_logger; + std::string expected_beamtime_id = "beamtime_id"; + std::string expected_beamline = "beamline"; + std::string expected_producer_uri = "producer_uri"; + std::string expected_authorization_server = "authorizer_host"; + std::string expect_request_string = std::string("{\"BeamtimeId\":\"") + expected_beamtime_id + "\",\"OriginHost\":\"" + + expected_producer_uri + "\"}"; + + void MockRequestData(); + void SetUp() override { + GenericRequestHeader request_header; + mock_request.reset(new MockRequest{request_header, 1, expected_producer_uri}); + handler.http_client__ = std::unique_ptr<asapo::HttpClient> {&mock_http_client}; + handler.log__ = &mock_logger; + config.authorization_server = expected_authorization_server; + config.authorization_interval_ms = 0; + SetReceiverConfig(config); + } + void TearDown() override { + handler.http_client__.release(); + } + void MockAuthRequest(bool error, HttpCode code = HttpCode::OK) { + if (error) { + EXPECT_CALL(mock_http_client, Post_t(expected_authorization_server + "/authorize", expect_request_string, _, _)). 
+ WillOnce( + DoAll(SetArgPointee<3>(new asapo::SimpleError("http error")), + Return("") + )); + EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("failure authorizing"), + HasSubstr("http error"), + HasSubstr(expected_beamtime_id), + HasSubstr(expected_producer_uri), + HasSubstr(expected_authorization_server)))); + + } else { + EXPECT_CALL(mock_http_client, Post_t(expected_authorization_server + "/authorize", expect_request_string, _, _)). + WillOnce( + DoAll(SetArgPointee<3>(nullptr), + SetArgPointee<2>(code), + Return("{\"BeamtimeId\":\"" + expected_beamtime_id + "\",\"Beamline\":" + "\"" + expected_beamline + "\"}") + )); + if (code != HttpCode::OK) { + EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("failure authorizing"), + HasSubstr("return code"), + HasSubstr(std::to_string(int(code))), + HasSubstr(expected_beamtime_id), + HasSubstr(expected_producer_uri), + HasSubstr(expected_authorization_server)))); + } + + } + + + } + Error MockFirstAuthorization(bool error, HttpCode code = HttpCode::OK) { + EXPECT_CALL(*mock_request, GetOpCode()) + .WillOnce(Return(asapo::kOpcodeAuthorize)) + ; + EXPECT_CALL(*mock_request, GetMessage()) + .WillOnce(Return(expected_beamtime_id.c_str())) + ; + + MockAuthRequest(error, code); + return handler.ProcessRequest(mock_request.get()); + } + Error MockRequestAuthorization(bool error, HttpCode code = HttpCode::OK) { + EXPECT_CALL(*mock_request, GetOpCode()) + .WillOnce(Return(asapo::kOpcodeTransferData)) + ; + if (!error && code == HttpCode::OK) { + EXPECT_CALL(*mock_request, SetBeamtimeId(expected_beamtime_id)); + EXPECT_CALL(*mock_request, SetBeamline(expected_beamline)); + } + + MockAuthRequest(error, code); + return handler.ProcessRequest(mock_request.get()); + } + +}; + +TEST_F(AuthorizerHandlerTests, CheckStatisticEntity) { + auto entity = handler.GetStatisticEntity(); + ASSERT_THAT(entity, Eq(asapo::StatisticEntity::kAuthorizer)); +} + +TEST_F(AuthorizerHandlerTests, ErrorNotAuthorizedYet) { + EXPECT_CALL(*mock_request, GetOpCode()) + .WillOnce(Return(asapo::kOpcodeTransferData)) + ; + + auto err = handler.ProcessRequest(mock_request.get()); + + ASSERT_THAT(err, Eq(asapo::ReceiverErrorTemplates::kAuthorizationFailure)); +} + +TEST_F(AuthorizerHandlerTests, ErrorProcessingAuthorizeRequest) { + + auto err = MockFirstAuthorization(true); + + ASSERT_THAT(err, Eq(asapo::ReceiverErrorTemplates::kAuthorizationFailure)); +} + + +TEST_F(AuthorizerHandlerTests, AuthorizeRequestreturns401) { + + auto err = MockFirstAuthorization(false, HttpCode::Unauthorized); + + ASSERT_THAT(err, Eq(asapo::ReceiverErrorTemplates::kAuthorizationFailure)); +} + + +TEST_F(AuthorizerHandlerTests, AuthorizeOk) { + auto err = MockFirstAuthorization(false); + + ASSERT_THAT(err, Eq(nullptr)); +} + +TEST_F(AuthorizerHandlerTests, ErrorOnSecondAuthorize) { + MockFirstAuthorization(false); + EXPECT_CALL(*mock_request, GetOpCode()) + .WillOnce(Return(asapo::kOpcodeAuthorize)); + + EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("failure authorizing"), + HasSubstr("already authorized"), + HasSubstr(expected_authorization_server)))); + + + auto err = handler.ProcessRequest(mock_request.get()); + + ASSERT_THAT(err, Eq(asapo::ReceiverErrorTemplates::kAuthorizationFailure)); +} + +TEST_F(AuthorizerHandlerTests, ErrorOnDataTransferRequestAuthorize) { + MockFirstAuthorization(false); + auto err = MockRequestAuthorization(true); + + ASSERT_THAT(err, Eq(asapo::ReceiverErrorTemplates::kAuthorizationFailure)); +} + + +TEST_F(AuthorizerHandlerTests, DataTransferRequestAuthorizeReturns401) { + 
MockFirstAuthorization(false); + + auto err = MockRequestAuthorization(false, HttpCode::Unauthorized); + + ASSERT_THAT(err, Eq(asapo::ReceiverErrorTemplates::kAuthorizationFailure)); +} + +TEST_F(AuthorizerHandlerTests, DataTransferRequestAuthorizeReturnsOK) { + MockFirstAuthorization(false); + + auto err = MockRequestAuthorization(false); + + ASSERT_THAT(err, Eq(nullptr)); +} + +TEST_F(AuthorizerHandlerTests, DataTransferRequestAuthorizeUsesCachedValue) { + config.authorization_interval_ms = 10000; + SetReceiverConfig(config); + MockFirstAuthorization(false); + EXPECT_CALL(*mock_request, GetOpCode()) + .WillOnce(Return(asapo::kOpcodeTransferData)); + EXPECT_CALL(mock_http_client, Post_t(_, _, _, _)).Times(0); + EXPECT_CALL(*mock_request, SetBeamtimeId(expected_beamtime_id)); + EXPECT_CALL(*mock_request, SetBeamline(expected_beamline)); + + auto err = handler.ProcessRequest(mock_request.get()); + + ASSERT_THAT(err, Eq(nullptr)); +} + + + + +} \ No newline at end of file diff --git a/receiver/unittests/test_request_handler_db_writer.cpp b/receiver/unittests/test_request_handler_db_writer.cpp index 465b1efbbd9029c027e912b39efb601d7fd45d1f..db675f8881f6d5c140d13653e3bf9149ca1c4e50 100644 --- a/receiver/unittests/test_request_handler_db_writer.cpp +++ b/receiver/unittests/test_request_handler_db_writer.cpp @@ -14,7 +14,9 @@ #include "mock_receiver_config.h" #include "common/data_structs.h" +#include "receiver_mocking.h" +using asapo::MockRequest; using asapo::FileInfo; using ::testing::Test; using ::testing::Return; @@ -50,35 +52,28 @@ using asapo::ReceiverConfig; namespace { -class MockRequestHandler: public Request { - public: - MockRequestHandler(const GenericRequestHeader& request_header, SocketDescriptor socket_fd): - Request(request_header, socket_fd) {}; - - MOCK_CONST_METHOD0(GetFileName, std::string()); - MOCK_CONST_METHOD0(GetDataSize, uint64_t()); - MOCK_CONST_METHOD0(GetDataID, uint64_t()); - MOCK_CONST_METHOD0(GetData, const asapo::FileData & ()); -}; - class DbWriterHandlerTests : public Test { public: RequestHandlerDbWrite handler; NiceMock<MockIO> mock_io; - std::unique_ptr<NiceMock<MockRequestHandler>> mock_request; + std::unique_ptr<NiceMock<MockRequest>> mock_request; NiceMock<MockDatabase> mock_db; NiceMock<asapo::MockLogger> mock_logger; ReceiverConfig config; + std::string expected_beamtime_id = "beamtime_id"; void SetUp() override { GenericRequestHeader request_header; request_header.data_id = 2; handler.db_client__ = std::unique_ptr<asapo::Database> {&mock_db}; handler.log__ = &mock_logger; - mock_request.reset(new NiceMock<MockRequestHandler> {request_header, 1}); + mock_request.reset(new NiceMock<MockRequest> {request_header, 1, ""}); + ON_CALL(*mock_request, GetBeamtimeId()).WillByDefault(ReturnRef(expected_beamtime_id)); } void TearDown() override { handler.db_client__.release(); } + + }; TEST(DBWritewr, Constructor) { @@ -96,14 +91,18 @@ TEST_F(DbWriterHandlerTests, CheckStatisticEntity) { TEST_F(DbWriterHandlerTests, ProcessRequestCallsConnectDbWhenNotConnected) { - config.broker_db_name = "test"; config.broker_db_uri = "127.0.0.1:27017"; SetReceiverConfig(config); - EXPECT_CALL(mock_db, Connect_t("127.0.0.1:27017", "test", asapo::kDBCollectionName)). + + EXPECT_CALL(*mock_request, GetBeamtimeId()) + .WillOnce(ReturnRef(expected_beamtime_id)) + ; + + EXPECT_CALL(mock_db, Connect_t("127.0.0.1:27017", expected_beamtime_id, asapo::kDBCollectionName)). 
WillOnce(testing::Return(nullptr)); - auto err = handler.ProcessRequest(*mock_request); + auto err = handler.ProcessRequest(mock_request.get()); ASSERT_THAT(err, Eq(nullptr)); } @@ -112,7 +111,7 @@ TEST_F(DbWriterHandlerTests, ProcessRequestReturnsErrorWhenCannotConnect) { EXPECT_CALL(mock_db, Connect_t(_, _, asapo::kDBCollectionName)). WillOnce(testing::Return(new asapo::SimpleError(""))); - auto err = handler.ProcessRequest(*mock_request); + auto err = handler.ProcessRequest(mock_request.get()); ASSERT_THAT(err, Ne(nullptr)); @@ -124,8 +123,8 @@ TEST_F(DbWriterHandlerTests, ProcessRequestDoesNotCallConnectSecondTime) { EXPECT_CALL(mock_db, Connect_t(_, _, asapo::kDBCollectionName)). WillOnce(testing::Return(nullptr)); - handler.ProcessRequest(*mock_request); - handler.ProcessRequest(*mock_request); + handler.ProcessRequest(mock_request.get()); + handler.ProcessRequest(mock_request.get()); } MATCHER_P(CompareFileInfo, file, "") { @@ -137,13 +136,15 @@ MATCHER_P(CompareFileInfo, file, "") { } - TEST_F(DbWriterHandlerTests, CallsInsert) { - config.broker_db_name = "test"; config.broker_db_uri = "127.0.0.1:27017"; SetReceiverConfig(config); - EXPECT_CALL(mock_db, Connect_t(config.broker_db_uri, config.broker_db_name, asapo::kDBCollectionName)). + EXPECT_CALL(*mock_request, GetBeamtimeId()) + .WillOnce(ReturnRef(expected_beamtime_id)) + ; + + EXPECT_CALL(mock_db, Connect_t(config.broker_db_uri, expected_beamtime_id, asapo::kDBCollectionName)). WillOnce(testing::Return(nullptr)); std::string expected_file_name = "2.bin"; @@ -172,13 +173,13 @@ TEST_F(DbWriterHandlerTests, CallsInsert) { EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("insert record"), HasSubstr(config.broker_db_uri), - HasSubstr(config.broker_db_name), + HasSubstr(expected_beamtime_id), HasSubstr(asapo::kDBCollectionName) ) ) ); - handler.ProcessRequest(*mock_request); + handler.ProcessRequest(mock_request.get()); } } \ No newline at end of file diff --git a/receiver/unittests/test_request_handler_file_write.cpp b/receiver/unittests/test_request_handler_file_write.cpp index 07a5edac2ccecadfe3c1fbb31c26fc8de4adb0c9..5a999e0681aecb1009088bcf92d60b842dec515e 100644 --- a/receiver/unittests/test_request_handler_file_write.cpp +++ b/receiver/unittests/test_request_handler_file_write.cpp @@ -12,6 +12,8 @@ #include "mock_receiver_config.h" #include "preprocessor/definitions.h" +#include "receiver_mocking.h" + using ::testing::Test; using ::testing::Return; using ::testing::ReturnRef; @@ -37,6 +39,7 @@ using ::asapo::MockIO; using asapo::Request; using asapo::RequestHandlerFileWrite; using ::asapo::GenericRequestHeader; +using asapo::MockRequest; namespace { @@ -46,30 +49,21 @@ TEST(FileWrite, Constructor) { ASSERT_THAT(dynamic_cast<const asapo::AbstractLogger*>(handler.log__), Ne(nullptr)); } - -class MockRequestHandler: public Request { - public: - MockRequestHandler(const GenericRequestHeader& request_header, SocketDescriptor socket_fd): - Request(request_header, socket_fd) {}; - - MOCK_CONST_METHOD0(GetFileName, std::string()); - MOCK_CONST_METHOD0(GetDataSize, uint64_t()); - MOCK_CONST_METHOD0(GetData, const asapo::FileData & ()); -}; - class FileWriteHandlerTests : public Test { public: RequestHandlerFileWrite handler; NiceMock<MockIO> mock_io; - std::unique_ptr<MockRequestHandler> mock_request; + std::unique_ptr<MockRequest> mock_request; NiceMock<asapo::MockLogger> mock_logger; std::string expected_file_name = "2.bin"; + std::string expected_beamtime_id = "beamtime_id"; + std::string expected_beamline = "beamline"; uint64_t 
expected_file_size = 10; void MockRequestData(); void SetUp() override { GenericRequestHeader request_header; request_header.data_id = 2; - mock_request.reset(new MockRequestHandler{request_header, 1}); + mock_request.reset(new MockRequest{request_header, 1, ""}); handler.io__ = std::unique_ptr<asapo::IO> {&mock_io}; handler.log__ = &mock_logger; } @@ -90,7 +84,7 @@ TEST_F(FileWriteHandlerTests, ErrorWhenZeroFileSize) { .WillOnce(Return(0)) ; - auto err = handler.ProcessRequest(*mock_request); + auto err = handler.ProcessRequest(mock_request.get()); ASSERT_THAT(err, Eq(asapo::ReceiverErrorTemplates::kBadRequest)); } @@ -100,7 +94,7 @@ TEST_F(FileWriteHandlerTests, ErrorWhenTooBigFileSize) { .WillOnce(Return(asapo::kMaxFileSize + 1)) ; - auto err = handler.ProcessRequest(*mock_request); + auto err = handler.ProcessRequest(mock_request.get()); ASSERT_THAT(err, Eq(asapo::ReceiverErrorTemplates::kBadRequest)); } @@ -115,6 +109,15 @@ void FileWriteHandlerTests::MockRequestData() { .WillOnce(ReturnRef(data)) ; + EXPECT_CALL(*mock_request, GetBeamtimeId()) + .WillOnce(ReturnRef(expected_beamtime_id)) + ; + + EXPECT_CALL(*mock_request, GetBeamline()) + .WillOnce(ReturnRef(expected_beamline)) + ; + + EXPECT_CALL(*mock_request, GetFileName()) .WillOnce(Return(expected_file_name)) ; @@ -128,14 +131,16 @@ TEST_F(FileWriteHandlerTests, CallsWriteFile) { MockRequestData(); - std::string expected_path = std::string("test_folder") + asapo::kPathSeparator + expected_file_name; + std::string expected_path = std::string("test_folder") + asapo::kPathSeparator + expected_beamline + + asapo::kPathSeparator + expected_beamtime_id + + asapo::kPathSeparator + expected_file_name; EXPECT_CALL(mock_io, WriteDataToFile_t(expected_path.c_str(), _, expected_file_size)) .WillOnce( Return(asapo::IOErrorTemplates::kUnknownIOError.Generate().release()) ); - auto err = handler.ProcessRequest(*mock_request); + auto err = handler.ProcessRequest(mock_request.get()); ASSERT_THAT(err, Eq(asapo::IOErrorTemplates::kUnknownIOError)); } @@ -150,11 +155,12 @@ TEST_F(FileWriteHandlerTests, WritesToLog) { EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("saved file"), HasSubstr(expected_file_name), + HasSubstr(expected_beamtime_id), HasSubstr(std::to_string(expected_file_size)) ) ) ); - handler.ProcessRequest(*mock_request); + handler.ProcessRequest(mock_request.get()); } diff --git a/receiver/unittests/test_requests_dispatcher.cpp b/receiver/unittests/test_requests_dispatcher.cpp new file mode 100644 index 0000000000000000000000000000000000000000..561bed160e101ac87224256c608a0574fb489efe --- /dev/null +++ b/receiver/unittests/test_requests_dispatcher.cpp @@ -0,0 +1,265 @@ +#include <gtest/gtest.h> +#include <gmock/gmock.h> + +#include "unittests/MockIO.h" +#include "unittests/MockLogger.h" +#include "../src/receiver_error.h" +#include "../src/request.h" +#include "../src/statistics.h" +#include "receiver_mocking.h" +#include "mock_receiver_config.h" + +#include "../src/requests_dispatcher.h" + + +using ::testing::Test; +using ::testing::Return; +using ::testing::_; +using ::testing::DoAll; +using ::testing::SetArgReferee; +using ::testing::Gt; +using ::testing::Eq; +using ::testing::Ne; +using ::testing::Mock; +using ::testing::NiceMock; +using ::testing::SaveArg; +using ::testing::SaveArgPointee; +using ::testing::InSequence; +using ::testing::HasSubstr; +using ::testing::StrEq; +using ::testing::SetArgPointee; +using ::testing::AllOf; +using testing::Sequence; + +using asapo::Error; +using asapo::ErrorInterface; +using 
asapo::SocketDescriptor; +using asapo::GenericRequestHeader; +using asapo::SendDataResponse; +using asapo::GenericRequestHeader; +using asapo::GenericNetworkResponse; +using asapo::Opcode; +using asapo::MockIO; +using asapo::MockLogger; +using asapo::Request; +using asapo::Statistics; +using asapo::StatisticEntity; +using asapo::MockStatistics; + + +using asapo::RequestsDispatcher; +using asapo::Statistics; + +namespace { + +TEST(RequestDispatcher, Constructor) { + auto stat = std::unique_ptr<Statistics> {new Statistics}; + RequestsDispatcher dispatcher{0, "some_address", stat.get()}; + ASSERT_THAT(dynamic_cast<const asapo::Statistics*>(dispatcher.statistics__), Ne(nullptr)); + ASSERT_THAT(dynamic_cast<asapo::IO*>(dispatcher.io__.get()), Ne(nullptr)); + ASSERT_THAT(dynamic_cast<asapo::RequestFactory*>(dispatcher.request_factory__.get()), Ne(nullptr)); + ASSERT_THAT(dynamic_cast<const asapo::AbstractLogger*>(dispatcher.log__), Ne(nullptr)); +} + +class MockRequest: public Request { + public: + MockRequest(const GenericRequestHeader& request_header, SocketDescriptor socket_fd): + Request(request_header, socket_fd, "") {}; + Error Handle(Statistics* statistics) override { + return Error{Handle_t()}; + }; + MOCK_CONST_METHOD0(Handle_t, ErrorInterface * ()); +}; + + +class MockRequestFactory: public asapo::RequestFactory { + public: + std::unique_ptr<Request> GenerateRequest(const GenericRequestHeader& request_header, + SocketDescriptor socket_fd, std::string origin_uri, + Error* err) const noexcept override { + ErrorInterface* error = nullptr; + auto res = GenerateRequest_t(request_header, socket_fd, origin_uri, &error); + err->reset(error); + return std::unique_ptr<Request> {res}; + } + + MOCK_CONST_METHOD4(GenerateRequest_t, Request * (const GenericRequestHeader&, + SocketDescriptor , std::string , + ErrorInterface**)); + +}; + + +ACTION_P(SaveArg1ToGenericNetworkResponse, value) { + auto resp = *static_cast<const GenericNetworkResponse*>(arg1); + value->error_code = resp.error_code; + strcpy(value->message, resp.message); +} + +class RequestsDispatcherTests : public Test { + public: + std::unique_ptr<RequestsDispatcher> dispatcher; + std::string connected_uri{"some_address"}; + NiceMock<MockIO> mock_io; + MockRequestFactory mock_factory; + NiceMock<MockStatistics> mock_statictics; + NiceMock<asapo::MockLogger> mock_logger; + + asapo::ReceiverConfig test_config; + GenericRequestHeader header; + MockRequest mock_request{GenericRequestHeader{}, 1}; + std::unique_ptr<Request> request{&mock_request}; + GenericNetworkResponse response; + void SetUp() override { + test_config.authorization_interval_ms = 0; + SetReceiverConfig(test_config); + dispatcher = std::unique_ptr<RequestsDispatcher> {new RequestsDispatcher{0, connected_uri, &mock_statictics}}; + dispatcher->io__ = std::unique_ptr<asapo::IO> {&mock_io}; + dispatcher->statistics__ = &mock_statictics; + dispatcher->request_factory__ = std::unique_ptr<asapo::RequestFactory> {&mock_factory}; + dispatcher->log__ = &mock_logger; + + } + void TearDown() override { + dispatcher->io__.release(); + dispatcher->request_factory__.release(); + request.release(); + } + void MockReceiveRequest(bool error ) { + EXPECT_CALL(mock_io, Receive_t(_, _, _, _)) + .WillOnce( + DoAll(SetArgPointee<3>(error ? 
asapo::IOErrorTemplates::kUnknownIOError.Generate().release() : nullptr), + Return(0)) + ); + if (error) { + EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("getting next request"), HasSubstr(connected_uri)))); + } + + } + void MockCreateRequest(bool error ) { + EXPECT_CALL(mock_factory, GenerateRequest_t(_, _, _, _)) + .WillOnce( + DoAll(SetArgPointee<3>(error ? asapo::ReceiverErrorTemplates::kInvalidOpCode.Generate().release() : nullptr), + Return(nullptr)) + ); + if (error) { + EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("error processing request from"), HasSubstr(connected_uri)))); + } + + + } + void MockHandleRequest(bool error, Error err = asapo::IOErrorTemplates::kUnknownIOError.Generate() ) { + EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("processing request from"), HasSubstr(connected_uri)))); + + EXPECT_CALL(mock_request, Handle_t()).WillOnce( + Return(error ? err.release() : nullptr) + ); + if (error) { + EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("error processing request from"), HasSubstr(connected_uri)))); + } + + + } + void MockSendResponse(GenericNetworkResponse* response, bool error ) { + EXPECT_CALL(mock_logger, Debug(AllOf(HasSubstr("sending response to"), HasSubstr(connected_uri)))); + ; + EXPECT_CALL(mock_io, Send_t(_, _, _, _)).WillOnce( + DoAll(SetArgPointee<3>(error ? asapo::IOErrorTemplates::kConnectionRefused.Generate().release() : nullptr), + SaveArg1ToGenericNetworkResponse(response), + Return(0) + )); + if (error) { + EXPECT_CALL(mock_logger, Error(AllOf(HasSubstr("error sending response"), HasSubstr(connected_uri)))); + } + + return; + } +}; + + +TEST_F(RequestsDispatcherTests, ErrorReceivetNextRequest) { + EXPECT_CALL(mock_statictics, StartTimer_t(StatisticEntity::kNetwork)); + MockReceiveRequest(true); + + Error err; + dispatcher->GetNextRequest(&err); + + ASSERT_THAT(err, Eq(asapo::IOErrorTemplates::kUnknownIOError)); +} + +TEST_F(RequestsDispatcherTests, ErrorCreatetNextRequest) { + MockReceiveRequest(false); + MockCreateRequest(true); + + Error err; + dispatcher->GetNextRequest(&err); + + ASSERT_THAT(err, Eq(asapo::ReceiverErrorTemplates::kInvalidOpCode)); +} + +TEST_F(RequestsDispatcherTests, OkCreatetNextRequest) { + MockReceiveRequest(false); + MockCreateRequest(false); + + Error err; + dispatcher->GetNextRequest(&err); + + ASSERT_THAT(err, Eq(nullptr)); +} + + +TEST_F(RequestsDispatcherTests, ErrorProcessRequestErrorSend) { + MockHandleRequest(true); + MockSendResponse(&response, true); + + auto err = dispatcher->ProcessRequest(request); + + ASSERT_THAT(err, Eq(asapo::IOErrorTemplates::kUnknownIOError)); +} + + +TEST_F(RequestsDispatcherTests, OkProcessRequestErrorSend) { + MockHandleRequest(false); + MockSendResponse(&response, true); + + auto err = dispatcher->ProcessRequest(request); + + ASSERT_THAT(err, Eq(asapo::IOErrorTemplates::kConnectionRefused)); +} + + +TEST_F(RequestsDispatcherTests, OkProcessRequestSendOK) { + MockHandleRequest(false); + MockSendResponse(&response, false); + + auto err = dispatcher->ProcessRequest(request); + + ASSERT_THAT(err, Eq(nullptr)); +} + + +TEST_F(RequestsDispatcherTests, ProcessRequestReturnsAlreadyExist) { + MockHandleRequest(true, asapo::IOErrorTemplates::kFileAlreadyExists.Generate()); + MockSendResponse(&response, false); + + auto err = dispatcher->ProcessRequest(request); + + ASSERT_THAT(err, Eq(asapo::IOErrorTemplates::kFileAlreadyExists)); + ASSERT_THAT(response.error_code, Eq(asapo::kNetErrorFileIdAlreadyInUse)); + ASSERT_THAT(std::string(response.message), 
HasSubstr(std::string("kFileAlreadyExists"))); +} + +TEST_F(RequestsDispatcherTests, ProcessRequestReturnsAuthorizationFailure) { + MockHandleRequest(true, asapo::ReceiverErrorTemplates::kAuthorizationFailure.Generate()); + MockSendResponse(&response, false); + + auto err = dispatcher->ProcessRequest(request); + + ASSERT_THAT(err, Eq(asapo::ReceiverErrorTemplates::kAuthorizationFailure)); + ASSERT_THAT(response.error_code, Eq(asapo::kNetAuthorizationError)); + ASSERT_THAT(std::string(response.message), HasSubstr("authorization")); +} + + + +} diff --git a/tests/automatic/CMakeLists.txt b/tests/automatic/CMakeLists.txt index bc93ec01b6892abb9614505a95823fd7e90a3b9b..8377afd4aafa7681b21b0900119db67032fb1073 100644 --- a/tests/automatic/CMakeLists.txt +++ b/tests/automatic/CMakeLists.txt @@ -15,6 +15,7 @@ if(BUILD_BROKER) add_subdirectory(broker) endif() +add_subdirectory(authorizer) add_subdirectory(worker) diff --git a/tests/automatic/authorizer/CMakeLists.txt b/tests/automatic/authorizer/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..db373a3122354e2403384f26cdfb4ddcbd964860 --- /dev/null +++ b/tests/automatic/authorizer/CMakeLists.txt @@ -0,0 +1,2 @@ +add_subdirectory(check_authorize) + diff --git a/tests/automatic/authorizer/check_authorize/CMakeLists.txt b/tests/automatic/authorizer/check_authorize/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..188bc617467c371ce606389adf1d52b48b0b976b --- /dev/null +++ b/tests/automatic/authorizer/check_authorize/CMakeLists.txt @@ -0,0 +1,15 @@ +set(TARGET_NAME asapo-authorizer) + +################################ +# Testing +################################ +file(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/OpenBeamTimes.txt BEAMTIMES_FILE ) +file(TO_NATIVE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/ip_bl_mapping BEAMLINES_FOLDER ) +if (WIN32) + string(REPLACE "\\" "\\\\" BEAMTIMES_FILE "${BEAMTIMES_FILE}") + string(REPLACE "\\" "\\\\" BEAMLINES_FOLDER "${BEAMLINES_FOLDER}") +endif() + +configure_file(${CMAKE_CURRENT_SOURCE_DIR}/settings.json.in settings.json @ONLY) +add_script_test("${TARGET_NAME}-authorize" "$<TARGET_PROPERTY:${TARGET_NAME},EXENAME>" nomem + ) diff --git a/tests/automatic/authorizer/check_authorize/OpenBeamTimes.txt b/tests/automatic/authorizer/check_authorize/OpenBeamTimes.txt new file mode 100644 index 0000000000000000000000000000000000000000..b218c3ec6a28d10fd6e94d86d61992eb79bdbf65 --- /dev/null +++ b/tests/automatic/authorizer/check_authorize/OpenBeamTimes.txt @@ -0,0 +1,22 @@ +Open beam times as of Thursday, 2018/06/21 11:32 +Faclty BL BeamTime Id kind +flash bl1 11003924 beamtime start: 2018-04-24 +flash bl2 11003921 beamtime start: 2018-06-08 +flash fl24 11001734 beamtime start: 2018-06-13 +flash pg2 11003932 beamtime start: 2018-06-11 +flash thz 11005667 beamtime start: 2018-05-24 +petra3 ext 50000181 beamtime start: 2017-04-12 +petra3 ext 50000193 beamtime start: 2017-10-12 +petra3 ext 50000202 beamtime start: 2017-12-06 +petra3 ext 50000209 beamtime start: 2018-02-19 +petra3 ext 50000211 beamtime start: 2018-02-19 +petra3 ext 50000214 beamtime start: 2018-04-23 +petra3 ext 50000215 beamtime start: 2018-03-23 +petra3 ext 50000216 beamtime start: 2018-03-23 +petra3 ext 50000217 beamtime start: 2018-03-23 +petra3 ext 50000218 beamtime start: 2018-03-23 +petra3 ext 50000219 beamtime start: 2018-04-24 +petra3 ext 50000221 beamtime start: 2018-06-14 +petra3 p01 11004172 beamtime start: 2018-06-20 +petra3 p01 c20180508-000-COM20181 commissioning +petra3 p02.1 11004341 
beamtime start: 2018-06-18 diff --git a/tests/automatic/authorizer/check_authorize/check_linux.sh b/tests/automatic/authorizer/check_authorize/check_linux.sh new file mode 100644 index 0000000000000000000000000000000000000000..46eb6532cb37079ca97d8ae5fecb0a6bcaa3dff0 --- /dev/null +++ b/tests/automatic/authorizer/check_authorize/check_linux.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +set -e + +trap Cleanup EXIT + +Cleanup() { + echo cleanup + kill -9 $authorizeid +} + +$@ -config settings.json & + +sleep 0.3 +authorizeid=`echo $!` + +curl -v --silent --data '{"BeamtimeId":"c20180508-000-COM20181","OriginHost":"127.0.0.1:5555"}' 127.0.0.1:5007/authorize --stderr - | grep c20180508-000-COM20181 +curl -v --silent --data '{"BeamtimeId":"c20180508-000-COM20181","OriginHost":"127.0.0.1:5555"}' 127.0.0.1:5007/authorize --stderr - | grep p01 + diff --git a/tests/automatic/authorizer/check_authorize/check_windows.bat b/tests/automatic/authorizer/check_authorize/check_windows.bat new file mode 100644 index 0000000000000000000000000000000000000000..a13d35a67447ef0c4e7e5f40d9efa629369e0a64 --- /dev/null +++ b/tests/automatic/authorizer/check_authorize/check_windows.bat @@ -0,0 +1,18 @@ +set full_name="%1" +set short_name="%~nx1" + +start /B "" "%full_name%" -config settings.json + +ping 1.0.0.0 -n 1 -w 100 > nul + +C:\Curl\curl.exe -v --silent --data "{\"BeamtimeId\":\"c20180508-000-COM20181\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr c20180508-000-COM20181 || goto :error +C:\Curl\curl.exe -v --silent --data "{\"BeamtimeId\":\"c20180508-000-COM20181\",\"OriginHost\":\"127.0.0.1:5555\"}" 127.0.0.1:5007/authorize --stderr - | findstr p01 || goto :error + +goto :clean + +:error +call :clean +exit /b 1 + +:clean +Taskkill /IM "%short_name%" /F diff --git a/tests/automatic/authorizer/check_authorize/ip_bl_mapping/127.0.0.1 b/tests/automatic/authorizer/check_authorize/ip_bl_mapping/127.0.0.1 new file mode 100644 index 0000000000000000000000000000000000000000..9b9883faf44d6f232627e1047d327ea91a47ae08 --- /dev/null +++ b/tests/automatic/authorizer/check_authorize/ip_bl_mapping/127.0.0.1 @@ -0,0 +1 @@ +p01 diff --git a/tests/automatic/authorizer/check_authorize/settings.json.in b/tests/automatic/authorizer/check_authorize/settings.json.in new file mode 100644 index 0000000000000000000000000000000000000000..0d8670b99c17a60fddcc8f1d06d4481ce8374aa6 --- /dev/null +++ b/tests/automatic/authorizer/check_authorize/settings.json.in @@ -0,0 +1,8 @@ +{ + "Port": 5007, + "LogLevel":"debug", + "BeamtimeBeamlineMappingFile":"@BEAMTIMES_FILE@", + "IpBeamlineMappingFolder":"@BEAMLINES_FOLDER@" +} + + diff --git a/tests/automatic/broker/check_monitoring/CMakeLists.txt b/tests/automatic/broker/check_monitoring/CMakeLists.txt index 03487791a2e087c07daac36f97aa6d8752084cfd..fc5151365b1797f635f65dc1f8c8ed8df68169b5 100644 --- a/tests/automatic/broker/check_monitoring/CMakeLists.txt +++ b/tests/automatic/broker/check_monitoring/CMakeLists.txt @@ -4,5 +4,7 @@ set(TARGET_NAME asapo-broker) # Testing ################################ configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/broker_settings.json settings.json COPYONLY) -add_script_test("${TARGET_NAME}-monitoring" "$<TARGET_PROPERTY:${TARGET_NAME},EXENAME>" nomem +configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/broker_secret.key broker_secret.key COPYONLY) + +add_script_test("${TARGET_NAME}-monitoring" "$<TARGET_PROPERTY:${TARGET_NAME},EXENAME> $<TARGET_PROPERTY:asapo,EXENAME>" nomem ) diff --git 
a/tests/automatic/broker/check_monitoring/check_linux.sh b/tests/automatic/broker/check_monitoring/check_linux.sh index b0a2de9abbec2284cd1e3ff409ba39cc74aa3049..5b3e631b28684e95b4642e820b3416130ad9d1a3 100644 --- a/tests/automatic/broker/check_monitoring/check_linux.sh +++ b/tests/automatic/broker/check_monitoring/check_linux.sh @@ -15,7 +15,10 @@ Cleanup() { influx -execute "create database ${database_name}" -$@ -config settings.json & +token=`$2 token -secret broker_secret.key data` + + +$1 -config settings.json & sleep 0.3 @@ -23,7 +26,7 @@ brokerid=`echo $!` for i in `seq 1 50`; do - curl --silent 127.0.0.1:5005/database/data/next >/dev/null 2>&1 & + curl --silent 127.0.0.1:5005/database/data/next?token=$token >/dev/null 2>&1 & done diff --git a/tests/automatic/broker/get_next/CMakeLists.txt b/tests/automatic/broker/get_next/CMakeLists.txt index 2f661335f06a60ccc400fe0dba03cc9bb2fd5b52..d4f6222d01ec1cd9232a113f4cdd32cec34c45f9 100644 --- a/tests/automatic/broker/get_next/CMakeLists.txt +++ b/tests/automatic/broker/get_next/CMakeLists.txt @@ -4,5 +4,7 @@ set(TARGET_NAME asapo-broker) # Testing ################################ configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/broker_settings.json settings.json COPYONLY) -add_script_test("${TARGET_NAME}-getnext" "$<TARGET_PROPERTY:${TARGET_NAME},EXENAME>" nomem +configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/broker_secret.key broker_secret.key COPYONLY) + +add_script_test("${TARGET_NAME}-getnext" "$<TARGET_PROPERTY:${TARGET_NAME},EXENAME> $<TARGET_PROPERTY:asapo,EXENAME>" nomem ) diff --git a/tests/automatic/broker/get_next/check_linux.sh b/tests/automatic/broker/get_next/check_linux.sh index 3a3119b1a0df98ed9cbb29a7f6859819e27531ae..f4bf78a6f74689bfbe02418955317dca3472eb62 100644 --- a/tests/automatic/broker/get_next/check_linux.sh +++ b/tests/automatic/broker/get_next/check_linux.sh @@ -15,12 +15,16 @@ Cleanup() { echo "db.data.insert({"_id":2})" | mongo ${database_name} echo "db.data.insert({"_id":1})" | mongo ${database_name} -$@ -config settings.json & +token=`$2 token -secret broker_secret.key data` + +$1 -config settings.json & sleep 0.3 brokerid=`echo $!` -curl -v --silent 127.0.0.1:5005/database/data/next --stderr - | grep '"_id":1' -curl -v --silent 127.0.0.1:5005/database/data/next --stderr - | grep '"_id":2' -curl -v --silent 127.0.0.1:5005/database/data/next --stderr - | grep "not found" + +curl -v --silent 127.0.0.1:5005/database/data/next?token=$token --stderr - | grep '"_id":1' +curl -v --silent 127.0.0.1:5005/database/data/next?token=$token --stderr - | grep '"_id":2' + +curl -v --silent 127.0.0.1:5005/database/data/next?token=$token --stderr - | grep "not found" diff --git a/tests/automatic/broker/get_next/check_windows.bat b/tests/automatic/broker/get_next/check_windows.bat index 443c05422d74a9c346e72ca65cfd5c5535a1f964..dfa4ffa8535ce466d68a547151f8d9b19d81cde2 100644 --- a/tests/automatic/broker/get_next/check_windows.bat +++ b/tests/automatic/broker/get_next/check_windows.bat @@ -7,13 +7,18 @@ echo db.data.insert({"_id":2}) | %mongo_exe% %database_name% || goto :error set full_name="%1" set short_name="%~nx1" +"%2" token -secret broker_secret.key data > token +set /P token=< token + + + start /B "" "%full_name%" -config settings.json ping 1.0.0.0 -n 1 -w 100 > nul -C:\Curl\curl.exe -v --silent 127.0.0.1:5005/database/data/next --stderr - | findstr /c:\"_id\":1 || goto :error -C:\Curl\curl.exe -v --silent 127.0.0.1:5005/database/data/next --stderr - | findstr /c:\"_id\":2 || goto :error 
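The pattern behind the broker test changes above and below: the broker endpoints used by these tests are now token-protected, so each script first generates a token with the new asapo tool (its binary is passed to the scripts as an additional argument) and appends it to every request. A minimal sketch of the flow, using the test secret and the "data" database name from these scripts; the binary locations are assumptions about where the build puts them:

    token=$(./asapo token -secret broker_secret.key data)              # sign a token for the "data" database/beamtime
    curl --silent "127.0.0.1:5005/database/data/next?token=${token}"   # broker calls must carry the token as a query parameter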
-C:\Curl\curl.exe -v --silent 127.0.0.1:5005/database/data/next --stderr - | findstr /c:"not found" || goto :error +C:\Curl\curl.exe -v --silent 127.0.0.1:5005/database/data/next?token=%token% --stderr - | findstr /c:\"_id\":1 || goto :error +C:\Curl\curl.exe -v --silent 127.0.0.1:5005/database/data/next?token=%token% --stderr - | findstr /c:\"_id\":2 || goto :error +C:\Curl\curl.exe -v --silent 127.0.0.1:5005/database/data/next?token=%token% --stderr - | findstr /c:"not found" || goto :error goto :clean @@ -24,3 +29,4 @@ exit /b 1 :clean Taskkill /IM "%short_name%" /F echo db.dropDatabase() | %mongo_exe% %database_name% +del /f token \ No newline at end of file diff --git a/tests/automatic/broker/read_config/CMakeLists.txt b/tests/automatic/broker/read_config/CMakeLists.txt index 75c1658dd770ecd4950e8f87f35ef49c3bf5bf19..f05b7966943e1c747530de2cbe8014bf683d7dac 100644 --- a/tests/automatic/broker/read_config/CMakeLists.txt +++ b/tests/automatic/broker/read_config/CMakeLists.txt @@ -4,6 +4,7 @@ set(TARGET_NAME asapo-broker) # Testing ################################ configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/broker_settings.json settings_good.json COPYONLY) +configure_file(${CMAKE_SOURCE_DIR}/tests/automatic/settings/broker_secret.key broker_secret.key COPYONLY) configure_file(settings_bad.json settings_bad.json COPYONLY) add_script_test("${TARGET_NAME}-readconfig" "$<TARGET_PROPERTY:${TARGET_NAME},EXENAME>" nomem ) diff --git a/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp b/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp index a7cc5bd384dbe41127a562a5aed53f86ec5515ce..89fccb0fdb6511e30b1ad6ac4090ab0eefc36e6f 100644 --- a/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp +++ b/tests/automatic/curl_http_client/curl_http_client_command/curl_httpclient_command.cpp @@ -32,7 +32,7 @@ int main(int argc, char* argv[]) { auto args = GetArgs(argc, argv); asapo::Error err; - auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.uri, "", &err); + auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.uri, "", "", &err); auto server_broker = static_cast<asapo::ServerDataBroker*>(broker.get()); asapo::HttpCode code; diff --git a/tests/automatic/full_chain/simple_chain/CMakeLists.txt b/tests/automatic/full_chain/simple_chain/CMakeLists.txt index a63811d4f4a90e4230614139ea1f403c28add8a7..1f7374efe1954591b0c95d5a8c9f064686481780 100644 --- a/tests/automatic/full_chain/simple_chain/CMakeLists.txt +++ b/tests/automatic/full_chain/simple_chain/CMakeLists.txt @@ -4,4 +4,4 @@ set(TARGET_NAME full_chain_simple_chain) # Testing ################################ prepare_asapo() -add_script_test("${TARGET_NAME}" "$<TARGET_FILE:dummy-data-producer> $<TARGET_FILE:getnext_broker>" nomem) +add_script_test("${TARGET_NAME}" "$<TARGET_FILE:dummy-data-producer> $<TARGET_FILE:getnext_broker> $<TARGET_PROPERTY:asapo,EXENAME>" nomem) diff --git a/tests/automatic/full_chain/simple_chain/check_linux.sh b/tests/automatic/full_chain/simple_chain/check_linux.sh index 11c022e52ccfcf5a5eb5d996b34a1ebdb6363e07..12b1d68ae529465c22d9903dbdef602bc55f3c58 100644 --- a/tests/automatic/full_chain/simple_chain/check_linux.sh +++ b/tests/automatic/full_chain/simple_chain/check_linux.sh @@ -4,28 +4,34 @@ set -e trap Cleanup EXIT -broker_database_name=test_run +beamtime_id=asapo_test +token=`$3 token -secret broker_secret.key $beamtime_id` + monitor_database_name=db_test 
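The full-chain script continued below now also runs the new authorizer, which the receiver consults before accepting data for a beamtime. For reference, the authorization call can be reproduced with curl; the payload fields mirror the authorizer test earlier in this change, port 5007 is the stand-alone test setting, and the reply is expected to echo the beamtime id together with the resolved beamline:

    curl --silent --data '{"BeamtimeId":"asapo_test","OriginHost":"127.0.0.1:5555"}' 127.0.0.1:5007/authorize
    # under nomad the same request should also be reachable through the new nginx route, e.g. 127.0.0.1:8400/authorizer/authorize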
proxy_address=127.0.0.1:8400 -receiver_folder=/tmp/asapo/receiver/files +beamline=test +receiver_root_folder=/tmp/asapo/receiver/files +receiver_folder=${receiver_root_folder}/${beamline}/${beamtime_id} Cleanup() { echo cleanup - rm -rf ${receiver_folder} + rm -rf ${receiver_root_folder} nomad stop nginx nomad stop receiver nomad stop discovery nomad stop broker + nomad stop authorizer # kill $producerid - echo "db.dropDatabase()" | mongo ${broker_database_name} + echo "db.dropDatabase()" | mongo ${beamtime_id} influx -execute "drop database ${monitor_database_name}" } influx -execute "create database ${monitor_database_name}" -echo "db.${broker_database_name}.insert({dummy:1})" | mongo ${broker_database_name} +echo "db.${beamtime_id}.insert({dummy:1})" | mongo ${beamtime_id} nomad run nginx.nmd +nomad run authorizer.nmd nomad run receiver.nmd nomad run discovery.nmd nomad run broker.nmd @@ -34,8 +40,8 @@ sleep 1 #producer mkdir -p ${receiver_folder} -$1 localhost:8400 100 1000 4 0 & +$1 localhost:8400 ${beamtime_id} 100 1000 4 0 100 & #producerid=`echo $!` -$2 ${proxy_address} ${broker_database_name} 2 | grep "Processed 1000 file(s)" +$2 ${proxy_address} ${beamtime_id} 2 $token | grep "Processed 1000 file(s)" diff --git a/tests/automatic/full_chain/simple_chain/check_windows.bat b/tests/automatic/full_chain/simple_chain/check_windows.bat index 2c6f5291593fc91718a68b81a4415f191bdad4ea..f26def490a092b7281adf7e2e11bfd4dfcdf3156 100644 --- a/tests/automatic/full_chain/simple_chain/check_windows.bat +++ b/tests/automatic/full_chain/simple_chain/check_windows.bat @@ -1,11 +1,19 @@ SET mongo_exe="c:\Program Files\MongoDB\Server\3.6\bin\mongo.exe" -set broker_database_name=test_run -SET receiver_folder="c:\tmp\asapo\receiver\files" +SET beamtime_id=asapo_test +SET beamline=test +SET receiver_root_folder=c:\tmp\asapo\receiver\files +SET receiver_folder="%receiver_root_folder%\%beamline%\%beamtime_id%" + + +"%3" token -secret broker_secret.key %beamtime_id% > token +set /P token=< token + set proxy_address="127.0.0.1:8400" -echo db.%broker_database_name%.insert({dummy:1}) | %mongo_exe% %broker_database_name% +echo db.%beamtime_id%.insert({dummy:1}) | %mongo_exe% %beamtime_id% c:\opt\consul\nomad run receiver.nmd +c:\opt\consul\nomad run authorizer.nmd c:\opt\consul\nomad run discovery.nmd c:\opt\consul\nomad run broker.nmd c:\opt\consul\nomad run nginx.nmd @@ -14,11 +22,11 @@ ping 1.0.0.0 -n 10 -w 100 > nul REM producer mkdir %receiver_folder% -start /B "" "%1" %proxy_address% 100 1000 4 0 +start /B "" "%1" %proxy_address% %beamtime_id% 100 1000 4 0 100 ping 1.0.0.0 -n 1 -w 100 > nul REM worker -"%2" %proxy_address% %broker_database_name% 2 | findstr /c:"Processed 1000 file(s)" || goto :error +"%2" %proxy_address% %beamtime_id% 2 %token% | findstr /c:"Processed 1000 file(s)" || goto :error goto :clean @@ -31,8 +39,10 @@ exit /b 1 c:\opt\consul\nomad stop receiver c:\opt\consul\nomad stop discovery c:\opt\consul\nomad stop broker +c:\opt\consul\nomad stop authorizer c:\opt\consul\nomad stop nginx -rmdir /S /Q %receiver_folder% -echo db.dropDatabase() | %mongo_exe% %broker_database_name% +rmdir /S /Q %receiver_root_folder% +del /f token +echo db.dropDatabase() | %mongo_exe% %beamtime_id% diff --git a/tests/automatic/producer_receiver/check_monitoring/check_linux.sh b/tests/automatic/producer_receiver/check_monitoring/check_linux.sh index 936ca16de475b8e2a744afa28eb84507862ac532..ff0af0589a529b76b0afaf0522aca3be4f701645 100644 --- a/tests/automatic/producer_receiver/check_monitoring/check_linux.sh +++ 
b/tests/automatic/producer_receiver/check_monitoring/check_linux.sh @@ -1,8 +1,10 @@ #!/usr/bin/env bash database_name=db_test -mongo_database_name=test_run -receiver_folder=/tmp/asapo/receiver/files +beamtime_id=asapo_test +beamline=test +receiver_root_folder=/tmp/asapo/receiver/files +receiver_folder=${receiver_root_folder}/${beamline}/${beamtime_id} set -e trap Cleanup EXIT @@ -12,23 +14,26 @@ Cleanup() { influx -execute "drop database ${database_name}" nomad stop receiver nomad stop discovery + nomad stop authorizer nomad stop nginx - echo "db.dropDatabase()" | mongo ${mongo_database_name} - rm -rf ${receiver_folder} + echo "db.dropDatabase()" | mongo ${beamtime_id} + rm -rf ${receiver_root_folder} } mkdir -p ${receiver_folder} influx -execute "create database ${database_name}" +nomad run authorizer.nmd nomad run receiver.nmd nomad run discovery.nmd nomad run nginx.nmd sleep 1 -$1 localhost:8400 100 112 4 0 +$1 localhost:8400 ${beamtime_id} 100 112 4 0 100 sleep 1 -influx -execute "select sum(n_requests) from statistics" -database=${database_name} -format=json | jq .results[0].series[0].values[0][1] | grep 112 +# should be 116 requests (112 data transfers and 4 authorizations) +influx -execute "select sum(n_requests) from statistics" -database=${database_name} -format=json | jq .results[0].series[0].values[0][1] | grep 116 diff --git a/tests/automatic/producer_receiver/transfer_single_file/check_linux.sh b/tests/automatic/producer_receiver/transfer_single_file/check_linux.sh index d2b4f5f5ae318c4517fa0d5db643dfaf9d9085d9..972324483c226905c385cb6d51031366f4b52555 100644 --- a/tests/automatic/producer_receiver/transfer_single_file/check_linux.sh +++ b/tests/automatic/producer_receiver/transfer_single_file/check_linux.sh @@ -5,29 +5,36 @@ set -e trap Cleanup EXIT database_name=db_test -mongo_database_name=test_run -receiver_folder=/tmp/asapo/receiver/files +beamtime_id=asapo_test +beamline=test +receiver_root_folder=/tmp/asapo/receiver/files +receiver_folder=${receiver_root_folder}/${beamline}/${beamtime_id} Cleanup() { echo cleanup - rm -rf ${receiver_folder} + rm -rf ${receiver_root_folder} nomad stop receiver nomad stop discovery + nomad stop authorizer nomad stop nginx - echo "db.dropDatabase()" | mongo ${mongo_database_name} + echo "db.dropDatabase()" | mongo ${beamtime_id} influx -execute "drop database ${database_name}" } influx -execute "create database ${database_name}" -echo "db.${mongo_database_name}.insert({dummy:1})" | mongo ${mongo_database_name} +# create db before worker starts reading it. 
todo: get rid of it +echo "db.${beamtime_id}.insert({dummy:1})" | mongo ${beamtime_id} +nomad run authorizer.nmd nomad run nginx.nmd nomad run receiver.nmd nomad run discovery.nmd mkdir -p ${receiver_folder} -$1 localhost:8400 100 1 1 0 - +$1 localhost:8400 ${beamtime_id} 100 1 1 0 30 ls -ln ${receiver_folder}/1.bin | awk '{ print $5 }'| grep 102400 + + +$1 localhost:8400 wrong_beamtime_id 100 1 1 0 1 2>&1 | grep "authorization failed" \ No newline at end of file diff --git a/tests/automatic/producer_receiver/transfer_single_file/check_windows.bat b/tests/automatic/producer_receiver/transfer_single_file/check_windows.bat index cb26780f3fa028d4de4f62daf5f57750aca7571d..7a453b7b1663cb1aa81dc8c261079215cb836202 100644 --- a/tests/automatic/producer_receiver/transfer_single_file/check_windows.bat +++ b/tests/automatic/producer_receiver/transfer_single_file/check_windows.bat @@ -1,11 +1,15 @@ SET mongo_exe="c:\Program Files\MongoDB\Server\3.6\bin\mongo.exe" -SET database_name=test_run -SET receiver_folder="c:\tmp\asapo\receiver\files" +SET beamtime_id=asapo_test +SET beamline=test +SET receiver_root_folder=c:\tmp\asapo\receiver\files +SET receiver_folder="%receiver_root_folder%\%beamline%\%beamtime_id%" -echo db.%database_name%.insert({dummy:1})" | %mongo_exe% %database_name% + +echo db.%beamtime_id%.insert({dummy:1})" | %mongo_exe% %beamtime_id% c:\opt\consul\nomad run receiver.nmd +c:\opt\consul\nomad run authorizer.nmd c:\opt\consul\nomad run discovery.nmd c:\opt\consul\nomad run nginx.nmd @@ -13,13 +17,15 @@ ping 1.0.0.0 -n 1 -w 100 > nul mkdir %receiver_folder% -%1 localhost:8400 100 1 1 0 +"%1" localhost:8400 %beamtime_id% 100 1 1 0 30 ping 1.0.0.0 -n 1 -w 100 > nul FOR /F "usebackq" %%A IN ('%receiver_folder%\1.bin') DO set size=%%~zA if %size% NEQ 102400 goto :error +"%1" localhost:8400 wrong_id 100 1 1 0 2 2>&1 | findstr /c:"authorization failed" || goto :error + goto :clean :error @@ -30,7 +36,8 @@ exit /b 1 c:\opt\consul\nomad stop receiver c:\opt\consul\nomad stop discovery c:\opt\consul\nomad stop nginx -rmdir /S /Q %receiver_folder% -echo db.dropDatabase() | %mongo_exe% %database_name% +c:\opt\consul\nomad stop authorizer +rmdir /S /Q %receiver_root_folder% +echo db.dropDatabase() | %mongo_exe% %beamtime_id% diff --git a/tests/automatic/settings/authorizer_settings.json.tpl b/tests/automatic/settings/authorizer_settings.json.tpl new file mode 100644 index 0000000000000000000000000000000000000000..052462d535427c936717390da210fd9df3867e7e --- /dev/null +++ b/tests/automatic/settings/authorizer_settings.json.tpl @@ -0,0 +1,7 @@ +{ + "Port": {{ env "NOMAD_PORT_authorizer" }}, + "LogLevel":"debug", + "AlwaysAllowedBeamtimes":[{"BeamtimeId":"asapo_test","Beamline":"test"}] +} + + diff --git a/tests/automatic/settings/broker_secret.key b/tests/automatic/settings/broker_secret.key new file mode 100644 index 0000000000000000000000000000000000000000..1d100e0ec247d5df6a06e5029a392b93b2a6fbe2 --- /dev/null +++ b/tests/automatic/settings/broker_secret.key @@ -0,0 +1 @@ +12ljzgneasfd diff --git a/tests/automatic/settings/broker_settings.json b/tests/automatic/settings/broker_settings.json index a5ccb0752b80b81e16794fce6f1b6388288ecbc8..ab8984917ca02329d8abcc1c928e09c9db745486 100644 --- a/tests/automatic/settings/broker_settings.json +++ b/tests/automatic/settings/broker_settings.json @@ -3,5 +3,6 @@ "MonitorDbAddress": "localhost:8086", "MonitorDbName": "db_test", "port":5005, - "LogLevel":"info" + "LogLevel":"info", + "SecretFile":"broker_secret.key" } \ No newline at end of file diff --git
a/tests/automatic/settings/broker_settings.json.tpl b/tests/automatic/settings/broker_settings.json.tpl index af6d1dcb2492b7cacf45268b761a92e20ab0521e..2716cc6e332e9dbf7e0e0f30e2ebb741f613ae80 100644 --- a/tests/automatic/settings/broker_settings.json.tpl +++ b/tests/automatic/settings/broker_settings.json.tpl @@ -3,5 +3,6 @@ "MonitorDbAddress": "localhost:8086", "MonitorDbName": "db_test", "port":{{ env "NOMAD_PORT_broker" }}, - "LogLevel":"info" + "LogLevel":"info", + "SecretFile":"broker_secret.key" } \ No newline at end of file diff --git a/tests/automatic/settings/nginx.conf.tpl b/tests/automatic/settings/nginx.conf.tpl index 88b81082f43ab60f599b7483a23ae5deb70aa16c..a545307b376e004d2a0d6e5cbad696996b6a4136 100644 --- a/tests/automatic/settings/nginx.conf.tpl +++ b/tests/automatic/settings/nginx.conf.tpl @@ -19,6 +19,7 @@ http { server { listen {{ env "NOMAD_PORT_nginx" }}; set $discovery_endpoint discovery.service.asapo; + set $authorizer_endpoint authorizer.service.asapo; # set $fluentd_endpoint localhost; location /discovery/ { rewrite ^/discovery(/.*) $1 break; @@ -28,6 +29,11 @@ http { # rewrite ^/logs(/.*) $1 break; proxy_pass http://localhost:9880/asapo; } + location /authorizer/ { + rewrite ^/authorizer(/.*) $1 break; + proxy_pass http://$authorizer_endpoint:5007$uri; + } + location /nginx-health { return 200 "healthy\n"; diff --git a/tests/automatic/settings/receiver.json.tpl.lin b/tests/automatic/settings/receiver.json.tpl.lin index 8d98fd1abee361de21a2b4de1d87bd2fa20f277b..399449cdd0a67ae7612ded0ddce5d133dc11ec71 100644 --- a/tests/automatic/settings/receiver.json.tpl.lin +++ b/tests/automatic/settings/receiver.json.tpl.lin @@ -2,7 +2,8 @@ "MonitorDbAddress":"localhost:8086", "MonitorDbName": "db_test", "BrokerDbAddress":"localhost:27017", - "BrokerDbName": "test_run", + "AuthorizationServer": "localhost:8400/authorizer", + "AuthorizationInterval": 10000, "ListenPort": {{ env "NOMAD_PORT_recv" }}, "Tag": "{{ env "NOMAD_ADDR_recv" }}", "WriteToDisk":true, diff --git a/tests/automatic/settings/receiver.json.tpl.win b/tests/automatic/settings/receiver.json.tpl.win index d4cb5e03853d46bdce0e0fcb19dd7eb016c2ce28..58cacee7c378bb2b5dff3c8f150d62324db112b1 100644 --- a/tests/automatic/settings/receiver.json.tpl.win +++ b/tests/automatic/settings/receiver.json.tpl.win @@ -2,7 +2,8 @@ "MonitorDbAddress":"localhost:8086", "MonitorDbName": "db_test", "BrokerDbAddress":"localhost:27017", - "BrokerDbName": "test_run", + "AuthorizationServer": "localhost:8400/authorizer", + "AuthorizationInterval": 10000, "ListenPort": {{ env "NOMAD_PORT_recv" }}, "Tag": "{{ env "NOMAD_ADDR_recv" }}", "WriteToDisk":true, diff --git a/tests/automatic/worker/next_multithread_broker/check_linux.sh b/tests/automatic/worker/next_multithread_broker/check_linux.sh index c5a52cbb37b40d257309f329049f3b3da5b72470..308bd1ff6292bd29c86ee6e772f0a66a3e59d0e7 100644 --- a/tests/automatic/worker/next_multithread_broker/check_linux.sh +++ b/tests/automatic/worker/next_multithread_broker/check_linux.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash database_name=test_run +token_test_run=K38Mqc90iRv8fC7prcFHd994mF_wfUiJnWBfIjIzieo= set -e @@ -26,6 +27,6 @@ do echo 'db.data.insert({"_id":'$i',"size":100,"name":"'$i'","lastchange":1})' | mongo ${database_name} done -$@ 127.0.0.1:8400 $database_name 4 10 +$@ 127.0.0.1:8400 $database_name 4 10 $token_test_run diff --git a/tests/automatic/worker/next_multithread_broker/check_windows.bat b/tests/automatic/worker/next_multithread_broker/check_windows.bat index 
b3762c8a856d534d4f92590f68b5820e09265ab1..1a5d84ae4b0a135898a378226673c0f2a258d39b 100644 --- a/tests/automatic/worker/next_multithread_broker/check_windows.bat +++ b/tests/automatic/worker/next_multithread_broker/check_windows.bat @@ -1,5 +1,6 @@ SET database_name=test_run SET mongo_exe="c:\Program Files\MongoDB\Server\3.6\bin\mongo.exe" +set token_test_run=K38Mqc90iRv8fC7prcFHd994mF_wfUiJnWBfIjIzieo= ::first argument path to the executable @@ -12,7 +13,7 @@ ping 1.0.0.0 -n 10 -w 100 > nul for /l %%x in (1, 1, 10) do echo db.data.insert({"_id":%%x,"size":100,"name":"%%x","lastchange":1}) | %mongo_exe% %database_name% || goto :error -%1 127.0.0.1:8400 %database_name% 4 10 || goto :error +%1 127.0.0.1:8400 %database_name% 4 10 %token_test_run% || goto :error goto :clean diff --git a/tests/automatic/worker/next_multithread_broker/next_multithread_broker.cpp b/tests/automatic/worker/next_multithread_broker/next_multithread_broker.cpp index 5f8321ec4a58fc5fcc9319377a34cf98033e67bb..ffd3f90f5b5ab791586d89f3045ac60032b2d47e 100644 --- a/tests/automatic/worker/next_multithread_broker/next_multithread_broker.cpp +++ b/tests/automatic/worker/next_multithread_broker/next_multithread_broker.cpp @@ -28,12 +28,13 @@ void Assert(std::vector<asapo::FileInfos> file_infos, int nthreads, int nfiles) struct Args { std::string server; std::string run_name; + std::string token; int nthreads; int nfiles; }; Args GetArgs(int argc, char* argv[]) { - if (argc != 5) { + if (argc != 6) { std::cout << "Wrong number of arguments" << std::endl; exit(EXIT_FAILURE); } @@ -41,12 +42,14 @@ Args GetArgs(int argc, char* argv[]) { std::string source_name{argv[2]}; int nthreads = std::stoi(argv[3]); int nfiles = std::stoi(argv[4]); - return Args{server, source_name, nthreads, nfiles}; + std::string token{argv[5]}; + + return Args{server, source_name, token, nthreads, nfiles}; } void GetAllFromBroker(const Args& args) { asapo::Error err; - auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.server, args.run_name, &err); + auto broker = asapo::DataBrokerFactory::CreateServerBroker(args.server, args.run_name, args.token, &err); std::vector<asapo::FileInfos>file_infos(args.nthreads); auto exec_next = [&](int i) { @@ -54,6 +57,7 @@ void GetAllFromBroker(const Args& args) { while ((err = broker->GetNext(&fi, nullptr)) == nullptr) { file_infos[i].emplace_back(fi); } + printf("%s\n", err->Explain().c_str()); }; std::vector<std::thread> threads; diff --git a/tests/manual/performance_broker/settings.json b/tests/manual/performance_broker/settings.json index a2c1a4a5ab7238e14c26667e5bfc7335e935d96d..a687e733f917c9b674dc2a82352e1b9dd07eb47e 100644 --- a/tests/manual/performance_broker/settings.json +++ b/tests/manual/performance_broker/settings.json @@ -3,5 +3,6 @@ "MonitorDbAddress": "localhost:8086", "MonitorDbName": "db_test", "port":5005, - "LogLevel":"info" + "LogLevel":"info", + "SecretFile":"broker_secret.key" } \ No newline at end of file diff --git a/tests/manual/performance_broker/test.sh b/tests/manual/performance_broker/test.sh index 9c36fce459ccb7f53f587fdaed07cbe13fa36bb1..66e361c73ad28ec3c9ace267bcb831cba2d06be5 100755 --- a/tests/manual/performance_broker/test.sh +++ b/tests/manual/performance_broker/test.sh @@ -4,10 +4,12 @@ # reads fileset into database # calls getnext_broker example from $worker_node -nthreads=16 +nthreads=1 # a directory with many files in it dir=/gpfs/petra3/scratch/yakubov/test -run_name=test +run_name=test_run +token=K38Mqc90iRv8fC7prcFHd994mF_wfUiJnWBfIjIzieo= + service_node=max-wgs 
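A related detail the manual chains further down have to account for: the receiver no longer writes into a flat files/ directory but into a per-beamline, per-beamtime subtree (the layout the file-write handler test above expects). A sketch with the values used throughout these tests; the root path is the test default:

    receiver_root_folder=/tmp/asapo/receiver/files                        # test default root
    receiver_folder=${receiver_root_folder}/${beamline}/${beamtime_id}    # e.g. /tmp/asapo/receiver/files/test/asapo_test
    ls -ln ${receiver_folder}/1.bin                                       # produced files now live one level deeper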
monitor_node=zitpcx27016 @@ -42,6 +44,11 @@ ssh ${service_node} "bash -c 'cd ${service_dir}; nohup ./asapo-discovery -config scp settings_tmp.json ${service_node}:${service_dir}/settings.json + +scp ../../../tests/automatic/settings/broker_secret.key ${service_node}:${service_dir}/broker_secret.key + + + rm settings_tmp.json scp ../../../cmake-build-release/broker/asapo-broker ${service_node}:${service_dir} ssh ${service_node} "bash -c 'cd ${service_dir}; nohup ./asapo-broker -config settings.json &> ${service_dir}/broker.log &'" @@ -52,7 +59,7 @@ ssh ${worker_node} ${worker_dir}/folder2db -n ${nthreads} ${dir} ${run_name} ${s sleep 3 scp ../../../cmake-build-release/examples/worker/getnext_broker/getnext_broker ${worker_node}:${worker_dir} -ssh ${worker_node} ${worker_dir}/getnext_broker ${service_node}:8400 ${run_name} ${nthreads} +ssh ${worker_node} ${worker_dir}/getnext_broker ${service_node}:8400 ${run_name} ${nthreads} $token diff --git a/tests/manual/performance_full_chain_simple/authorizer.json b/tests/manual/performance_full_chain_simple/authorizer.json new file mode 100644 index 0000000000000000000000000000000000000000..64db1880ddfa86e7d478b7fbc8825744e4cfcb1d --- /dev/null +++ b/tests/manual/performance_full_chain_simple/authorizer.json @@ -0,0 +1,7 @@ +{ + "Port": 5007, + "LogLevel":"info", + "AlwaysAllowedBeamtimes":[{"BeamtimeId":"asapo_test","Beamline":"test"}] +} + + diff --git a/tests/manual/performance_full_chain_simple/broker.json b/tests/manual/performance_full_chain_simple/broker.json index a2c1a4a5ab7238e14c26667e5bfc7335e935d96d..a687e733f917c9b674dc2a82352e1b9dd07eb47e 100644 --- a/tests/manual/performance_full_chain_simple/broker.json +++ b/tests/manual/performance_full_chain_simple/broker.json @@ -3,5 +3,6 @@ "MonitorDbAddress": "localhost:8086", "MonitorDbName": "db_test", "port":5005, - "LogLevel":"info" + "LogLevel":"info", + "SecretFile":"broker_secret.key" } \ No newline at end of file diff --git a/tests/manual/performance_full_chain_simple/receiver.json b/tests/manual/performance_full_chain_simple/receiver.json index adf89f8ee2e32fef2d2ea21ca5e3d271a6071d65..8974d937b402f5622452a00944d9431561151b65 100644 --- a/tests/manual/performance_full_chain_simple/receiver.json +++ b/tests/manual/performance_full_chain_simple/receiver.json @@ -2,7 +2,8 @@ "MonitorDbAddress":"localhost:8086", "MonitorDbName": "db_test", "BrokerDbAddress":"localhost:27017", - "BrokerDbName": "test_run", + "AuthorizationServer": "localhost:5007", + "AuthorizationInterval": 10000, "ListenPort":4200, "WriteToDisk":true, "WriteToDb":true, diff --git a/tests/manual/performance_full_chain_simple/test.sh b/tests/manual/performance_full_chain_simple/test.sh index 3ddd186fb746e14c1b473bba8973e0127721ac3a..73ac1fd4a1f17d9d237328643fe6797e61e9f314 100755 --- a/tests/manual/performance_full_chain_simple/test.sh +++ b/tests/manual/performance_full_chain_simple/test.sh @@ -7,13 +7,18 @@ trap Cleanup EXIT #clean-up Cleanup() { set +e -ssh ${receiver_node} rm -f ${receiver_dir}/files/* +ssh ${receiver_node} rm -f ${receiver_dir}/files/${beamline}/${beamtime_id}/* ssh ${receiver_node} killall receiver ssh ${receiver_node} killall asapo-discovery +ssh ${receiver_node} killall asapo-authorizer ssh ${broker_node} killall asapo-broker ssh ${broker_node} docker rm -f -v mongo } +beamtime_id=asapo_test +beamline=test + + #monitoring_setup monitor_node=zitpcx27016 monitor_port=8086 @@ -27,13 +32,14 @@ log_dir=~/fullchain_tests/logs file_size=10000 file_num=$((10000000 / $file_size)) +#file_num=$((100000 / 
$file_size)) echo filesize: ${file_size}K, filenum: $file_num # receiver_setup receiver_node=max-wgs receiver_port=4201 receiver_dir=/gpfs/petra3/scratch/yakubov/receiver_tests -ssh ${receiver_node} mkdir -p ${receiver_dir}/files +ssh ${receiver_node} mkdir -p ${receiver_dir}/files/${beamline}/${beamtime_id} scp ../../../cmake-build-release/receiver/receiver ${receiver_node}:${receiver_dir} cat receiver.json | jq "to_entries | @@ -49,6 +55,10 @@ cat receiver.json | scp receiver_tmp.json ${receiver_node}:${receiver_dir}/receiver.json rm receiver_tmp.json +#authorizer_setup +scp ../../../cmake-build-release/authorizer/asapo-authorizer ${receiver_node}:${receiver_dir} +scp authorizer.json ${receiver_node}:${receiver_dir}/authorizer.json + # discovery_setup discovery_port=5006 @@ -102,6 +112,8 @@ worker_node=max-display002 worker_dir=~/fullchain_tests nthreads=16 scp ../../../cmake-build-release/examples/worker/getnext_broker/getnext_broker ${worker_node}:${worker_dir} +scp ../../../cmake-build-release/asapo_tools/asapo ${worker_node}:${worker_dir} +scp ../../../tests/automatic/settings/broker_secret.key ${worker_node}:${worker_dir}/broker_secret.key #monitoring_start ssh ${monitor_node} influx -execute \"create database db_test\" @@ -114,6 +126,9 @@ ssh ${broker_node} docker run -d -p 27017:27017 --name mongo mongo ssh ${receiver_node} "bash -c 'cd ${receiver_dir}; nohup ./asapo-discovery -config discovery.json &> ${log_dir}/discovery.log &'" sleep 0.3 +#authorizer_start +ssh ${receiver_node} "bash -c 'cd ${receiver_dir}; nohup ./asapo-authorizer -config authorizer.json &> ${log_dir}/log.authorizer &'" + #receiver_start ssh ${receiver_node} "bash -c 'cd ${receiver_dir}; nohup ./receiver receiver.json &> ${log_dir}/log.receiver &'" sleep 0.3 @@ -125,10 +140,13 @@ sleep 0.3 sleep 5 #producer_start -ssh ${producer_node} "bash -c 'cd ${producer_dir}; nohup ./dummy-data-producer ${receiver_node}:8400 ${file_size} ${file_num} ${producer_nthreads} 0 &> ${log_dir}/producer.log &'" +ssh ${producer_node} "bash -c 'cd ${producer_dir}; nohup ./dummy-data-producer ${receiver_node}:8400 ${beamtime_id} ${file_size} ${file_num} ${producer_nthreads} 0 100 &> ${log_dir}/producer.log &'" sleep 1 +#prepare token +ssh ${worker_node} "bash -c '${worker_dir}/asapo token -secret ${worker_dir}/broker_secret.key ${beamtime_id} >${worker_dir}/token'" #worker_start -ssh ${worker_node} ${worker_dir}/getnext_broker ${receiver_node}:8400 test_run ${nthreads} +ssh ${worker_node} "bash -c '${worker_dir}/getnext_broker ${receiver_node}:8400 ${beamtime_id} ${nthreads} \`cat ${worker_dir}/token\`'" + diff --git a/tests/manual/performance_producer_receiver/authorizer.json b/tests/manual/performance_producer_receiver/authorizer.json new file mode 100644 index 0000000000000000000000000000000000000000..64db1880ddfa86e7d478b7fbc8825744e4cfcb1d --- /dev/null +++ b/tests/manual/performance_producer_receiver/authorizer.json @@ -0,0 +1,7 @@ +{ + "Port": 5007, + "LogLevel":"info", + "AlwaysAllowedBeamtimes":[{"BeamtimeId":"asapo_test","Beamline":"test"}] +} + + diff --git a/tests/manual/performance_producer_receiver/receiver.json b/tests/manual/performance_producer_receiver/receiver.json index adf89f8ee2e32fef2d2ea21ca5e3d271a6071d65..8974d937b402f5622452a00944d9431561151b65 100644 --- a/tests/manual/performance_producer_receiver/receiver.json +++ b/tests/manual/performance_producer_receiver/receiver.json @@ -2,7 +2,8 @@ "MonitorDbAddress":"localhost:8086", "MonitorDbName": "db_test", "BrokerDbAddress":"localhost:27017", - 
"BrokerDbName": "test_run", + "AuthorizationServer": "localhost:5007", + "AuthorizationInterval": 10000, "ListenPort":4200, "WriteToDisk":true, "WriteToDb":true, diff --git a/tests/manual/performance_producer_receiver/test.sh b/tests/manual/performance_producer_receiver/test.sh index 53f9dfe6f417defc8eab078a206221cea07f4d52..faf2493473db989acb220f61099d80c9fa0a848c 100755 --- a/tests/manual/performance_producer_receiver/test.sh +++ b/tests/manual/performance_producer_receiver/test.sh @@ -6,9 +6,10 @@ trap Cleanup EXIT Cleanup() { set +e -ssh ${service_node} rm -f ${service_dir}/files/* +ssh ${service_node} rm -f ${service_dir}/files/${beamline}/${beamtime_id}/* ssh ${service_node} killall receiver ssh ${service_node} killall asapo-discovery +ssh ${service_node} killall asapo-authorizer ssh ${service_node} docker rm -f -v mongo } @@ -21,6 +22,9 @@ service_node=max-wgs service_ip=`resolveip -s ${service_node}` discovery_port=5006 receiver_port=4201 +beamtime_id=asapo_test +beamline=test + monitor_node=zitpcx27016 monitor_port=8086 @@ -36,11 +40,15 @@ ssh ${monitor_node} influx -execute \"create database db_test\" #ssh ${monitor_node} docker run -d -p 8086 -p 8086 --name influxdb influxdb ssh ${service_node} mkdir -p ${service_dir} -ssh ${service_node} mkdir -p ${service_dir}/files +ssh ${service_node} mkdir -p ${service_dir}/files/${beamtime_id} ssh ${worker_node} mkdir -p ${worker_dir} scp ../../../cmake-build-release/receiver/receiver ${service_node}:${service_dir} scp ../../../cmake-build-release/discovery/asapo-discovery ${service_node}:${service_dir} + +scp ../../../cmake-build-release/authorizer/asapo-authorizer ${service_node}:${service_dir} +scp authorizer.json ${service_node}:${service_dir}/authorizer.json + scp ../../../cmake-build-release/examples/producer/dummy-data-producer/dummy-data-producer ${worker_node}:${worker_dir} function do_work { @@ -73,7 +81,10 @@ cat discovery_tmp.json | jq ".Receiver.StaticEndpoints = [\"${service_node}:${re scp discovery_tmp1.json ${service_node}:${service_dir}/discovery.json scp receiver_tmp.json ${service_node}:${service_dir}/receiver.json + + rm discovery_tmp*.json receiver_tmp.json +ssh ${service_node} "bash -c 'cd ${service_dir}; nohup ./asapo-authorizer -config authorizer.json &> ${service_dir}/authorizer.log &'" ssh ${service_node} "bash -c 'cd ${service_dir}; nohup ./receiver receiver.json &> ${service_dir}/receiver.log &'" ssh ${service_node} "bash -c 'cd ${service_dir}; nohup ./asapo-discovery -config discovery.json &> ${service_dir}/discovery.log &'" @@ -82,15 +93,16 @@ for size in 100 1000 10000 do ssh ${service_node} docker run -d -p 27017:27017 --name mongo mongo echo =================================================================== -ssh ${worker_node} ${worker_dir}/dummy-data-producer ${service_ip}:8400 ${size} 1000 8 0 +ssh ${worker_node} ${worker_dir}/dummy-data-producer ${service_ip}:8400 ${beamtime_id} ${size} 10000 8 0 100 if [ "$1" == "true" ] then - ssh ${service_node} rm -f ${service_dir}/files/* + ssh ${service_node} rm -f ${service_dir}/files/${beamline}/${beamtime_id}/* fi ssh ${service_node} docker rm -f -v mongo done ssh ${service_node} killall receiver ssh ${service_node} killall asapo-discovery +ssh ${service_node} killall asapo-authorizer } echo diff --git a/worker/api/cpp/include/worker/data_broker.h b/worker/api/cpp/include/worker/data_broker.h index d916e629dd16daa6056f45763abd7f76cfc8623a..bc03ec02dff9d4cce16f01c6f65ca8db792c759a 100644 --- a/worker/api/cpp/include/worker/data_broker.h +++ 
b/worker/api/cpp/include/worker/data_broker.h @@ -21,6 +21,7 @@ auto const kNotFound = "Uri not found"; auto const kPermissionDenied = "Permissionn Denied"; auto const kNoData = "No Data"; auto const kWrongInput = "Wrong Input"; +auto const kAuthorizationError = "authorization error"; auto const kInternalError = "Internal Error"; auto const kUnknownIOError = "Unknown IO Error"; } @@ -47,8 +48,8 @@ class DataBrokerFactory { public: static std::unique_ptr<DataBroker> CreateFolderBroker(const std::string& source_name, Error* error) noexcept; - static std::unique_ptr<DataBroker> CreateServerBroker(const std::string& server_name, - const std::string& source_name, + static std::unique_ptr<DataBroker> CreateServerBroker(std::string server_name, + std::string beamtime_id, std::string token, Error* error) noexcept; }; diff --git a/worker/api/cpp/src/data_broker.cpp b/worker/api/cpp/src/data_broker.cpp index 624e60ed55261d7856e45ed772ef64f5b5c1e78e..0e833b2ac2982de2c858f4ae8c5278403e2869c3 100644 --- a/worker/api/cpp/src/data_broker.cpp +++ b/worker/api/cpp/src/data_broker.cpp @@ -11,7 +11,6 @@ std::unique_ptr<DataBroker> Create(const std::string& source_name, Args&& ... args) noexcept { if (source_name.empty()) { error->reset(new SimpleError("Empty Data Source")); - //*return_code = WorkerErrorMessage::kEmptyDatasource; return nullptr; } @@ -21,7 +20,6 @@ std::unique_ptr<DataBroker> Create(const std::string& source_name, error->reset(nullptr); } catch (...) { // we do not test this part error->reset(new SimpleError("Memory error")); -// *return_code = WorkerErrorMessage::kMemoryError; } return p; @@ -33,10 +31,10 @@ std::unique_ptr<DataBroker> DataBrokerFactory::CreateFolderBroker(const std::str return Create<FolderDataBroker>(source_name, error); }; -std::unique_ptr<DataBroker> DataBrokerFactory::CreateServerBroker(const std::string& server_name, - const std::string& source_name, +std::unique_ptr<DataBroker> DataBrokerFactory::CreateServerBroker(std::string server_name, + std::string beamtime_id, std::string token, Error* error) noexcept { - return Create<ServerDataBroker>(server_name, error, source_name); + return Create<ServerDataBroker>(std::move(server_name), error, std::move(beamtime_id), std::move(token)); } diff --git a/worker/api/cpp/src/server_data_broker.cpp b/worker/api/cpp/src/server_data_broker.cpp index 2859b63e297cce2fefe8704121dabc99b01d631f..f517a770ffcbc1a0c39de35d035166c76921bd33 100644 --- a/worker/api/cpp/src/server_data_broker.cpp +++ b/worker/api/cpp/src/server_data_broker.cpp @@ -20,6 +20,9 @@ Error HttpCodeToWorkerError(const HttpCode& code) { case HttpCode::BadRequest: message = WorkerErrorMessage::kWrongInput; break; + case HttpCode::Unauthorized: + message = WorkerErrorMessage::kAuthorizationError; + break; case HttpCode::InternalServerError: message = WorkerErrorMessage::kErrorReadingSource; break; @@ -36,10 +39,11 @@ Error HttpCodeToWorkerError(const HttpCode& code) { return Error{new HttpError(message, code)}; } -ServerDataBroker::ServerDataBroker(const std::string& server_uri, - const std::string& source_name) : +ServerDataBroker::ServerDataBroker(std::string server_uri, + std::string source_name, + std::string token) : io__{GenerateDefaultIO()}, httpclient__{DefaultHttpClient()}, - server_uri_{server_uri}, source_name_{source_name} { + server_uri_{std::move(server_uri)}, source_name_{std::move(source_name)}, token_{std::move(token)} { } Error ServerDataBroker::Connect() { @@ -75,10 +79,14 @@ void ServerDataBroker::ProcessServerError(Error* err, const std::string& 
respons return; } +std::string ServerDataBroker::RequestWithToken(std::string uri) { + return std::move(uri) + "?token=" + token_; +} + Error ServerDataBroker::ProcessRequest(std::string* response, std::string request_uri) { Error err; HttpCode code; - *response = httpclient__->Get(request_uri, &code, &err); + *response = httpclient__->Get(RequestWithToken(request_uri), &code, &err); if (err != nullptr) { return err; } diff --git a/worker/api/cpp/src/server_data_broker.h b/worker/api/cpp/src/server_data_broker.h index 43ccfeb7be55df5a95c8748eada7604a79986f5b..5c666dafb3bea845e70bc9465d63da0a4923a9c8 100644 --- a/worker/api/cpp/src/server_data_broker.h +++ b/worker/api/cpp/src/server_data_broker.h @@ -12,13 +12,14 @@ Error HttpCodeToWorkerError(const HttpCode& code); class ServerDataBroker final : public asapo::DataBroker { public: - explicit ServerDataBroker(const std::string& server_uri, const std::string& source_name); + explicit ServerDataBroker(std::string server_uri, std::string source_name, std::string token); Error Connect() override; Error GetNext(FileInfo* info, FileData* data) override; void SetTimeout(uint64_t timeout_ms) override; std::unique_ptr<IO> io__; // modified in testings to mock system calls,otherwise do not touch std::unique_ptr<HttpClient> httpclient__; private: + std::string RequestWithToken(std::string uri); Error GetFileInfoFromServer(FileInfo* info, const std::string& operation); Error GetBrokerUri(); void ProcessServerError(Error* err, const std::string& response, std::string* redirect_uri); @@ -26,6 +27,7 @@ class ServerDataBroker final : public asapo::DataBroker { std::string server_uri_; std::string current_broker_uri_; std::string source_name_; + std::string token_; uint64_t timeout_ms_ = 0; }; diff --git a/worker/api/cpp/unittests/test_server_broker.cpp b/worker/api/cpp/unittests/test_server_broker.cpp index bf8c4748d341220be3a9a48e5d60d421228d856a..58e24e8eb57d448fa65ecec72ce283d9fa740575 100644 --- a/worker/api/cpp/unittests/test_server_broker.cpp +++ b/worker/api/cpp/unittests/test_server_broker.cpp @@ -38,12 +38,12 @@ using testing::AllOf; namespace { TEST(FolderDataBroker, SetCorrectIo) { - auto data_broker = std::unique_ptr<ServerDataBroker> {new ServerDataBroker("test", "dbname")}; + auto data_broker = std::unique_ptr<ServerDataBroker> {new ServerDataBroker("test", "beamtime_id", "token")}; ASSERT_THAT(dynamic_cast<asapo::SystemIO*>(data_broker->io__.get()), Ne(nullptr)); } TEST(FolderDataBroker, SetCorrectHttpClient) { - auto data_broker = std::unique_ptr<ServerDataBroker> {new ServerDataBroker("test", "dbname")}; + auto data_broker = std::unique_ptr<ServerDataBroker> {new ServerDataBroker("test", "beamtime_id", "token")}; ASSERT_THAT(dynamic_cast<asapo::CurlHttpClient*>(data_broker->httpclient__.get()), Ne(nullptr)); } @@ -56,9 +56,10 @@ class ServerDataBrokerTests : public Test { FileInfo info; std::string expected_server_uri = "test:8400"; std::string expected_broker_uri = "broker:5005"; + std::string expected_token = "token"; void SetUp() override { - data_broker = std::unique_ptr<ServerDataBroker> {new ServerDataBroker(expected_server_uri, "dbname")}; + data_broker = std::unique_ptr<ServerDataBroker> {new ServerDataBroker(expected_server_uri, "beamtime_id", expected_token)}; data_broker->io__ = std::unique_ptr<IO> {&mock_io}; data_broker->httpclient__ = std::unique_ptr<asapo::HttpClient> {&mock_http_client}; } @@ -97,10 +98,11 @@ TEST_F(ServerDataBrokerTests, GetNextReturnsErrorOnWrongInput) { TEST_F(ServerDataBrokerTests, 
GetNextUsesCorrectUri) { MockGetBrokerUri(); - EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/dbname/next", _, _)).WillOnce(DoAll( - SetArgPointee<1>(HttpCode::OK), - SetArgPointee<2>(nullptr), - Return(""))); + EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/next?token=" + expected_token, _, + _)).WillOnce(DoAll( + SetArgPointee<1>(HttpCode::OK), + SetArgPointee<2>(nullptr), + Return(""))); data_broker->GetNext(&info, nullptr); } @@ -108,7 +110,6 @@ TEST_F(ServerDataBrokerTests, GetNextUsesCorrectUri) { TEST_F(ServerDataBrokerTests, GetNextReturnsEOFFromHttpClient) { MockGetBrokerUri(); - EXPECT_CALL(mock_http_client, Get_t(HasSubstr("next"), _, _)).WillOnce(DoAll( SetArgPointee<1>(HttpCode::Conflict), SetArgPointee<2>(nullptr), @@ -120,6 +121,21 @@ TEST_F(ServerDataBrokerTests, GetNextReturnsEOFFromHttpClient) { ASSERT_THAT(err->Explain(), HasSubstr("timeout")); } +TEST_F(ServerDataBrokerTests, GetNextReturnsNotAuthorized) { + MockGetBrokerUri(); + + EXPECT_CALL(mock_http_client, Get_t(HasSubstr("next"), _, _)).WillOnce(DoAll( + SetArgPointee<1>(HttpCode::Unauthorized), + SetArgPointee<2>(nullptr), + Return(""))); + + auto err = data_broker->GetNext(&info, nullptr); + + ASSERT_THAT(err, Ne(nullptr)); + ASSERT_THAT(err->Explain(), HasSubstr("authorization")); +} + + TEST_F(ServerDataBrokerTests, GetNextReturnsWrongResponseFromHttpClient) { MockGetBrokerUri(); @@ -186,7 +202,7 @@ TEST_F(ServerDataBrokerTests, GetNextReturnsEOFFromHttpClientUntilTimeout) { SetArgPointee<2>(nullptr), Return("{\"id\":1}"))); - EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/dbname/1", _, + EXPECT_CALL(mock_http_client, Get_t(expected_broker_uri + "/database/beamtime_id/1?token=" + expected_token, _, _)).Times(AtLeast(1)).WillRepeatedly(DoAll( SetArgPointee<1>(HttpCode::Conflict), SetArgPointee<2>(nullptr), diff --git a/worker/api/cpp/unittests/test_worker_api.cpp b/worker/api/cpp/unittests/test_worker_api.cpp index 4b36ea3941e6366b95a9e409ed45eefd37c1c6d0..6af41b9530d4a7ec9acae32d23ed0e9d013698e8 100644 --- a/worker/api/cpp/unittests/test_worker_api.cpp +++ b/worker/api/cpp/unittests/test_worker_api.cpp @@ -39,14 +39,13 @@ TEST_F(DataBrokerFactoryTests, FailCreateDataSourceWithEmptySource) { auto data_broker = DataBrokerFactory::CreateFolderBroker("", &error); -// ASSERT_THAT(error->Explain(), Eq(WorkerErrorMessage::kEmptyDatasource)); ASSERT_THAT(error->Explain(), Eq("Empty Data Source")); ASSERT_THAT(data_broker.get(), Eq(nullptr)); } TEST_F(DataBrokerFactoryTests, CreateServerDataSource) { - auto data_broker = DataBrokerFactory::CreateServerBroker("server", "database", &error); + auto data_broker = DataBrokerFactory::CreateServerBroker("server", "beamtime_id", "token", &error); ASSERT_THAT(error, Eq(nullptr)); ASSERT_THAT(dynamic_cast<ServerDataBroker*>(data_broker.get()), Ne(nullptr));
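Summarizing the worker-side contract after this change: CreateServerBroker takes the beamtime id plus a token, the token is appended to every broker request as ?token=..., and HTTP 401 surfaces as an authorization error. For scripts this means the example workers receive the token as an extra trailing argument; a sketch using the test values from above (endpoint, beamtime id and thread count are placeholders):

    token=$(./asapo token -secret broker_secret.key asapo_test)   # token for the asapo_test beamtime
    ./getnext_broker 127.0.0.1:8400 asapo_test 4 ${token}         # the token is now the last argument
    # each call then hits <broker>/database/asapo_test/next?token=<token>;
    # a rejected token returns 401 and err->Explain() reports an authorization error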