diff --git a/common/cpp/unittests/logger/test_fluentd_sink.cpp b/common/cpp/unittests/logger/test_fluentd_sink.cpp
index be06bb02df35ad3407fa4fea9abc6bd37a190899..bb41891d592774e2a11e6c41299f67ee2638569d 100644
--- a/common/cpp/unittests/logger/test_fluentd_sink.cpp
+++ b/common/cpp/unittests/logger/test_fluentd_sink.cpp
@@ -41,7 +41,7 @@ class FluentdSinkTests : public Test {
 };
 
 TEST_F(FluentdSinkTests, SendPost) {
-    EXPECT_CALL(mock_http_client, Post_t("test_url", HasSubstr("hello"), _, _, _));
+    EXPECT_CALL(mock_http_client, Post_t("test_url",_, HasSubstr("hello"), _, _));
     logger->info("hello");
 }
 
diff --git a/tests/automatic/consumer/consumer_api_python/check_linux.sh b/tests/automatic/consumer/consumer_api_python/check_linux.sh
index 04f3c058ba108f46132e138160a9900abbeb1dd8..262d2ae2ff86ca041e8c20d2b4d44ad391c7fe42 100644
--- a/tests/automatic/consumer/consumer_api_python/check_linux.sh
+++ b/tests/automatic/consumer/consumer_api_python/check_linux.sh
@@ -38,6 +38,8 @@ do
     echo 'db.data_default.insert({"_id":'$i',"size":6,"name":"'$i'","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}})' | mongo ${database_name}
 done
 
+echo 'db.data_streamfts.insert({"_id":'1',"size":0,"name":"'1'","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}})' | mongo ${database_name}
+
 for i in `seq 1 5`;
 do
     echo 'db.data_stream1.insert({"_id":'$i',"size":6,"name":"'1$i'","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}})' | mongo ${database_name}
diff --git a/tests/automatic/consumer/consumer_api_python/check_windows.bat b/tests/automatic/consumer/consumer_api_python/check_windows.bat
index 1b0ade3d5327978c3a7c99f6024dfb13ab04fc04..360d0ef0a300b2d5a8a4548b088d2d74af878cd2 100644
--- a/tests/automatic/consumer/consumer_api_python/check_windows.bat
+++ b/tests/automatic/consumer/consumer_api_python/check_windows.bat
@@ -14,6 +14,8 @@ call start_services.bat
 
 for /l %%x in (1, 1, 5) do echo db.data_default.insert({"_id":%%x,"size":6,"name":"%%x","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}}) | %mongo_exe% %database_name% || goto :error
 
+echo db.data_streamfts.insert({"_id":1,"size":0,"name":"1","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}}) | %mongo_exe% %database_name% || goto :error
+
 for /l %%x in (1, 1, 5) do echo db.data_stream1.insert({"_id":%%x,"size":6,"name":"1%%x","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}}) | %mongo_exe% %database_name% || goto :error
 
 for /l %%x in (1, 1, 5) do echo db.data_stream2.insert({"_id":%%x,"size":6,"name":"2%%x","lastchange":1,"source":"none","buf_id":0,"meta":{"test":10}}) | %mongo_exe% %database_name% || goto :error
diff --git a/tests/automatic/consumer/consumer_api_python/consumer_api.py b/tests/automatic/consumer/consumer_api_python/consumer_api.py
index 7016a8f46949892021398d1697023f12c7fee84b..179fd15327546302e11c023ad37357cf4521f304 100644
--- a/tests/automatic/consumer/consumer_api_python/consumer_api.py
+++ b/tests/automatic/consumer/consumer_api_python/consumer_api.py
@@ -36,6 +36,8 @@ def check_file_transfer_service(broker,group_id):
     broker.set_timeout(1000)
     data, meta = broker.get_by_id(1, group_id, meta_only=False)
     assert_eq(data.tostring().decode("utf-8"),"hello1","check_file_transfer_service ok")
+    data, meta = broker.get_by_id(1, group_id,"streamfts", meta_only=False)
+    assert_eq(data.tostring().decode("utf-8"),"hello1","check_file_transfer_service with auto size ok")
 
 def check_single(broker,group_id):
 
@@ -121,7 +123,7 @@ def check_single(broker,group_id):
     assert_metaname(meta,"21","get next stream2")
next stream2") substreams = broker.get_substream_list() - assert_eq(len(substreams),3,"number of substreams") + assert_eq(len(substreams),4,"number of substreams") assert_eq(substreams[0],"default","substreams_name1") assert_eq(substreams[1],"stream1","substreams_name2") assert_eq(substreams[2],"stream2","substreams_name3") diff --git a/tests/manual/python_tests/producer/aaa b/tests/manual/python_tests/producer/aaa new file mode 100644 index 0000000000000000000000000000000000000000..ce013625030ba8dba906f756967f9e9ca394464a --- /dev/null +++ b/tests/manual/python_tests/producer/aaa @@ -0,0 +1 @@ +hello diff --git a/tests/manual/python_tests/producer/cons.py b/tests/manual/python_tests/producer/cons.py new file mode 100644 index 0000000000000000000000000000000000000000..04048887f5bdfd3163140981115716d9ba39c0d1 --- /dev/null +++ b/tests/manual/python_tests/producer/cons.py @@ -0,0 +1,20 @@ +from __future__ import print_function + +import asapo_consumer +import sys + +token="IEfwsWa0GXky2S3MkxJSUHJT1sI8DD5teRdjBUXVRxk=" +source="127.0.0.1:8400" +path="/tmp/petra3/gpfs/p01/2019/data/asapo_test" +beamtime="asapo_test" + +broker = asapo_consumer.create_server_broker(source,path,False, beamtime,"test",token,1000) +group_id = broker.generate_group_id() + +data, meta = broker.get_by_id(3,group_id,"default", meta_only=False) + +print (meta) +print (data.tostring() ) + + +sys.exit(0) \ No newline at end of file diff --git a/tests/manual/python_tests/producer/short_test.py b/tests/manual/python_tests/producer/short_test.py new file mode 100644 index 0000000000000000000000000000000000000000..849b22c359c3a2039c262a0974839a7a0330237b --- /dev/null +++ b/tests/manual/python_tests/producer/short_test.py @@ -0,0 +1,39 @@ +from __future__ import print_function + +import asapo_producer +import sys +import time +import numpy as np +import threading +lock = threading.Lock() + + +endpoint = "127.0.0.1:8400" +beamtime = "asapo_test" +stream = "test" +token = "" +nthreads = 8 + +def callback(header,err): + lock.acquire() # to print + if err is not None: + print("could not sent: ",header,err) + else: + print ("successfuly sent: ",header) + lock.release() + +def assert_err(err): + if err is not None: + print(err) + sys.exit(1) + +producer = asapo_producer.create_producer(endpoint,beamtime,'auto', stream, token, nthreads ,0) + +producer.set_log_level("debug") + +#send meta only +producer.send_file(3, local_path = "/tmp/petra3/gpfs/p01/2019/data/asapo_test",exposed_path = "producer/aaa", + ingest_mode = asapo_producer.INGEST_MODE_TRANSFER_METADATA_ONLY, callback = callback) + +producer.wait_requests_finished(1000) +