Commit 7abb8f95 authored by Mikhail Karnevskiy

Merge branch 'fix_create_producer_consumer_docstring' into 'develop'

Fix docstring for create_consumer/create_producer

See merge request asapo/asapo!210
parents 05c67ed4 42d8f05c
@@ -448,8 +448,8 @@ cdef class PyConsumer:
if op == "next":
with nogil:
dataset = self.c_consumer.get().GetNextDataset(b_group_id, min_size,b_stream,&err)
elif op == "next_available":
dataset = self.c_consumer.get().GetNextDataset(b_group_id, min_size,b_stream,True,&err)
elif op == "last" and group_id == "":
with nogil:
dataset = self.c_consumer.get().GetLastDataset(min_size,b_stream, &err)
@@ -468,9 +468,9 @@ cdef class PyConsumer:
return res
def get_next_dataset(self, group_id, min_size = 0, stream = "default", ordered=True):
""" Return dataset, as a list of substream-messages metadata, that corresponds to different
substreams of a given dataset. Each metadata is a python dict, that
contains user part (if given) and service part. Corresponding data can be retrieved
via `retrieve_data` function.
Function iterates across stream for given consumer group (create one, if not exists).
For each consumer group each message will be returned only once.
@@ -492,7 +492,7 @@ cdef class PyConsumer:
if ordered:
return self._op_dataset("next_available",group_id,stream,min_size,0)
else:
return self._op_dataset("next",group_id,stream,min_size,0)
def get_last_dataset(self, min_size = 0, stream = "default", group_id = ""):
return self._op_dataset("last",group_id,stream,min_size,0)
def get_dataset_by_id(self, uint64_t id, min_size = 0, stream = "default"):
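The docstring hunks above describe the dataset-level read path: get_next_dataset returns the per-substream metadata of one dataset for a consumer group, and the actual payloads are fetched separately with retrieve_data. A rough consumption loop is sketched below; the connection values, the group id and the "content" key of the returned object are illustrative assumptions, not something this diff guarantees.

import asapo_consumer

# Placeholder connection settings; see the create_consumer docstring further below.
consumer = asapo_consumer.create_consumer(
    "localhost:8400", "/tmp/asapo/data", True, "test_beamtime", "detector", "my_token", 3000)
group_id = "my_consumer_group"  # per the docstring, the group is created if it does not exist

# Each call returns the metadata of one dataset for this consumer group; every
# message is delivered only once per group. Each metadata dict carries the user
# part (if given) plus the service part.
dataset = consumer.get_next_dataset(group_id, stream="default", ordered=True)
for meta in dataset["content"]:          # returned layout assumed here
    data = consumer.retrieve_data(meta)  # fetch the actual message payload
    print(len(data))

# The latest complete dataset can also be read without a consumer group:
last = consumer.get_last_dataset(stream="default")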
@@ -552,14 +552,12 @@ def create_consumer(server_name,source_path,has_filesystem,beamtime_id,data_sour
:type source_path: string
:param has_filesystem: True if the source_path is accessible for client, otherwise will use file asapo transfer service to get data
:type has_filesystem: bool
:param beamline: beamline name, can be "auto" if beamtime_id is given
:type beamline: string
:param beamtime_id: ID of the beamtime the data belongs to
:type beamtime_id: string
:param data_source: name of the data source that produces data
:type data_source: string
:param token: authorization token
:type token: string
:param nthreads: ingest mode flag
:type nthreads: int
:param timeout_ms: send requests timeout in milliseconds
:type timeout_ms: int
:param instance_id: instance id, can be "auto" (default), will create a combination out of hostname and pid
......
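For reference, a minimal create_consumer call matching the parameter list documented above. All concrete values are placeholders; the positional order (and whether nthreads, instance_id or other arguments must be passed explicitly) should be verified against the installed asapo_consumer, since the def line is truncated in this hunk.

import asapo_consumer

consumer = asapo_consumer.create_consumer(
    "localhost:8400",    # server_name: address of the asapo endpoint
    "/tmp/asapo/data",   # source_path: filesystem path of the data
    True,                # has_filesystem: client can read source_path directly
    "test_beamtime",     # beamtime_id
    "detector",          # data_source
    "my_token",          # token: authorization token
    3000)                # timeout_ms: request timeout in milliseconds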
@@ -472,6 +472,8 @@ def create_producer(endpoint,type,beamtime_id,beamline,data_source,token,nthread
:type endpoint: string
:param type: source type, "raw" to write to "raw" folder in beamline filesystem,"processed" to write to "processed" folder in core filesystem
:type type: string
:param beamtime_id: ID of the beamtime the data belongs to, can be "auto" if beamline is given
:type beamtime_id: string
:param beamline: beamline name, can be "auto" if beamtime_id is given
:type beamline: string
:param data_source: name of the data source that produces data
......
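The producer docstring above notes that beamtime_id and beamline can each be "auto" when the other is given. Below is a sketch of a corresponding create_producer call; the values are placeholders and the positional order follows the parameter list in the docstring (the def line is truncated here, so verify against the installed asapo_producer).

import asapo_producer

producer = asapo_producer.create_producer(
    "localhost:8400",   # endpoint
    "processed",        # type: write to the "processed" folder in the core filesystem
    "test_beamtime",    # beamtime_id (could be "auto" if beamline were given explicitly)
    "auto",             # beamline: resolved from beamtime_id
    "detector",         # data_source
    "my_token",         # token
    1,                  # nthreads
    60000)              # timeout_ms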