Support order parameter in events.get_task_log

allegroai 2020-08-10 08:37:41 +03:00
parent 3f84e60a1f
commit 42ba696518
4 changed files with 46 additions and 16 deletions
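For callers, the net effect of this commit is a new optional "order" field on the events.get_task_log endpoint (the version 2.9 handler). A rough sketch of a request using a plain HTTP client; the host, port, authentication credentials and the "<task-id>" value are placeholders and assumptions, while the endpoint name and field names come from this commit:

import requests

# Hypothetical server address and credentials; adjust to your deployment.
resp = requests.post(
    "http://localhost:8008/events.get_task_log",
    json={
        "task": "<task-id>",       # required
        "batch_size": 100,         # default is 500
        "navigate_earlier": True,  # default
        "order": "asc",            # new in this commit: force ascending timestamps
    },
    auth=("<access_key>", "<secret_key>"),
)
print(resp.status_code, resp.json())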


@@ -1,3 +1,4 @@
from enum import auto
from typing import Sequence, Optional
from jsonmodels import validators
@@ -8,6 +9,7 @@ from jsonmodels.validators import Length, Min, Max
from apimodels import ListField, IntField, ActualEnumField
from bll.event.event_metrics import EventType
from bll.event.scalar_key import ScalarKeyEnum
from utilities.stringenum import StringEnum


class HistogramRequestBase(Base):
@@ -40,11 +42,17 @@ class DebugImagesRequest(Base):
    scroll_id: str = StringField()


class LogOrderEnum(StringEnum):
    asc = auto()
    desc = auto()


class LogEventsRequest(Base):
    task: str = StringField(required=True)
    batch_size: int = IntField(default=500)
    navigate_earlier: bool = BoolField(default=True)
    from_timestamp: Optional[int] = IntField()
    order: Optional[str] = ActualEnumField(LogOrderEnum)


class IterationEvents(Base):
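The new LogOrderEnum is built on utilities.stringenum.StringEnum together with enum.auto(). That StringEnum implementation is not part of this diff; the standalone sketch below assumes the common pattern in which auto() resolves to the member name, so that the members compare equal to the strings "asc" and "desc":

from enum import Enum, auto


class StringEnum(str, Enum):
    # Assumed behaviour of utilities.stringenum.StringEnum:
    # auto() produces the member name as the string value.
    def _generate_next_value_(name, start, count, last_values):
        return name


class LogOrderEnum(StringEnum):
    asc = auto()
    desc = auto()


assert LogOrderEnum.asc == "asc" and LogOrderEnum.desc == "desc"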


@@ -548,11 +548,16 @@
                }
                navigate_earlier {
                    type: boolean
                    description: "If set then log events are retrieved from the latest to the earliest ones (in timestamp descending order). Otherwise from the earliest to the latest ones (in timestamp ascending order). The default is True"
                    description: "If set then log events are retrieved from the latest to the earliest ones (in timestamp descending order, unless order='asc'). Otherwise from the earliest to the latest ones (in timestamp ascending order, unless order='desc'). The default is True"
                }
                from_timestamp {
                    type: number
                    description: "Epoch time in UTC ms to use as the navigation start"
                    description: "Epoch time in UTC ms to use as the navigation start. Optional. If not provided, reference timestamp is determined by the 'navigate_earlier' parameter (if true, reference timestamp is the last timestamp and if false, reference timestamp is the first timestamp)"
                }
                order {
                    type: string
                    description: "If set, changes the order in which log events are returned based on the value of 'navigate_earlier'"
                    enum: [asc, desc]
                }
            }
        }
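Read together, the 'navigate_earlier' and 'order' descriptions boil down to one rule: navigate_earlier selects the default direction, and an explicit order overrides it. A hypothetical helper (not part of the commit; the name effective_order is illustrative) that states the rule:

def effective_order(navigate_earlier=True, order=None):
    # Default direction: newest-first when navigating earlier, oldest-first otherwise.
    default = "desc" if navigate_earlier else "asc"
    # An explicit 'asc'/'desc' takes precedence over the default.
    return order or default


# effective_order(True)          -> "desc"
# effective_order(True, "asc")   -> "asc"
# effective_order(False)         -> "asc"
# effective_order(False, "desc") -> "desc"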


@@ -12,6 +12,7 @@ from apimodels.events import (
    IterationEvents,
    TaskMetricsRequest,
    LogEventsRequest,
    LogOrderEnum,
)
from bll.event import EventBLL
from bll.event.event_metrics import EventMetrics
@@ -24,7 +25,7 @@ event_bll = EventBLL()
@endpoint("events.add")
def add(call: APICall, company_id, req_model):
def add(call: APICall, company_id, _):
    data = call.data.copy()
    allow_locked = data.pop("allow_locked", False)
    added, err_count, err_info = event_bll.add_events(
@@ -35,7 +36,7 @@ def add(call: APICall, company_id, req_model):
@endpoint("events.add_batch")
def add_batch(call: APICall, company_id, req_model):
def add_batch(call: APICall, company_id, _):
    events = call.batched_data
    if events is None or len(events) == 0:
        raise errors.bad_request.BatchContainsNoItems()
@@ -46,7 +47,7 @@ def add_batch(call: APICall, company_id, req_model):
@endpoint("events.get_task_log", required_fields=["task"])
def get_task_log_v1_5(call, company_id, req_model):
def get_task_log_v1_5(call, company_id, _):
    task_id = call.data["task"]
    task = task_bll.assert_exists(
        company_id, task_id, allow_public=True, only=("company",)
@@ -68,7 +69,7 @@ def get_task_log_v1_5(call, company_id, req_model):
@endpoint("events.get_task_log", min_version="1.7", required_fields=["task"])
def get_task_log_v1_7(call, company_id, req_model):
def get_task_log_v1_7(call, company_id, _):
    task_id = call.data["task"]
    task = task_bll.assert_exists(
        company_id, task_id, allow_public=True, only=("company",)
@@ -99,8 +100,8 @@ def get_task_log_v1_7(call, company_id, req_model):
@endpoint("events.get_task_log", min_version="2.9", request_data_model=LogEventsRequest)
def get_task_log(call, company_id, req_model: LogEventsRequest):
    task_id = req_model.task
def get_task_log(call, company_id, request: LogEventsRequest):
    task_id = request.task
    task = task_bll.assert_exists(
        company_id, task_id, allow_public=True, only=("company",)
    )[0]
@@ -108,11 +109,19 @@ def get_task_log(call, company_id, req_model: LogEventsRequest):
    res = event_bll.log_events_iterator.get_task_events(
        company_id=task.company,
        task_id=task_id,
        batch_size=req_model.batch_size,
        navigate_earlier=req_model.navigate_earlier,
        from_timestamp=req_model.from_timestamp,
        batch_size=request.batch_size,
        navigate_earlier=request.navigate_earlier,
        from_timestamp=request.from_timestamp,
    )
    if (
        request.order and (
            (request.navigate_earlier and request.order == LogOrderEnum.asc)
            or (not request.navigate_earlier and request.order == LogOrderEnum.desc)
        )
    ):
        res.events.reverse()
    call.result.data = dict(
        events=res.events, returned=len(res.events), total=res.total_events
    )
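The condition above reverses the result set only when the requested order contradicts the direction in which get_task_events fetched the events. An illustrative restatement of that check (not part of the commit; needs_reversal is a hypothetical name, and plain strings stand in for LogOrderEnum):

def needs_reversal(navigate_earlier, order):
    # True when the fetched direction (implied by navigate_earlier)
    # disagrees with the explicitly requested order.
    return bool(order) and (
        (navigate_earlier and order == "asc")
        or (not navigate_earlier and order == "desc")
    )


# needs_reversal(True, None)    -> False  (newest-first, kept as fetched)
# needs_reversal(True, "asc")   -> True   (flipped to oldest-first)
# needs_reversal(False, None)   -> False  (oldest-first, kept as fetched)
# needs_reversal(False, "desc") -> True   (flipped to newest-first)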


@@ -235,9 +235,10 @@ class TestTaskEvents(TestService):
        )
        # test backwards navigation
        self._assert_log_events(
            task=task, timestamp=ftime, navigate_earlier=False
        )
        self._assert_log_events(task=task, timestamp=ftime, navigate_earlier=False)
        # test order
        self._assert_log_events(task=task, order="asc")

    def _assert_log_events(
        self,
@@ -261,7 +262,10 @@ class TestTaskEvents(TestService):
        self.assertEqual(len(res.events), unique_events)
        if res.events:
            cmp_operator = operator.ge
            if not extra_params.get("navigate_earlier", True):
            if (
                not extra_params.get("navigate_earlier", True)
                or extra_params.get("order", None) == "asc"
            ):
                cmp_operator = operator.le
            self.assertTrue(
                all(
@@ -270,7 +274,11 @@ class TestTaskEvents(TestService):
                )
            )
        return (res.events[0].timestamp, res.events[-1].timestamp) if res.events else (None, None)
        return (
            (res.events[0].timestamp, res.events[-1].timestamp)
            if res.events
            else (None, None)
        )

    def test_task_metric_value_intervals_keys(self):
        metric = "Metric1"