Mirror of https://github.com/clearml/clearml-server (synced 2025-06-26 23:15:47 +00:00)
Fix: rapidjson dumps does not support ensure_ascii; only Encoder initialization does
Add task enqueue status
commit 251ee57ffd (parent 7e03104f1c)
@@ -668,10 +668,10 @@ class TaskBLL:
         return ChangeStatusRequest(
             task=task,
-            new_status=TaskStatus.created,
+            new_status=task.enqueue_status or TaskStatus.created,
             status_reason=status_reason,
             status_message=status_message,
-        ).execute()
+        ).execute(enqueue_status=None)

     @classmethod
     def dequeue(cls, task: Task, company_id: str, silent_fail=False):
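The hunk above is the restore side of the new field: when a task is pulled back out of a queue, its status reverts to the status captured at enqueue time instead of unconditionally to created, and the stored value is cleared in the same update. A minimal, self-contained sketch of that intent, using an illustrative stand-in task object rather than clearml-server's actual Task document or ChangeStatusRequest:

from dataclasses import dataclass
from typing import Optional

@dataclass
class TaskStub:
    # Stand-in for the Task document; only the fields needed here.
    status: str
    enqueue_status: Optional[str] = None

def status_update_on_unenqueue(task: TaskStub) -> dict:
    # Fall back to "created" when nothing was captured, and clear the marker
    # so it is only consumed once.
    return {
        "status": task.enqueue_status or "created",
        "enqueue_status": None,
    }

print(status_update_on_unenqueue(TaskStub(status="queued", enqueue_status="stopped")))
# -> {'status': 'stopped', 'enqueue_status': None}
print(status_update_on_unenqueue(TaskStub(status="queued")))
# -> {'status': 'created', 'enqueue_status': None}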
@@ -39,7 +39,14 @@ def archive_task(
     task = TaskBLL.get_task_with_access(
         task,
         company_id=company_id,
-        only=("id", "execution", "status", "project", "system_tags"),
+        only=(
+            "id",
+            "execution",
+            "status",
+            "project",
+            "system_tags",
+            "enqueue_status",
+        ),
         requires_write_access=True,
     )
     try:
@@ -82,7 +89,7 @@ def enqueue_task(
         status_reason=status_reason,
         status_message=status_message,
         allow_same_state_transition=False,
-    ).execute()
+    ).execute(enqueue_status=task.status)

     try:
         queue_bll.add_task(company_id=company_id, queue_id=queue_id, task_id=task.id)
@@ -94,7 +101,7 @@ def enqueue_task(
             new_status=task.status,
             force=True,
             status_reason="failed enqueueing",
-        ).execute()
+        ).execute(enqueue_status=None)
         raise

     # set the current queue ID in the task
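The two enqueue_task hunks above are the capture side and its rollback: the status the task held before moving to queued is stored in enqueue_status, and cleared again if adding the task to the execution queue fails. A hedged sketch of that flow; change_status and queue are hypothetical callables standing in for ChangeStatusRequest and the queue BLL:

def enqueue_sketch(task, queue, change_status):
    # Capture the pre-queue status so a later dequeue can restore it.
    previous_status = task.status
    change_status(task, new_status="queued", enqueue_status=previous_status)
    try:
        queue.add_task(task.id)
    except Exception:
        # Enqueueing failed: put the task back and drop the stored marker.
        change_status(task, new_status=previous_status, force=True, enqueue_status=None)
        raise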
@@ -220,7 +227,12 @@ def reset_task(
         status_reason="reset",
         status_message="reset",
     ).execute(
-        started=None, completed=None, published=None, active_duration=None, **updates,
+        started=None,
+        completed=None,
+        published=None,
+        active_duration=None,
+        enqueue_status=None,
+        **updates,
     )

     return dequeued, cleaned_up, res
@@ -261,6 +261,9 @@ class Task(AttributedDocument):
     runtime = SafeDictField(default=dict)
     models: Models = EmbeddedDocumentField(Models, default=Models)
     container = SafeMapField(field=StringField(default=""))
+    enqueue_status = StringField(
+        choices=get_options(TaskStatus), exclude_by_default=True
+    )

     def get_index_company(self) -> str:
         """
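The new document field in isolation: a string restricted to the known task statuses, excluded from default projections (which is why the only=... projections in the service hunks now request it explicitly). A rough mongoengine-style sketch, assuming get_options(TaskStatus) simply yields the status names; the exact status list and the exclude_by_default option are clearml-server specifics not reproduced here:

from mongoengine import Document, StringField

# Illustrative stand-in for get_options(TaskStatus); the real enumeration
# lives in clearml-server and may differ.
TASK_STATUS_OPTIONS = ("created", "queued", "in_progress", "stopped", "failed", "completed")

class TaskSketch(Document):
    # Holds the status a task had when it was enqueued, so that removing it
    # from the queue can restore that status later.
    enqueue_status = StringField(choices=TASK_STATUS_OPTIONS, null=True)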
@@ -634,7 +634,8 @@ class APICall(DataContainer):
         }
         if self.content_type.lower() == JSON_CONTENT_TYPE:
             try:
-                res = json.dumps(res, **(self._json_flags or {}))
+                func = json.dumps if self._json_flags.pop("ensure_ascii", True) else json.dumps_notascii
+                res = func(res, **(self._json_flags or {}))
             except Exception as ex:
                 # JSON serialization may fail, probably problem with data or error_data so pop it and try again
                 if not (self.result.data or self.result.error_data):
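The serialization fix dispatches to one of two pre-built encoders instead of forwarding ensure_ascii as a call-time flag, since the Encoder-based dumps only accepts that option at construction. A self-contained sketch of the same dispatch pattern, using stdlib json partials as stand-ins for the project's encoders:

import json as stdlib_json
from functools import partial

# Stand-ins for the project's two pre-configured encoders (the real ones are
# rapidjson.Encoder instances; see the json utility hunk further below).
dumps = partial(stdlib_json.dumps, ensure_ascii=True)
dumps_notascii = partial(stdlib_json.dumps, ensure_ascii=False)

def serialize(payload: dict, json_flags: dict) -> str:
    # ensure_ascii is consumed here to pick an encoder; it is not forwarded,
    # because the encoder only takes that option when it is constructed.
    encoder = dumps if json_flags.pop("ensure_ascii", True) else dumps_notascii
    return encoder(payload, **json_flags)

print(serialize({"name": "café"}, {"ensure_ascii": False}))  # {"name": "café"}
print(serialize({"name": "café"}, {}))                       # {"name": "caf\u00e9"}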
@@ -893,7 +893,7 @@ def dequeue(call: APICall, company_id, request: UpdateRequest):
     task = TaskBLL.get_task_with_access(
         request.task,
         company_id=company_id,
-        only=("id", "execution", "status", "project"),
+        only=("id", "execution", "status", "project", "enqueue_status"),
         requires_write_access=True,
     )
     res = DequeueResponse(
@@ -984,7 +984,7 @@ def archive(call: APICall, company_id, request: ArchiveRequest):
     tasks = TaskBLL.assert_exists(
         company_id,
         task_ids=request.tasks,
-        only=("id", "execution", "status", "project", "system_tags"),
+        only=("id", "execution", "status", "project", "system_tags", "enqueue_status"),
     )
     archived = 0
     for task in tasks:
@@ -6,4 +6,5 @@ import rapidjson
 DATETIME_MODE = rapidjson.DM_ISO8601 | rapidjson.DM_NAIVE_IS_UTC

 dumps = rapidjson.Encoder(datetime_mode=DATETIME_MODE)
+dumps_notascii = rapidjson.Encoder(datetime_mode=DATETIME_MODE, ensure_ascii=False)
 loads = rapidjson.Decoder(datetime_mode=DATETIME_MODE)
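A short usage sketch of the two module-level encoders. With python-rapidjson, ensure_ascii is a constructor option of rapidjson.Encoder, which is why a second pre-built encoder is added rather than the flag being passed per call; the sample value and printed results below are illustrative:

import rapidjson

DATETIME_MODE = rapidjson.DM_ISO8601 | rapidjson.DM_NAIVE_IS_UTC

dumps = rapidjson.Encoder(datetime_mode=DATETIME_MODE)
dumps_notascii = rapidjson.Encoder(datetime_mode=DATETIME_MODE, ensure_ascii=False)

payload = {"name": "café"}
print(dumps(payload))           # non-ASCII escaped, e.g. {"name":"caf\u00e9"}
print(dumps_notascii(payload))  # UTF-8 kept, e.g. {"name":"café"}

# An Encoder instance does not accept ensure_ascii at call time, which is what
# the commit title refers to:
#   dumps(payload, ensure_ascii=False)  # would raise TypeError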