Unify batch operations

This commit is contained in:
allegroai 2021-05-03 18:03:54 +03:00
parent 251ee57ffd
commit e2f265b4bc
11 changed files with 117 additions and 96 deletions

View File

@ -3,7 +3,7 @@ from typing import Sequence
from jsonmodels.models import Base from jsonmodels.models import Base
from jsonmodels.validators import Length from jsonmodels.validators import Length
from apiserver.apimodels import ListField from apiserver.apimodels import ListField, IntField
class BatchRequest(Base): class BatchRequest(Base):
@ -11,4 +11,5 @@ class BatchRequest(Base):
class BatchResponse(Base): class BatchResponse(Base):
succeeded: int = IntField()
failures: Sequence[dict] = ListField([dict]) failures: Sequence[dict] = ListField([dict])

View File

@ -55,12 +55,7 @@ class ModelsArchiveManyRequest(BatchRequest):
pass pass
class ModelsArchiveManyResponse(BatchResponse):
archived = fields.IntField(required=True)
class ModelsDeleteManyResponse(BatchResponse): class ModelsDeleteManyResponse(BatchResponse):
deleted = fields.IntField()
urls = fields.ListField([str]) urls = fields.ListField([str])
@ -84,7 +79,6 @@ class ModelsPublishManyRequest(BatchRequest):
class ModelsPublishManyResponse(BatchResponse): class ModelsPublishManyResponse(BatchResponse):
published = fields.IntField(required=True)
published_tasks = fields.ListField([ModelTaskPublishResponse]) published_tasks = fields.ListField([ModelTaskPublishResponse])

View File

@ -7,7 +7,7 @@ from jsonmodels.validators import Enum, Length
from apiserver.apimodels import DictField, ListField from apiserver.apimodels import DictField, ListField
from apiserver.apimodels.base import UpdateResponse from apiserver.apimodels.base import UpdateResponse
from apiserver.apimodels.batch import BatchRequest, BatchResponse from apiserver.apimodels.batch import BatchRequest
from apiserver.database.model.task.task import ( from apiserver.database.model.task.task import (
TaskType, TaskType,
ArtifactModes, ArtifactModes,
@ -241,26 +241,14 @@ class StopManyRequest(TaskBatchRequest):
force = BoolField(default=False) force = BoolField(default=False)
class StopManyResponse(BatchResponse):
stopped = IntField(required=True)
class ArchiveManyRequest(TaskBatchRequest): class ArchiveManyRequest(TaskBatchRequest):
pass pass
class ArchiveManyResponse(BatchResponse):
archived = IntField(required=True)
class EnqueueManyRequest(TaskBatchRequest): class EnqueueManyRequest(TaskBatchRequest):
queue = StringField() queue = StringField()
class EnqueueManyResponse(BatchResponse):
queued = IntField()
class DeleteManyRequest(TaskBatchRequest): class DeleteManyRequest(TaskBatchRequest):
move_to_trash = BoolField(default=True) move_to_trash = BoolField(default=True)
return_file_urls = BoolField(default=False) return_file_urls = BoolField(default=False)

View File

@ -106,7 +106,7 @@ def validate_status_change(current_status, new_status):
state_machine = { state_machine = {
TaskStatus.created: {TaskStatus.queued, TaskStatus.in_progress}, TaskStatus.created: {TaskStatus.queued, TaskStatus.in_progress},
TaskStatus.queued: {TaskStatus.created, TaskStatus.in_progress}, TaskStatus.queued: {TaskStatus.created, TaskStatus.in_progress, TaskStatus.stopped},
TaskStatus.in_progress: { TaskStatus.in_progress: {
TaskStatus.stopped, TaskStatus.stopped,
TaskStatus.failed, TaskStatus.failed,

View File

@ -1,7 +1,10 @@
import os
import re
from datetime import datetime from datetime import datetime
from pymongo.collection import Collection from pymongo.collection import Collection
from pymongo.database import Database from pymongo.database import Database
from pymongo.errors import DuplicateKeyError
from apiserver.database.model.task.task import TaskModelTypes, TaskModelNames from apiserver.database.model.task.task import TaskModelTypes, TaskModelNames
from apiserver.services.utils import escape_dict from apiserver.services.utils import escape_dict
@ -94,8 +97,39 @@ def _migrate_model_labels(db: Database):
tasks.update_one({"_id": doc["_id"]}, {"$set": set_commands}) tasks.update_one({"_id": doc["_id"]}, {"$set": set_commands})
def _migrate_project_names(db: Database):
    """Replace '/' characters in top-level project names with underscores.

    Only projects whose name contains '/' and whose ``path`` is empty/unset
    are touched. On a name collision (DuplicateKeyError) the number of
    underscores substituted per '/' is increased and the rename is retried,
    up to CLEARML_MIGRATION_PROJECT_RENAME_MAX_TRIES times (default 10).

    :param db: MongoDB backend database containing the "project" collection.
    """
    projects: Collection = db["project"]
    regx = re.compile("/", re.IGNORECASE)
    for doc in projects.find(filter={"name": regx, "path": {"$in": [None, []]}}):
        name = doc.get("name")
        if not name:
            continue
        max_tries = int(os.getenv("CLEARML_MIGRATION_PROJECT_RENAME_MAX_TRIES", 10))
        for iteration in range(max_tries):
            # Each retry widens the separator: "_", "__", "___", ...
            new_name = name.replace("/", "_" * (iteration + 1))
            try:
                projects.update_one(
                    {"_id": doc["_id"]}, {"$set": {"name": new_name}}
                )
                break
            except DuplicateKeyError:
                # Another project already has this name; retry with more "_"
                pass
        else:
            # for/else: reached only when no attempt succeeded (no break).
            # The original code printed this even on a successful rename
            # during the final attempt.
            print(
                f"Could not upgrade the name {name} of the project {doc.get('_id')}"
            )
def migrate_backend(db: Database): def migrate_backend(db: Database):
_migrate_task_models(db) _migrate_task_models(db)
_migrate_docker_cmd(db) _migrate_docker_cmd(db)
_migrate_model_labels(db) _migrate_model_labels(db)
_migrate_project_names(db)
_drop_all_indices_from_collections(db, ["task*"]) _drop_all_indices_from_collections(db, ["task*"])

View File

@ -37,7 +37,6 @@ batch_operation {
required: [ids] required: [ids]
properties { properties {
ids { ids {
description: Entities to move
type: array type: array
items {type: string} items {type: string}
} }
@ -46,6 +45,9 @@ batch_operation {
response { response {
type: object type: object
properties { properties {
succeeded {
type: integer
}
failures { failures {
type: array type: array
items { items {

View File

@ -658,21 +658,21 @@ publish_many {
"2.13": ${_definitions.batch_operation} { "2.13": ${_definitions.batch_operation} {
description: Publish models description: Publish models
request { request {
force_publish_task { properties {
description: "Publish the associated tasks (if exist) even if they are not in the 'stopped' state. Optional, the default value is False." ids.description: "IDs of models to publish"
type: boolean force_publish_task {
} description: "Publish the associated tasks (if exist) even if they are not in the 'stopped' state. Optional, the default value is False."
publish_tasks { type: boolean
description: "Indicates that the associated tasks (if exist) should be published. Optional, the default value is True." }
type: boolean publish_tasks {
description: "Indicates that the associated tasks (if exist) should be published. Optional, the default value is True."
type: boolean
}
} }
} }
response { response {
properties { properties {
published { succeeded.description: "Number of models published"
description: "Number of models published"
type: integer
}
published_tasks { published_tasks {
type: array type: array
items: ${_definitions.published_task_item} items: ${_definitions.published_task_item}
@ -718,12 +718,14 @@ set_ready {
archive_many { archive_many {
"2.13": ${_definitions.batch_operation} { "2.13": ${_definitions.batch_operation} {
description: Archive models description: Archive models
request {
properties {
ids.description: "IDs of models to archive"
}
}
response { response {
properties { properties {
archived { succeeded.description: "Number of models archived"
description: "Number of models archived"
type: integer
}
} }
} }
} }
@ -732,18 +734,18 @@ delete_many {
"2.13": ${_definitions.batch_operation} { "2.13": ${_definitions.batch_operation} {
description: Delete models description: Delete models
request { request {
force { properties {
description: """Force. Required if there are tasks that use the model as an execution model, or if the model's creating task is published. ids.description: "IDs of models to delete"
force {
description: """Force. Required if there are tasks that use the model as an execution model, or if the model's creating task is published.
""" """
type: boolean type: boolean
}
} }
} }
response { response {
properties { properties {
deleted { succeeded.description: "Number of models deleted"
description: "Number of models deleted"
type: integer
}
urls { urls {
description: "The urls of the deleted model files" description: "The urls of the deleted model files"
type: array type: array

View File

@ -1421,6 +1421,7 @@ reset_many {
description: Reset tasks description: Reset tasks
request { request {
properties { properties {
ids.description: "IDs of the tasks to reset"
force = ${_references.force_arg} { force = ${_references.force_arg} {
description: "If not true, call fails if the task status is 'completed'" description: "If not true, call fails if the task status is 'completed'"
} }
@ -1441,10 +1442,7 @@ reset_many {
} }
response { response {
properties { properties {
reset { succeeded.description: "Number of tasks reset"
description: "Number of tasks reset"
type: integer
}
dequeued { dequeued {
description: "Number of tasks dequeued" description: "Number of tasks dequeued"
type: object type: object
@ -1467,6 +1465,7 @@ delete_many {
description: Delete tasks description: Delete tasks
request { request {
properties { properties {
ids.description: "IDs of the tasks to delete"
move_to_trash { move_to_trash {
description: "Move task to trash instead of deleting it. For internal use only, tasks in the trash are not visible from the API and cannot be restored!" description: "Move task to trash instead of deleting it. For internal use only, tasks in the trash are not visible from the API and cannot be restored!"
type: boolean type: boolean
@ -1487,10 +1486,7 @@ delete_many {
} }
response { response {
properties { properties {
deleted { succeeded.description: "Number of tasks deleted"
description: "Number of tasks deleted"
type: integer
}
updated_children { updated_children {
description: "Number of child tasks whose parent property was updated" description: "Number of child tasks whose parent property was updated"
type: integer type: integer
@ -1615,11 +1611,13 @@ archive {
archive_many { archive_many {
"2.13": ${_definitions.change_many_request} { "2.13": ${_definitions.change_many_request} {
description: Archive tasks description: Archive tasks
request {
properties {
ids.description: "IDs of the tasks to archive"
}
response { response {
properties { properties {
archived { succeeded.description: "Number of tasks archived"
description: "Number of tasks archived"
type: integer
} }
} }
} }
@ -1668,6 +1666,7 @@ stop_many {
description: "Request to stop running tasks" description: "Request to stop running tasks"
request { request {
properties { properties {
ids.description: "IDs of the tasks to stop"
force = ${_references.force_arg} { force = ${_references.force_arg} {
description: "If not true, call fails if the task status is not 'in_progress'" description: "If not true, call fails if the task status is not 'in_progress'"
} }
@ -1675,10 +1674,7 @@ stop_many {
} }
response { response {
properties { properties {
stopped { succeeded.description: "Number of tasks stopped"
description: "Number of tasks stopped"
type: integer
}
} }
} }
} }
@ -1754,6 +1750,7 @@ publish_many {
description: Publish tasks description: Publish tasks
request { request {
properties { properties {
ids.description: "IDs of the tasks to publish"
force = ${_references.force_arg} { force = ${_references.force_arg} {
description: "If not true, call fails if the task status is not 'stopped'" description: "If not true, call fails if the task status is not 'stopped'"
} }
@ -1765,10 +1762,7 @@ publish_many {
} }
response { response {
properties { properties {
published { succeeded.description: "Number of tasks published"
description: "Number of tasks published"
type: integer
}
} }
} }
} }
@ -1812,12 +1806,18 @@ Fails if the following parameters in the task were not filled:
enqueue_many { enqueue_many {
"2.13": ${_definitions.change_many_request} { "2.13": ${_definitions.change_many_request} {
description: Enqueue tasks description: Enqueue tasks
request {
properties {
ids.description: "IDs of the tasks to enqueue"
queue {
description: "Queue id. If not provided, tasks are added to the default queue."
type: string
}
}
}
response { response {
properties { properties {
enqueued { succeeded.description: "Number of tasks enqueued"
description: "Number of tasks enqueued"
type: integer
}
} }
} }
} }

View File

@ -9,6 +9,7 @@ from apiserver import database
from apiserver.apierrors import errors from apiserver.apierrors import errors
from apiserver.apierrors.errors.bad_request import InvalidModelId from apiserver.apierrors.errors.bad_request import InvalidModelId
from apiserver.apimodels.base import UpdateResponse, MakePublicRequest, MoveRequest from apiserver.apimodels.base import UpdateResponse, MakePublicRequest, MoveRequest
from apiserver.apimodels.batch import BatchResponse
from apiserver.apimodels.models import ( from apiserver.apimodels.models import (
CreateModelRequest, CreateModelRequest,
CreateModelResponse, CreateModelResponse,
@ -24,7 +25,6 @@ from apiserver.apimodels.models import (
ModelsDeleteManyRequest, ModelsDeleteManyRequest,
ModelsDeleteManyResponse, ModelsDeleteManyResponse,
ModelsArchiveManyRequest, ModelsArchiveManyRequest,
ModelsArchiveManyResponse,
) )
from apiserver.bll.model import ModelBLL from apiserver.bll.model import ModelBLL
from apiserver.bll.organization import OrgBLL, Tags from apiserver.bll.organization import OrgBLL, Tags
@ -533,7 +533,7 @@ def publish_many(call: APICall, company_id, request: ModelsPublishManyRequest):
) )
call.result.data_model = ModelsPublishManyResponse( call.result.data_model = ModelsPublishManyResponse(
published=res.published, published_tasks=res.published_tasks, failures=failures, succeeded=res.published, published_tasks=res.published_tasks, failures=failures,
) )
@ -581,14 +581,14 @@ def delete(call: APICall, company_id, request: ModelsDeleteManyRequest):
res.urls.discard(None) res.urls.discard(None)
call.result.data_model = ModelsDeleteManyResponse( call.result.data_model = ModelsDeleteManyResponse(
deleted=res.deleted, urls=list(res.urls), failures=failures, succeeded=res.deleted, urls=list(res.urls), failures=failures,
) )
@endpoint( @endpoint(
"models.archive_many", "models.archive_many",
request_data_model=ModelsArchiveManyRequest, request_data_model=ModelsArchiveManyRequest,
response_data_model=ModelsArchiveManyResponse, response_data_model=BatchResponse,
) )
def archive_many(call: APICall, company_id, request: ModelsArchiveManyRequest): def archive_many(call: APICall, company_id, request: ModelsArchiveManyRequest):
archived, failures = run_batch_operation( archived, failures = run_batch_operation(
@ -596,9 +596,7 @@ def archive_many(call: APICall, company_id, request: ModelsArchiveManyRequest):
ids=request.ids, ids=request.ids,
init_res=0, init_res=0,
) )
call.result.data_model = ModelsArchiveManyResponse( call.result.data_model = BatchResponse(succeeded=archived, failures=failures)
archived=archived, failures=failures,
)
@endpoint("models.make_public", min_version="2.9", request_data_model=MakePublicRequest) @endpoint("models.make_public", min_version="2.9", request_data_model=MakePublicRequest)

View File

@ -17,6 +17,7 @@ from apiserver.apimodels.base import (
MakePublicRequest, MakePublicRequest,
MoveRequest, MoveRequest,
) )
from apiserver.apimodels.batch import BatchResponse
from apiserver.apimodels.tasks import ( from apiserver.apimodels.tasks import (
StartedResponse, StartedResponse,
ResetResponse, ResetResponse,
@ -47,13 +48,10 @@ from apiserver.apimodels.tasks import (
ArchiveRequest, ArchiveRequest,
AddUpdateModelRequest, AddUpdateModelRequest,
DeleteModelsRequest, DeleteModelsRequest,
StopManyResponse,
StopManyRequest, StopManyRequest,
EnqueueManyRequest, EnqueueManyRequest,
EnqueueManyResponse,
ResetManyRequest, ResetManyRequest,
ArchiveManyRequest, ArchiveManyRequest,
ArchiveManyResponse,
DeleteManyRequest, DeleteManyRequest,
PublishManyRequest, PublishManyRequest,
) )
@ -299,7 +297,7 @@ class StopRes:
@endpoint( @endpoint(
"tasks.stop_many", "tasks.stop_many",
request_data_model=StopManyRequest, request_data_model=StopManyRequest,
response_data_model=StopManyResponse, response_data_model=BatchResponse,
) )
def stop_many(call: APICall, company_id, request: StopManyRequest): def stop_many(call: APICall, company_id, request: StopManyRequest):
res, failures = run_batch_operation( res, failures = run_batch_operation(
@ -313,7 +311,7 @@ def stop_many(call: APICall, company_id, request: StopManyRequest):
ids=request.ids, ids=request.ids,
init_res=StopRes(), init_res=StopRes(),
) )
call.result.data_model = StopManyResponse(stopped=res.stopped, failures=failures) call.result.data_model = BatchResponse(succeeded=res.stopped, failures=failures)
@endpoint( @endpoint(
@ -867,7 +865,7 @@ class EnqueueRes:
@endpoint( @endpoint(
"tasks.enqueue_many", "tasks.enqueue_many",
request_data_model=EnqueueManyRequest, request_data_model=EnqueueManyRequest,
response_data_model=EnqueueManyResponse, response_data_model=BatchResponse,
) )
def enqueue_many(call: APICall, company_id, request: EnqueueManyRequest): def enqueue_many(call: APICall, company_id, request: EnqueueManyRequest):
res, failures = run_batch_operation( res, failures = run_batch_operation(
@ -881,7 +879,7 @@ def enqueue_many(call: APICall, company_id, request: EnqueueManyRequest):
ids=request.ids, ids=request.ids,
init_res=EnqueueRes(), init_res=EnqueueRes(),
) )
call.result.data_model = EnqueueManyResponse(queued=res.queued, failures=failures) call.result.data_model = BatchResponse(succeeded=res.queued, failures=failures)
@endpoint( @endpoint(
@ -971,7 +969,7 @@ def reset_many(call: APICall, company_id, request: ResetManyRequest):
else: else:
cleanup_res = {} cleanup_res = {}
call.result.data = dict( call.result.data = dict(
reset=res.reset, dequeued=res.dequeued, **cleanup_res, failures=failures, succeeded=res.reset, dequeued=res.dequeued, **cleanup_res, failures=failures,
) )
@ -1001,7 +999,7 @@ def archive(call: APICall, company_id, request: ArchiveRequest):
@endpoint( @endpoint(
"tasks.archive_many", "tasks.archive_many",
request_data_model=ArchiveManyRequest, request_data_model=ArchiveManyRequest,
response_data_model=ArchiveManyResponse, response_data_model=BatchResponse,
) )
def archive_many(call: APICall, company_id, request: ArchiveManyRequest): def archive_many(call: APICall, company_id, request: ArchiveManyRequest):
archived, failures = run_batch_operation( archived, failures = run_batch_operation(
@ -1014,7 +1012,7 @@ def archive_many(call: APICall, company_id, request: ArchiveManyRequest):
ids=request.ids, ids=request.ids,
init_res=0, init_res=0,
) )
call.result.data_model = ArchiveManyResponse(archived=archived, failures=failures) call.result.data_model = BatchResponse(succeeded=archived, failures=failures)
@endpoint("tasks.delete", request_data_model=DeleteRequest) @endpoint("tasks.delete", request_data_model=DeleteRequest)
@ -1067,7 +1065,7 @@ def delete_many(call: APICall, company_id, request: DeleteManyRequest):
_reset_cached_tags(company_id, projects=list(res.projects)) _reset_cached_tags(company_id, projects=list(res.projects))
cleanup_res = attr.asdict(res.cleanup_res) if res.cleanup_res else {} cleanup_res = attr.asdict(res.cleanup_res) if res.cleanup_res else {}
call.result.data = dict(deleted=res.deleted, **cleanup_res, failures=failures) call.result.data = dict(succeeded=res.deleted, **cleanup_res, failures=failures)
@endpoint( @endpoint(
@ -1095,7 +1093,11 @@ class PublishRes:
return PublishRes(published=self.published + 1) return PublishRes(published=self.published + 1)
@endpoint("tasks.publish_many", request_data_model=PublishManyRequest) @endpoint(
"tasks.publish_many",
request_data_model=PublishManyRequest,
response_data_model=BatchResponse,
)
def publish_many(call: APICall, company_id, request: PublishManyRequest): def publish_many(call: APICall, company_id, request: PublishManyRequest):
res, failures = run_batch_operation( res, failures = run_batch_operation(
func=partial( func=partial(
@ -1112,7 +1114,7 @@ def publish_many(call: APICall, company_id, request: PublishManyRequest):
init_res=PublishRes(), init_res=PublishRes(),
) )
call.result.data = dict(published=res.published, failures=failures) call.result.data_model = BatchResponse(succeeded=res.published, failures=failures)
@endpoint( @endpoint(

View File

@ -21,7 +21,7 @@ class TestBatchOperations(TestService):
# enqueue # enqueue
res = self.api.tasks.enqueue_many(ids=ids) res = self.api.tasks.enqueue_many(ids=ids)
self.assertEqual(res.queued, 2) self.assertEqual(res.succeeded, 2)
self._assert_failures(res, [missing_id]) self._assert_failures(res, [missing_id])
data = self.api.tasks.get_all_ex(id=ids).tasks data = self.api.tasks.get_all_ex(id=ids).tasks
self.assertEqual({t.status for t in data}, {"queued"}) self.assertEqual({t.status for t in data}, {"queued"})
@ -30,14 +30,14 @@ class TestBatchOperations(TestService):
for t in tasks: for t in tasks:
self.api.tasks.started(task=t) self.api.tasks.started(task=t)
res = self.api.tasks.stop_many(ids=ids) res = self.api.tasks.stop_many(ids=ids)
self.assertEqual(res.stopped, 2) self.assertEqual(res.succeeded, 2)
self._assert_failures(res, [missing_id]) self._assert_failures(res, [missing_id])
data = self.api.tasks.get_all_ex(id=ids).tasks data = self.api.tasks.get_all_ex(id=ids).tasks
self.assertEqual({t.status for t in data}, {"stopped"}) self.assertEqual({t.status for t in data}, {"stopped"})
# publish # publish
res = self.api.tasks.publish_many(ids=ids, publish_model=False) res = self.api.tasks.publish_many(ids=ids, publish_model=False)
self.assertEqual(res.published, 2) self.assertEqual(res.succeeded, 2)
self._assert_failures(res, [missing_id]) self._assert_failures(res, [missing_id])
data = self.api.tasks.get_all_ex(id=ids).tasks data = self.api.tasks.get_all_ex(id=ids).tasks
self.assertEqual({t.status for t in data}, {"published"}) self.assertEqual({t.status for t in data}, {"published"})
@ -46,7 +46,7 @@ class TestBatchOperations(TestService):
res = self.api.tasks.reset_many( res = self.api.tasks.reset_many(
ids=ids, delete_output_models=True, return_file_urls=True, force=True ids=ids, delete_output_models=True, return_file_urls=True, force=True
) )
self.assertEqual(res.reset, 2) self.assertEqual(res.succeeded, 2)
self.assertEqual(res.deleted_models, 2) self.assertEqual(res.deleted_models, 2)
self.assertEqual(set(res.urls.model_urls), {"uri_0", "uri_1"}) self.assertEqual(set(res.urls.model_urls), {"uri_0", "uri_1"})
self._assert_failures(res, [missing_id]) self._assert_failures(res, [missing_id])
@ -55,7 +55,7 @@ class TestBatchOperations(TestService):
# archive # archive
res = self.api.tasks.archive_many(ids=ids) res = self.api.tasks.archive_many(ids=ids)
self.assertEqual(res.archived, 2) self.assertEqual(res.succeeded, 2)
self._assert_failures(res, [missing_id]) self._assert_failures(res, [missing_id])
data = self.api.tasks.get_all_ex(id=ids).tasks data = self.api.tasks.get_all_ex(id=ids).tasks
self.assertTrue(all("archived" in t.system_tags for t in data)) self.assertTrue(all("archived" in t.system_tags for t in data))
@ -64,7 +64,7 @@ class TestBatchOperations(TestService):
res = self.api.tasks.delete_many( res = self.api.tasks.delete_many(
ids=ids, delete_output_models=True, return_file_urls=True ids=ids, delete_output_models=True, return_file_urls=True
) )
self.assertEqual(res.deleted, 2) self.assertEqual(res.succeeded, 2)
self._assert_failures(res, [missing_id]) self._assert_failures(res, [missing_id])
data = self.api.tasks.get_all_ex(id=ids).tasks data = self.api.tasks.get_all_ex(id=ids).tasks
self.assertEqual(data, []) self.assertEqual(data, [])
@ -84,13 +84,13 @@ class TestBatchOperations(TestService):
res = self.api.models.publish_many( res = self.api.models.publish_many(
ids=ids, publish_task=True, force_publish_task=True ids=ids, publish_task=True, force_publish_task=True
) )
self.assertEqual(res.published, 1) self.assertEqual(res.succeeded, 1)
self.assertEqual(res.published_tasks[0].id, task) self.assertEqual(res.published_tasks[0].id, task)
self._assert_failures(res, [ids[1], missing_id]) self._assert_failures(res, [ids[1], missing_id])
# archive # archive
res = self.api.models.archive_many(ids=ids) res = self.api.models.archive_many(ids=ids)
self.assertEqual(res.archived, 2) self.assertEqual(res.succeeded, 2)
self._assert_failures(res, [missing_id]) self._assert_failures(res, [missing_id])
data = self.api.models.get_all_ex(id=ids).models data = self.api.models.get_all_ex(id=ids).models
for m in data: for m in data:
@ -98,7 +98,7 @@ class TestBatchOperations(TestService):
# delete # delete
res = self.api.models.delete_many(ids=[*models, missing_id], force=True) res = self.api.models.delete_many(ids=[*models, missing_id], force=True)
self.assertEqual(res.deleted, 2) self.assertEqual(res.succeeded, 2)
self.assertEqual(set(res.urls), set(uris)) self.assertEqual(set(res.urls), set(uris))
self._assert_failures(res, [missing_id]) self._assert_failures(res, [missing_id])
data = self.api.models.get_all_ex(id=ids).models data = self.api.models.get_all_ex(id=ids).models