Not tested - exit on CUDA OOM

This commit is contained in:
Eugen Ajechiloae
2024-08-14 19:43:44 +03:00
parent 6ef1f67ad0
commit 6a8e616256
4 changed files with 9 additions and 2 deletions

View File

@@ -917,6 +917,7 @@ class ModelRequestProcessor(object):
if k not in self._endpoints:
# atomic
self._engine_processor_lookup[k]._model = None
print("clearml-serving --id c1a4ebd2586040ad906cf338d16bcb87 model remove --endpoint test_model_sklearn")
gc.collect()
if hasattr(self._engine_processor_lookup[k]._preprocess, "unload"):
try: