This commit is contained in:
revital 2024-03-06 09:08:32 +02:00
commit 815906ec64
4 changed files with 25 additions and 6 deletions

View File

@@ -912,7 +912,7 @@ class ScriptInfo(object):
# noinspection PyBroadException
try:
with open(script_path, 'r') as f:
with open(script_path, 'r', encoding='utf-8') as f:
script_code = f.read()
return script_code
except Exception:

View File

@@ -342,11 +342,22 @@ class BaseModel(object):
@property
def published(self):
    # type: () -> bool
    """
    Whether this model has been published.

    A published model is reported via the underlying base model's
    ``locked`` flag — the backend locks a model against further
    changes once it is published.

    :return: True if the model is published, False otherwise
    """
    base_model = self._get_base_model()
    return base_model.locked
@property
def framework(self):
    # type: () -> str
    """
    The ML framework of the model (for example: PyTorch, TensorFlow, XGBoost, etc.).

    :return: The model's framework
    """
    model_data = self._get_model_data()
    return model_data.framework
def __init__(self, task=None):
@@ -2136,6 +2147,11 @@ class OutputModel(BaseModel):
@property
def upload_storage_uri(self):
    # type: () -> str
    """
    The URI of the storage destination used when uploading model weight files.

    :return: The URI string
    """
    base_model = self._get_base_model()
    return base_model.upload_storage_uri
def __init__(

View File

@@ -52,7 +52,7 @@ class ResourceMonitor(BackgroundMonitor):
try:
active_gpus = os.environ.get('NVIDIA_VISIBLE_DEVICES', '') or \
os.environ.get('CUDA_VISIBLE_DEVICES', '')
if active_gpus != "all":
if active_gpus and active_gpus != "all":
self._active_gpus = [g.strip() for g in active_gpus.split(',')]
except Exception:
pass
@@ -389,7 +389,7 @@ class ResourceMonitor(BackgroundMonitor):
if self._gpustat:
gpu_stat = self._gpustat.new_query(shutdown=True, get_driver_info=True)
if gpu_stat.gpus:
gpus = [g for i, g in enumerate(gpu_stat.gpus) if not self._active_gpus or i in self._active_gpus]
gpus = [g for i, g in enumerate(gpu_stat.gpus) if not self._skip_nonactive_gpu(i, g)]
specs.update(
gpu_count=int(len(gpus)),
gpu_type=', '.join(g.name for g in gpus),

View File

@@ -282,7 +282,10 @@
"new_task.update_parameters({\"General/max_depth\": 3})\n",
"# We can even rename it if we wanted\n",
"new_task.rename(f\"Cloned Task\")\n",
"# Now enuque it for the colab worker to start working on it!\n",
"# Make sure that the diff does not contain Colab invocation!\n",
"# cf. https://github.com/allegroai/clearml/issues/1204\n",
"new_task.set_script(diff=\"pass\")\n",
"# Now enqueue it for the colab worker to start working on it!\n",
"Task.enqueue(task=new_task, queue_name=\"default\")"
]
},
@@ -329,7 +332,7 @@
"# Now we can set up a loop that waits until our task is done!\n",
"# If you have enabled notifications on Colab, it will even let you know\n",
"# when the ClearML task is done!\n",
"while new_task.status not in [\"success\", \"failed\"]:\n",
"while new_task.status not in [\"completed\", \"failed\"]:\n",
" if new_task.status == \"draft\":\n",
" print(\"Task is still in draft mode! You have to enqueue it before the agent can run it.\")\n",
"\n",
@@ -452,4 +455,4 @@
"nbformat": 4,
"nbformat_minor": 0
}