Mirror of https://github.com/clearml/clearml, synced 2025-06-26 18:16:07 +00:00

Commit 815906ec64: Merge branch 'master' of https://github.com/allegroai/clearml

@@ -912,7 +912,7 @@ class ScriptInfo(object):
         # noinspection PyBroadException
         try:
-            with open(script_path, 'r') as f:
+            with open(script_path, 'r', encoding='utf-8') as f:
                 script_code = f.read()
             return script_code
         except Exception:
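
The explicit encoding matters because open() otherwise falls back to the platform's preferred encoding (often cp1252 on Windows), so capturing the source of a script that contains non-ASCII characters could raise UnicodeDecodeError. A minimal sketch of the pattern; read_script_source is an illustrative stand-in for the ScriptInfo helper, not actual ClearML API:

import locale


def read_script_source(script_path):
    # Force UTF-8 instead of relying on locale.getpreferredencoding(),
    # which is what open() uses when no encoding is given.
    try:
        with open(script_path, 'r', encoding='utf-8') as f:
            return f.read()
    except Exception:
        return ''


if __name__ == '__main__':
    print('platform default encoding:', locale.getpreferredencoding(False))
    print(read_script_source(__file__)[:80])
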
@@ -342,11 +342,22 @@ class BaseModel(object):
     @property
     def published(self):
         # type: () -> bool
+        """
+        Get the published state of this model.
+
+        :return:
+
+        """
         return self._get_base_model().locked
 
     @property
     def framework(self):
         # type: () -> str
+        """
+        The ML framework of the model (for example: PyTorch, TensorFlow, XGBoost, etc.).
+
+        :return: The model's framework
+        """
         return self._get_model_data().framework
 
     def __init__(self, task=None):
@@ -2136,6 +2147,11 @@ class OutputModel(BaseModel):
     @property
     def upload_storage_uri(self):
         # type: () -> str
+        """
+        The URI of the storage destination for uploaded model weight files.
+
+        :return: The URI string
+        """
         return self._get_base_model().upload_storage_uri
 
     def __init__(
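
The two hunks above only add docstrings to existing read-only properties. A minimal usage sketch, assuming a configured ClearML client; the model ID below is a placeholder for a model that exists on your server, and upload_storage_uri is the OutputModel counterpart documented in the second hunk:

from clearml import InputModel

# Placeholder ID: substitute a real model ID from your ClearML server.
model = InputModel(model_id='0123456789abcdef0123456789abcdef')

print(model.framework)   # e.g. 'PyTorch', 'TensorFlow', 'XGBoost'
print(model.published)   # True once the model has been published (locked)
# For an OutputModel, .upload_storage_uri reports where weight files get uploaded.
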
@@ -52,7 +52,7 @@ class ResourceMonitor(BackgroundMonitor):
         try:
             active_gpus = os.environ.get('NVIDIA_VISIBLE_DEVICES', '') or \
                           os.environ.get('CUDA_VISIBLE_DEVICES', '')
-            if active_gpus != "all":
+            if active_gpus and active_gpus != "all":
                 self._active_gpus = [g.strip() for g in active_gpus.split(',')]
         except Exception:
             pass
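
The added "active_gpus and" guard matters because ''.split(',') returns [''], so an empty NVIDIA_VISIBLE_DEVICES/CUDA_VISIBLE_DEVICES would previously yield an active-GPU list containing a single empty string and effectively filter out every GPU. A standalone sketch of the parsing behaviour; parse_active_gpus is an illustrative helper, not part of ClearML:

import os


def parse_active_gpus(environ=os.environ):
    # Mirrors the patched ResourceMonitor logic: only honour the variable
    # when it is non-empty and not the special value "all".
    active_gpus = environ.get('NVIDIA_VISIBLE_DEVICES', '') or \
        environ.get('CUDA_VISIBLE_DEVICES', '')
    if active_gpus and active_gpus != "all":
        return [g.strip() for g in active_gpus.split(',')]
    return None  # None means "monitor every GPU"


print(parse_active_gpus({'CUDA_VISIBLE_DEVICES': '0,2'}))  # ['0', '2']
print(parse_active_gpus({}))                               # None
print(''.split(','))                                       # the old pitfall: [''] is non-empty
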
@@ -389,7 +389,7 @@ class ResourceMonitor(BackgroundMonitor):
         if self._gpustat:
             gpu_stat = self._gpustat.new_query(shutdown=True, get_driver_info=True)
             if gpu_stat.gpus:
-                gpus = [g for i, g in enumerate(gpu_stat.gpus) if not self._active_gpus or i in self._active_gpus]
+                gpus = [g for i, g in enumerate(gpu_stat.gpus) if not self._skip_nonactive_gpu(i, g)]
                 specs.update(
                     gpu_count=int(len(gpus)),
                     gpu_type=', '.join(g.name for g in gpus),
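
The body of the new _skip_nonactive_gpu helper is not part of this diff. The sketch below is only an assumption of what such a per-device check might do (compare the device index against the parsed *_VISIBLE_DEVICES list, which holds strings); the real method may also handle device UUIDs:

def skip_nonactive_gpu(index, gpu, active_gpus):
    # Assumed logic: keep everything when no restriction is set, otherwise
    # keep a device only if its index appears in the active-GPU list.
    if not active_gpus:
        return False
    return str(getattr(gpu, 'index', index)) not in active_gpus


class FakeGpu:
    def __init__(self, index, name):
        self.index = index
        self.name = name


gpus = [FakeGpu(0, 'A100'), FakeGpu(1, 'A100')]
active = ['1']
print([g.name for i, g in enumerate(gpus) if not skip_nonactive_gpu(i, g, active)])
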
@@ -282,7 +282,10 @@
     "new_task.update_parameters({\"General/max_depth\": 3})\n",
     "# We can even rename it if we wanted\n",
     "new_task.rename(f\"Cloned Task\")\n",
-    "# Now enuque it for the colab worker to start working on it!\n",
+    "# Make sure that the diff does not contain Colab invocation!\n",
+    "# cf. https://github.com/allegroai/clearml/issues/1204\n",
+    "new_task.set_script(diff=\"pass\")\n",
+    "# Now enqueue it for the colab worker to start working on it!\n",
     "Task.enqueue(task=new_task, queue_name=\"default\")"
    ]
   },
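
Outside the notebook, the same workaround looks roughly like this. It assumes a template task originally created from Colab, whose captured uncommitted diff would otherwise replay the Colab bootstrap command when an agent runs the clone (see the clearml issue referenced above); the project and task names are placeholders:

from clearml import Task

# Placeholder names: use a task that exists in your workspace.
template = Task.get_task(project_name='Colab notebooks', task_name='XGBoost example')
new_task = Task.clone(source_task=template)

new_task.update_parameters({"General/max_depth": 3})

# Wipe the captured uncommitted changes so the agent does not try to
# re-apply (and re-run) the Colab invocation recorded in the diff.
new_task.set_script(diff="pass")

Task.enqueue(task=new_task, queue_name="default")
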
@@ -329,7 +332,7 @@
     "# Now we can set up a loop that waits until our task is done!\n",
     "# If you have enabled notifications on Colab, it will even let you know\n",
     "# when the ClearML task is done!\n",
-    "while new_task.status not in [\"success\", \"failed\"]:\n",
+    "while new_task.status not in [\"completed\", \"failed\"]:\n",
     "    if new_task.status == \"draft\":\n",
     "        print(\"Task is still in draft mode! You have to enqueue it before the agent can run it.\")\n",
     "\n",
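
The status fix reflects that ClearML reports a finished task as "completed" rather than "success". A minimal standalone polling sketch equivalent to the notebook cell; the task ID is a placeholder:

import time

from clearml import Task

# Placeholder ID: substitute the ID of the task you enqueued.
new_task = Task.get_task(task_id='0123456789abcdef')

while new_task.status not in ["completed", "failed"]:
    if new_task.status == "draft":
        print("Task is still in draft mode! You have to enqueue it before the agent can run it.")
    time.sleep(10)
    new_task.reload()  # refresh the cached task data before checking again

print("final status:", new_task.status)
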
@@ -452,4 +455,4 @@
  "nbformat": 4,
  "nbformat_minor": 0
 }