Mirror of https://github.com/clearml/clearml, synced 2025-04-22 23:35:03 +00:00

Fix model not created in the current project

This commit is contained in:
    parent e090995ad8
    commit 41218cee9b
@@ -3228,7 +3228,9 @@ class PipelineDecorator(PipelineController):
                     else:
                         sleep(2)
                         continue
 
                 if _node.job.is_failed() or _node.job.is_aborted():
+                    # noinspection PyProtectedMember
                     if cls._singleton._should_relaunch_node(_node):
                         cls._singleton._task.get_logger().report_text(
                             "Relaunching step {} on instance termination".format(_node.name)
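The hunk above concerns recovering pipeline steps whose worker instance disappeared: when a step's job shows up as failed or aborted and the controller decides it should be relaunched, a log line is reported and the step is resubmitted. The sketch below only illustrates that general retry-on-termination flow; maybe_relaunch(), relaunch(), and MAX_RELAUNCHES are hypothetical names, not the clearml API.

import logging

MAX_RELAUNCHES = 3  # hypothetical cap on how often a single step is retried

def maybe_relaunch(node, attempts):
    # Hypothetical helper mirroring the decision in the hunk above:
    # only failed/aborted jobs are considered, and only up to MAX_RELAUNCHES times.
    if not (node.job.is_failed() or node.job.is_aborted()):
        return False
    if attempts >= MAX_RELAUNCHES:
        return False
    logging.getLogger("pipeline").info(
        "Relaunching step %s on instance termination (attempt %d)", node.name, attempts + 1)
    node.job.relaunch()  # hypothetical: resubmit the same job to its queue
    return True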
@@ -3460,7 +3462,8 @@ class PipelineDecorator(PipelineController):
                 pipeline_kwargs[k] = a_pipeline.get_parameters()[k]
 
         # run the actual pipeline
-        if not start_controller_locally and not PipelineDecorator._debug_execute_step_process and pipeline_execution_queue:
+        if not start_controller_locally and \
+                not PipelineDecorator._debug_execute_step_process and pipeline_execution_queue:
             # rerun the pipeline on a remote machine
             a_pipeline._task.execute_remotely(queue_name=pipeline_execution_queue)
             # when we get here it means we are running remotely
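This second hunk only rewraps the condition that decides whether the decorated pipeline hands itself off to a remote worker via execute_remotely() instead of running the controller in the current process. From the user side that decision is driven by how the pipeline is declared and started; a minimal sketch follows (project, queue, and step names are placeholders, and pipeline_execution_queue is assumed to be the decorator argument feeding this code path):

from clearml.automation.controller import PipelineDecorator

@PipelineDecorator.component(return_values=["doubled"])
def double(x):
    return x * 2

@PipelineDecorator.pipeline(
    name="example pipeline", project="examples", version="0.1",
    pipeline_execution_queue="services",  # assumed: queue the controller task is enqueued to
)
def run_pipeline(x=1):
    print(double(x))

if __name__ == "__main__":
    # PipelineDecorator.run_locally()  # uncomment to keep the controller on this machine
    run_pipeline(x=3)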
|
@@ -173,7 +173,7 @@ class Model(IdObjectBase, AsyncManagerMixin, _StorageUriMixin):
         if self.id is None:
             if upload_storage_uri:
                 self.upload_storage_uri = upload_storage_uri
-            self._create_empty_model(self.upload_storage_uri)
+            self._create_empty_model(self.upload_storage_uri, project_id=project_id)
         elif upload_storage_uri:
             self.upload_storage_uri = upload_storage_uri
 
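The constructor change above is the user-visible part of the fix: when a Model wrapper is created without a backend id, the empty model entry is now created with the caller's project id instead of the server default. At the SDK level the path that exercises this is roughly the following (a sketch; project, task, and file names are placeholders):

from clearml import Task, OutputModel

task = Task.init(project_name="my project", task_name="train")  # placeholder names

# Registering an output model creates an (initially empty) model entry in the backend.
# With this fix the entry lands in "my project" rather than falling back to the default project.
model = OutputModel(task=task, framework="PyTorch")
model.update_weights(weights_filename="model.pt")  # placeholder weights file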
@@ -481,11 +481,11 @@ class Model(IdObjectBase, AsyncManagerMixin, _StorageUriMixin):
         res = self.send(req)
         return res.response.id
 
-    def _create_empty_model(self, upload_storage_uri=None):
+    def _create_empty_model(self, upload_storage_uri=None, project_id=None):
         upload_storage_uri = upload_storage_uri or self.upload_storage_uri
         name = make_message('Anonymous model %(time)s')
         uri = '{}/uploading_file'.format(upload_storage_uri or 'file://')
-        req = models.CreateRequest(uri=uri, name=name, labels={})
+        req = models.CreateRequest(uri=uri, name=name, labels={}, project=project_id)
         res = self.send(req)
         if not res:
             return False
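The actual fix is small: _create_empty_model() grows an optional project_id parameter and forwards it to the backend create request, so the anonymous placeholder model is registered in the current project. A minimal sketch of the same threading pattern, with a stand-in backend instead of the real models.CreateRequest/send machinery:

class FakeBackend:
    # Stand-in for the backend session, only to show the call shape; not the clearml API.
    def create_model(self, uri, name, labels, project=None):
        return {"uri": uri, "name": name, "labels": labels, "project": project}

def create_empty_model(backend, upload_storage_uri=None, project_id=None):
    # Mirrors the fixed _create_empty_model: the optional project_id is forwarded so the
    # placeholder model ends up in the requested project instead of the default one.
    uri = "{}/uploading_file".format(upload_storage_uri or "file://")
    return backend.create_model(uri=uri, name="Anonymous model", labels={}, project=project_id)

print(create_empty_model(FakeBackend(), upload_storage_uri="s3://bucket/models", project_id="project-123"))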
|