Mirror of https://github.com/clearml/clearml
Fixed Python 2.7 support, refactor, PEP8
commit a4d83c30ba (parent 039f9f6938)
@@ -8,11 +8,11 @@ from .logger import Logger
 from .storage import StorageManager
 from .errors import UsageError
 from .datasets import Dataset
-from .backend_api import browser_login

 TaskTypes = Task.TaskTypes

 if not PY2:
+    from .backend_api import browser_login  # noqa: F401
     from .automation.controller import PipelineController, PipelineDecorator  # noqa: F401

 __all__ = [
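The change above moves the `browser_login` import under the existing `if not PY2:` guard, so a Python 2.7 interpreter never imports code that only works on Python 3. A minimal, self-contained sketch of the same version-guard pattern, using a stdlib module instead of ClearML's own modules purely for illustration:

    import sys

    PY2 = sys.version_info[0] == 2

    if PY2:
        # Python 2 location of the helper
        from urllib import quote  # noqa: F401
    else:
        # Python 3 moved it into urllib.parse
        from urllib.parse import quote  # noqa: F401

    print(quote("hello world"))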
@@ -38,12 +38,13 @@ for a very long time for a non-responding or mis-configured server
 ENV_API_EXTRA_RETRY_CODES = EnvEntry("CLEARML_API_EXTRA_RETRY_CODES")


-class MissingConfigError(Exception):
+class MissingConfigError(ValueError):
     def __init__(self, message=None):
         if message is None:
             message = (
                 "It seems ClearML is not configured on this machine!\n"
-                "To get started with ClearML, setup your own 'clearml-server' or create a free account at https://app.clear.ml\n"
-                "Setup instructions can be found here: https://clear.ml/docs/latest/docs"
+                "To get started with ClearML, setup your own 'clearml-server' or "
+                "create a free account at https://app.clear.ml\n"
+                "Setup instructions can be found here: https://clear.ml/docs"
             )
-        super().__init__(message)
+        super(MissingConfigError, self).__init__(message)
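Two Python 2.7 fixes sit in this hunk: the zero-argument `super()` call is Python-3-only, so the explicit `super(MissingConfigError, self).__init__(message)` form is used instead, and basing the error on `ValueError` keeps it catchable by broader handlers. A small sketch of the resulting behaviour (the shortened message is made up):

    class MissingConfigError(ValueError):
        def __init__(self, message=None):
            if message is None:
                message = "ClearML is not configured on this machine"
            # Explicit-argument super() works on both Python 2.7 and 3.x
            super(MissingConfigError, self).__init__(message)


    try:
        raise MissingConfigError()
    except ValueError as e:  # still caught by plain ValueError handlers
        print(e)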
@@ -915,7 +915,8 @@ def browser_login(clearml_server=None):

     print("")
     if counter:
-        print("\nHurrah! \N{FACE WITH PARTY HORN AND PARTY HAT} \N{CONFETTI BALL} \N{party popper}")
+        # these emojis actually requires python 3.6+
+        print("\nHurrah! \N{face with party horn and party hat} \N{confetti ball} \N{party popper}")  # noqa: E999

     if token:
         # set Token
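The new comment flags that the `\N{...}` named emoji escapes need a sufficiently recent interpreter. A hedged sketch of a version-aware fallback (not the library's own approach, which keeps the emoji line and silences the linter with `# noqa`):

    # -*- coding: utf-8 -*-
    import sys


    def celebrate():
        # On old interpreters fall back to plain text instead of named escapes
        if sys.version_info >= (3, 6):
            print("\nHurrah! \N{PARTY POPPER}")
        else:
            print("\nHurrah!")


    celebrate()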
@@ -439,7 +439,7 @@ class Dataset(object):
         :param recursive: If True match all wildcard files recursively
         :param verbose: If True print to console files added/modified
         :param max_workers: The number of threads to add the external files with. Useful when `source_url` is
             a sequence. Defaults to the number of logical cores
         :return: Number of file links added
         """
         self._dirty = True

(Both sides of this hunk are identical in the rendered view, so the recorded change is presumably whitespace/PEP8 only; the same holds for the similar all-context hunks below.)
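The docstring above belongs to the Dataset method that registers external file links (the method signature sits just above this hunk; in current ClearML it is exposed as `Dataset.add_external_files`, named here as an assumption). A hedged usage sketch with a made-up bucket URL, assuming a configured ClearML environment:

    from clearml import Dataset

    # add_external_files registers links to remote files instead of uploading them
    ds = Dataset.create(dataset_project="examples", dataset_name="external-links-demo")
    num_links = ds.add_external_files(
        source_url="s3://my-bucket/raw-images/",  # hypothetical source
        recursive=True,
        verbose=False,
        max_workers=4,
    )
    print("links added:", num_links)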
@@ -632,9 +632,9 @@ class StorageHelper(object):
         """
         Get the metadata of the a remote object.
         The metadata is a dict containing the following keys: `name`, `size`.

         :param object obj: The remote object

         :return: A dict containing the metadata of the remote object
         """
         return {
@@ -5,7 +5,7 @@ import tarfile
 from multiprocessing.pool import ThreadPool
 from random import random
 from time import time
-from typing import List, Optional
+from typing import List, Optional, Union
 from zipfile import ZipFile
 from six.moves.urllib.parse import urlparse

@@ -426,7 +426,7 @@ class StorageManager(object):

     @classmethod
     def list(cls, remote_url, return_full_path=False, with_metadata=False):
-        # type: (str, bool) -> Optional[List[Union[str, dict]]]
+        # type: (str, bool, bool) -> Optional[List[Union[str, dict]]]
         """
         Return a list of object names inside the base path or dictionaries containing the corresponding
         objects' metadata (in case `with_metadata` is True)
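The corrected type comment now covers all three parameters of `StorageManager.list`. A usage sketch based on the documented return value (object names, or metadata dicts when `with_metadata=True`); the bucket URL is hypothetical:

    from clearml import StorageManager

    # Plain listing: full object paths under the prefix
    names = StorageManager.list("s3://my-bucket/datasets/", return_full_path=True)

    # Metadata listing: dicts with at least 'name' and 'size' keys
    objects = StorageManager.list("s3://my-bucket/datasets/", with_metadata=True)
    for obj in objects or []:  # return type is Optional, so guard against None
        print(obj["name"], obj["size"])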
@@ -472,7 +472,7 @@ class StorageManager(object):
         """
         Get the metadata of the a remote object.
         The metadata is a dict containing the following keys: `name`, `size`.

         :param str remote_url: Source remote storage location, tree structure of `remote_url` will
             be created under the target local_folder. Supports S3/GS/Azure, shared filesystem and http(s).
             Example: 's3://bucket/data/'
@@ -480,7 +480,7 @@ class StorageManager(object):
         :return: A dict containing the metadata of the remote object. In case of an error, `None` is returned
         """
         helper = StorageHelper.get(remote_url)
         obj = helper.get_object(remote_url)
         if not obj:
             return None
         return helper.get_object_metadata(obj)
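The method shown in the last two hunks resolves a remote URL to an object and returns its metadata, or `None` when the object cannot be found. Assuming it is the one ClearML exposes as `StorageManager.get_metadata` (its signature sits above these hunks and is not visible here), a usage sketch with a made-up URL:

    from clearml import StorageManager

    meta = StorageManager.get_metadata("s3://my-bucket/datasets/train.csv")
    if meta is not None:  # None is returned in case of an error
        print(meta["name"], meta["size"])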
@@ -16,7 +16,7 @@ try:
     # noinspection PyCompatibility
     from collections.abc import Sequence as CollectionsSequence
 except ImportError:
-    from collections import Sequence as CollectionsSequence
+    from collections import Sequence as CollectionsSequence  # noqa

 from typing import (
     Optional,
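The `# noqa` marks the Python 2 fallback branch: on Python 3 `Sequence` lives in `collections.abc` (and the old `collections` alias was removed in 3.10), so the import is attempted from the new location first. The standard compatibility pattern as a self-contained sketch:

    try:
        # Python 3: abstract base classes live in collections.abc
        from collections.abc import Sequence
    except ImportError:
        # Python 2 fallback (the alias is gone from `collections` in Python 3.10+)
        from collections import Sequence  # noqa

    print(isinstance([1, 2, 3], Sequence))  # True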
@@ -92,7 +92,7 @@ from .binding.args import (
 from .utilities.dicts import ReadOnlyDict, merge_dicts
 from .utilities.proxy_object import (
     ProxyDictPreWrite, ProxyDictPostWrite, flatten_dictionary,
-    nested_from_flat_dictionary, naive_nested_from_flat_dictionary, )
+    nested_from_flat_dictionary, naive_nested_from_flat_dictionary, StubObject as _TaskStub)
 from .utilities.resource_monitor import ResourceMonitor
 from .utilities.seed import make_deterministic
 from .utilities.lowlevel.threads import get_current_thread_id
@@ -605,7 +605,9 @@ class Task(_Task):
             if not ENV_IGNORE_MISSING_CONFIG.get():
                 raise
             getLogger().warning(str(e))
-            return _TaskStub()
+            # return a Task-stub instead of the original class
+            # this will make sure users can still call the Stub without code breaking
+            return _TaskStub()  # noqa
         # set defaults
         if cls._offline_mode:
             task.output_uri = None
@@ -1932,10 +1934,11 @@ class Task(_Task):
         - In case the ``serialization_function`` argument is set - any extension is supported

         :param Callable[Any, Union[bytes, bytearray]] serialization_function: A serialization function that takes one
-            parameter of any types which is the object to be serialized. The function should return a `bytes` or `bytearray`
-            object, which represents the serialized object. Note that the object will be immediately serialized using this function,
-            thus other serialization methods will not be used (e.g. `pandas.DataFrame.to_csv`), even if possible.
-            To deserialize this artifact when getting it using the `Artifact.get` method, use its `deserialization_function` argument
+            parameter of any types which is the object to be serialized. The function should return
+            a `bytes` or `bytearray` object, which represents the serialized object. Note that the object will be
+            immediately serialized using this function, thus other serialization methods will not be used
+            (e.g. `pandas.DataFrame.to_csv`), even if possible. To deserialize this artifact when getting
+            it using the `Artifact.get` method, use its `deserialization_function` argument.

         :return: The status of the upload.

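The reflowed paragraph documents the `serialization_function` argument of `Task.upload_artifact`. A hedged usage sketch (project, task and artifact names are made up; assumes a configured ClearML environment):

    import json

    from clearml import Task


    def to_bytes(obj):
        # Custom serializer: must return bytes or bytearray
        return json.dumps(obj).encode("utf-8")


    task = Task.init(project_name="examples", task_name="artifact-demo")
    task.upload_artifact(
        name="config",
        artifact_object={"lr": 0.001, "epochs": 10},
        serialization_function=to_bytes,
    )
    # When fetching the artifact later, pass a matching deserialization_function
    # to Artifact.get(), as the docstring above notes.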
@@ -2703,7 +2706,7 @@ class Task(_Task):
             project_name = task_data.get('project_name') or Task._get_project_name(task_data.get('project', ''))
             target_task = Task.create(project_name=project_name, task_name=task_data.get('name', None))
         elif isinstance(target_task, six.string_types):
-            target_task = Task.get_task(task_id=target_task)
+            target_task = Task.get_task(task_id=target_task)  # type: Optional[Task]
         elif not isinstance(target_task, Task):
             raise ValueError(
                 "`target_task` must be either Task id (str) or Task object, "
@@ -4276,14 +4279,3 @@ class Task(_Task):
                 auto_connect_frameworks={'detect_repository': False}) \
             if state['main'] else Task.get_task(task_id=state['id'])
         self.__dict__ = task.__dict__
-
-
-class _TaskStub(object):
-    def __call__(self, *args, **kwargs):
-        return self
-
-    def __getattr__(self, attr):
-        return self
-
-    def __setattr__(self, attr, val):
-        pass
@@ -80,6 +80,17 @@ class ProxyDictPreWrite(dict):
         return self._set_callback((prefix + '.' + key_value[0], key_value[1],))


+class StubObject(object):
+    def __call__(self, *args, **kwargs):
+        return self
+
+    def __getattr__(self, attr):
+        return self
+
+    def __setattr__(self, attr, val):
+        pass
+
+
 def verify_basic_type(a_dict_list, basic_types=None):
     basic_types = (float, int, bool, six.string_types, ) if not basic_types else \
         tuple(b for b in basic_types if b not in (list, tuple, dict))
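`StubObject` is a null object: calling it or reading any attribute returns the stub itself, and attribute assignment is ignored. That is what lets the `_TaskStub` returned by `Task.init` above absorb arbitrary chained calls instead of raising. A small illustration (the class body is restated verbatim so the snippet runs on its own):

    class StubObject(object):
        def __call__(self, *args, **kwargs):
            return self

        def __getattr__(self, attr):
            return self

        def __setattr__(self, attr, val):
            pass


    stub = StubObject()
    # Chained attribute access, calls and assignments all silently succeed
    result = stub.get_logger().report_text("no backend configured")
    stub.some_field = 42
    print(result is stub)  # True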