1
0
mirror of https://github.com/clearml/clearml synced 2025-04-16 21:42:10 +00:00

Fixed Python 2.7 support, refactor, PEP8

This commit is contained in:
allegroai 2022-11-09 11:42:06 +02:00
parent 039f9f6938
commit a4d83c30ba
8 changed files with 37 additions and 32 deletions

View File

@ -8,11 +8,11 @@ from .logger import Logger
from .storage import StorageManager
from .errors import UsageError
from .datasets import Dataset
from .backend_api import browser_login
TaskTypes = Task.TaskTypes
if not PY2:
from .backend_api import browser_login # noqa: F401
from .automation.controller import PipelineController, PipelineDecorator # noqa: F401
__all__ = [

View File

@ -38,12 +38,13 @@ for a very long time for a non-responding or mis-configured server
ENV_API_EXTRA_RETRY_CODES = EnvEntry("CLEARML_API_EXTRA_RETRY_CODES")
class MissingConfigError(ValueError):
    """Raised when no ClearML configuration can be found on this machine.

    Subclasses ValueError (not bare Exception) so callers that catch
    ValueError for configuration problems also catch this.
    """

    def __init__(self, message=None):
        # type: (Optional[str]) -> None
        """
        :param message: Optional explicit error text. When None, a default
            message with setup instructions is used.
        """
        if message is None:
            message = (
                "It seems ClearML is not configured on this machine!\n"
                "To get started with ClearML, setup your own 'clearml-server' or "
                "create a free account at https://app.clear.ml\n"
                "Setup instructions can be found here: https://clear.ml/docs"
            )
        # Python 2.7-compatible super() call (file explicitly supports PY2)
        super(MissingConfigError, self).__init__(message)

View File

@ -915,7 +915,8 @@ def browser_login(clearml_server=None):
print("")
if counter:
print("\nHurrah! \N{FACE WITH PARTY HORN AND PARTY HAT} \N{CONFETTI BALL} \N{party popper}")
# these emojis actually require Python 3.6+
print("\nHurrah! \N{face with party horn and party hat} \N{confetti ball} \N{party popper}") # noqa: E999
if token:
# set Token

View File

@ -439,7 +439,7 @@ class Dataset(object):
:param recursive: If True match all wildcard files recursively
:param verbose: If True print to console files added/modified
:param max_workers: The number of threads to add the external files with. Useful when `source_url` is
a sequence. Defaults to the number of logical cores
a sequence. Defaults to the number of logical cores
:return: Number of file links added
"""
self._dirty = True

View File

@ -632,9 +632,9 @@ class StorageHelper(object):
"""
Get the metadata of a remote object.
The metadata is a dict containing the following keys: `name`, `size`.
:param object obj: The remote object
:return: A dict containing the metadata of the remote object
"""
return {

View File

@ -5,7 +5,7 @@ import tarfile
from multiprocessing.pool import ThreadPool
from random import random
from time import time
from typing import List, Optional
from typing import List, Optional, Union
from zipfile import ZipFile
from six.moves.urllib.parse import urlparse
@ -426,7 +426,7 @@ class StorageManager(object):
@classmethod
def list(cls, remote_url, return_full_path=False, with_metadata=False):
# type: (str, bool) -> Optional[List[Union[str, dict]]]
# type: (str, bool, bool) -> Optional[List[Union[str, dict]]]
"""
Return a list of object names inside the base path or dictionaries containing the corresponding
objects' metadata (in case `with_metadata` is True)
@ -472,7 +472,7 @@ class StorageManager(object):
"""
Get the metadata of a remote object.
The metadata is a dict containing the following keys: `name`, `size`.
:param str remote_url: Source remote storage location, tree structure of `remote_url` will
be created under the target local_folder. Supports S3/GS/Azure, shared filesystem and http(s).
Example: 's3://bucket/data/'
@ -480,7 +480,7 @@ class StorageManager(object):
:return: A dict containing the metadata of the remote object. In case of an error, `None` is returned
"""
helper = StorageHelper.get(remote_url)
obj = helper.get_object(remote_url)
obj = helper.get_object(remote_url)
if not obj:
return None
return helper.get_object_metadata(obj)

View File

@ -16,7 +16,7 @@ try:
# noinspection PyCompatibility
from collections.abc import Sequence as CollectionsSequence
except ImportError:
from collections import Sequence as CollectionsSequence
from collections import Sequence as CollectionsSequence # noqa
from typing import (
Optional,
@ -92,7 +92,7 @@ from .binding.args import (
from .utilities.dicts import ReadOnlyDict, merge_dicts
from .utilities.proxy_object import (
ProxyDictPreWrite, ProxyDictPostWrite, flatten_dictionary,
nested_from_flat_dictionary, naive_nested_from_flat_dictionary, )
nested_from_flat_dictionary, naive_nested_from_flat_dictionary, StubObject as _TaskStub)
from .utilities.resource_monitor import ResourceMonitor
from .utilities.seed import make_deterministic
from .utilities.lowlevel.threads import get_current_thread_id
@ -605,7 +605,9 @@ class Task(_Task):
if not ENV_IGNORE_MISSING_CONFIG.get():
raise
getLogger().warning(str(e))
return _TaskStub()
# return a Task-stub instead of the original class
# this will make sure users can still call the Stub without code breaking
return _TaskStub() # noqa
# set defaults
if cls._offline_mode:
task.output_uri = None
@ -1932,10 +1934,11 @@ class Task(_Task):
- In case the ``serialization_function`` argument is set - any extension is supported
:param Callable[Any, Union[bytes, bytearray]] serialization_function: A serialization function that takes one
parameter of any types which is the object to be serialized. The function should return a `bytes` or `bytearray`
object, which represents the serialized object. Note that the object will be immediately serialized using this function,
thus other serialization methods will not be used (e.g. `pandas.DataFrame.to_csv`), even if possible.
To deserialize this artifact when getting it using the `Artifact.get` method, use its `deserialization_function` argument
parameter of any types which is the object to be serialized. The function should return
a `bytes` or `bytearray` object, which represents the serialized object. Note that the object will be
immediately serialized using this function, thus other serialization methods will not be used
(e.g. `pandas.DataFrame.to_csv`), even if possible. To deserialize this artifact when getting
it using the `Artifact.get` method, use its `deserialization_function` argument.
:return: The status of the upload.
@ -2703,7 +2706,7 @@ class Task(_Task):
project_name = task_data.get('project_name') or Task._get_project_name(task_data.get('project', ''))
target_task = Task.create(project_name=project_name, task_name=task_data.get('name', None))
elif isinstance(target_task, six.string_types):
target_task = Task.get_task(task_id=target_task)
target_task = Task.get_task(task_id=target_task) # type: Optional[Task]
elif not isinstance(target_task, Task):
raise ValueError(
"`target_task` must be either Task id (str) or Task object, "
@ -4276,14 +4279,3 @@ class Task(_Task):
auto_connect_frameworks={'detect_repository': False}) \
if state['main'] else Task.get_task(task_id=state['id'])
self.__dict__ = task.__dict__
class _TaskStub(object):
def __call__(self, *args, **kwargs):
return self
def __getattr__(self, attr):
return self
def __setattr__(self, attr, val):
pass

View File

@ -80,6 +80,17 @@ class ProxyDictPreWrite(dict):
return self._set_callback((prefix + '.' + key_value[0], key_value[1],))
class StubObject(object):
    """Inert proxy object: absorbs calls, attribute reads, and attribute writes.

    Calls and attribute access both return the instance itself, while
    attribute assignment does nothing, so any chain of operations on the
    stub succeeds without side effects.
    """

    def __getattr__(self, item):
        # Unknown attributes resolve to the stub, so dotted chains work.
        return self

    def __setattr__(self, item, value):
        # Drop all writes; the stub never stores state.
        pass

    def __call__(self, *call_args, **call_kwargs):
        # Invoking the stub with any signature returns the stub again.
        return self
def verify_basic_type(a_dict_list, basic_types=None):
basic_types = (float, int, bool, six.string_types, ) if not basic_types else \
tuple(b for b in basic_types if b not in (list, tuple, dict))