Add missing skip_zero_size_check argument in StorageManager.download_folder()

PEP8
allegroai 2022-02-26 13:38:42 +02:00
parent 76559b6e2d
commit a5b16db95c
7 changed files with 16 additions and 19 deletions

View File

@@ -30,7 +30,7 @@ ENV_API_DEFAULT_REQ_METHOD = EnvEntry("CLEARML_API_DEFAULT_REQ_METHOD")
 """
 Experimental option to make the SDK retry on additional error codes.
 Use a comma-separated list of integer return codes.
-NOTE: this changes behavior and might cause the experiment to wait 
+NOTE: this changes behavior and might cause the experiment to wait
 for a very long time for a non-responding or mis-configured server
 """
 ENV_API_EXTRA_RETRY_CODES = EnvEntry("CLEARML_API_EXTRA_RETRY_CODES")
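
A usage sketch for this option: the variable name and comma-separated format come from the docstring above, while the assumption that it must be set before clearml is imported (so the API session picks it up) is mine:

    import os

    # Sketch: also retry on HTTP 502 and 504 responses.
    # Comma-separated integer codes, per the docstring above; set it
    # before importing clearml (assumption: the variable is read when
    # the API session is first created).
    os.environ["CLEARML_API_EXTRA_RETRY_CODES"] = "502,504"

    from clearml import Task  # imported only after the variable is set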

View File

@@ -134,11 +134,11 @@ class PatchFire:
         replaced_args = []
         for param in PatchFire.__remote_task_params[PatchFire._section_name].values():
             if command is not None and param.type == PatchFire._command_arg_type_template % command:
-                replaced_args.append("--" + param.name[len(command + PatchFire._args_sep) :])
+                replaced_args.append("--" + param.name[len(command + PatchFire._args_sep):])
                 value = PatchFire.__remote_task_params_dict[param.name]
                 if len(value) > 0:
                     replaced_args.append(value)
-            if param.type == PatchFire._shared_arg_type:
+            if param.type == PatchFire._shared_arg_type:
                 replaced_args.append("--" + param.name)
                 value = PatchFire.__remote_task_params_dict[param.name]
                 if len(value) > 0:

View File

@@ -167,7 +167,7 @@ class CacheManager(object):
         ):
             # parse the lock filename
             name = f.name[
-                len(CacheManager._lockfile_prefix) : -len(
+                len(CacheManager._lockfile_prefix):-len(
                     CacheManager._lockfile_suffix
                 )
             ]
@@ -181,7 +181,7 @@ class CacheManager(object):
             lock_files.pop(f.name, None)
         # delete old files
-        files = files[self._file_limit :]
+        files = files[self._file_limit:]
         for i, f in enumerate(files):
             if i < self._file_limit:
                 continue
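
For reference, every slice change in this commit (here and in the PatchFire hunk above) removes the whitespace before ':' inside a subscript, the pattern pycodestyle reports as E203. A minimal illustration with made-up values:

    items = list(range(10))
    prefix_len = 3

    head = items[prefix_len : -2]  # before: flagged by pycodestyle as E203
    head = items[prefix_len:-2]    # after: the style this commit adopts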

View File

@@ -1831,7 +1831,6 @@ class _AzureBlobServiceStorageDriver(_Driver):
         def is_legacy(self):
             return self.__legacy
-
     @attrs
     class _Object(object):
         container = attrib()
@@ -1950,7 +1949,6 @@ class _AzureBlobServiceStorageDriver(_Driver):
         download_done = threading.Event()
         download_done.counter = 0
-
         def callback_func(current, total):
             if callback:
                 chunk = current - download_done.counter
@@ -1959,7 +1957,6 @@ class _AzureBlobServiceStorageDriver(_Driver):
             if current >= total:
                 download_done.set()
-
         container = obj.container
         container.blob_service.MAX_SINGLE_GET_SIZE = 5 * 1024 * 1024
         _ = container.get_blob_to_path(
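
The context lines above show the driver's progress pattern: a threading.Event that doubles as a byte counter, so the callback can report per-chunk deltas and the event fires when the download completes. A self-contained sketch of the same idea (function and variable names here are illustrative, not the driver's API):

    import threading

    def make_progress_tracker(callback=None):
        # Event that also remembers how many bytes were already reported,
        # mirroring the download_done / callback_func pair above.
        done = threading.Event()
        done.counter = 0

        def on_progress(current, total):
            if callback:
                chunk = current - done.counter  # bytes since the last report
                done.counter = current
                callback(chunk)
            if current >= total:
                done.set()  # signal that the transfer is finished

        return done, on_progress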

View File

@@ -249,9 +249,9 @@ class StorageManager(object):
     @classmethod
     def download_folder(
-        cls, remote_url, local_folder=None, match_wildcard=None, overwrite=False
+        cls, remote_url, local_folder=None, match_wildcard=None, overwrite=False, skip_zero_size_check=False
     ):
-        # type: (str, Optional[str], Optional[str], bool) -> Optional[str]
+        # type: (str, Optional[str], Optional[str], bool, bool) -> Optional[str]
         """
         Download remote folder recursively to the local machine, maintaining the sub folder structure
         from the remote storage.
@@ -271,6 +271,7 @@ class StorageManager(object):
             Example: `*.json`
         :param bool overwrite: If False, and target files exist do not download.
             If True always download the remote files. Default False.
+        :param bool skip_zero_size_check: If True no error will be raised for files with zero bytes size.
         :return: Target local folder
         """

View File

@@ -1,3 +1,2 @@
-Keras
 tensorflow>=2.0
 clearml

View File

@@ -1,9 +1,9 @@
-matplotlib
-tensorboardX
-tensorboard>=1.14.0
-torch >= 1.1.0 ; python_version >= '3.8'
-torch <= 1.5.1 ; python_version < '3.8'
-torchvision
-tqdm
-jsonschema==3.2.0 ; python_version <= '3.5'
 clearml
+jsonschema==3.2.0 ; python_version <= '3.5'
+matplotlib
+pytorch-ignite
+tensorboard>=1.14.0
+tensorboardX
+torch>=1.1.0
+torchvision>=0.3.0
+tqdm