From 23394a265ddbb6ff1de53525b998e280287f83e5 Mon Sep 17 00:00:00 2001
From: allegroai <>
Date: Thu, 30 Jul 2020 15:07:25 +0300
Subject: [PATCH] flake8 + change ModuleNotFoundError to ImportError (support
 py 2.7/3.5)

---
 trains/automation/optimization.py             |  3 ++-
 trains/automation/optuna/optuna.py            |  5 ++--
 trains/backend_api/api_proxy.py               |  4 +--
 trains/backend_config/config.py               |  4 +--
 trains/backend_interface/metrics/interface.py |  4 +--
 trains/backend_interface/task/args.py         |  3 ++-
 trains/binding/artifacts.py                   |  4 +--
 trains/binding/frameworks/__init__.py         |  4 +--
 trains/binding/frameworks/fastai_bind.py      |  4 +--
 trains/binding/import_bind.py                 |  3 ++-
 trains/config/__init__.py                     |  2 +-
 trains/task.py                                |  2 +-
 trains/utilities/deferred.py                  |  6 ++---
 trains/utilities/plotly_reporter.py           |  6 +++--
 trains/utilities/pyhocon/config_parser.py     | 27 +++++++++++--------
 trains/utilities/pyhocon/config_tree.py       |  3 ++-
 16 files changed, 47 insertions(+), 37 deletions(-)

diff --git a/trains/automation/optimization.py b/trains/automation/optimization.py
index 720e3b13..9fd00827 100644
--- a/trains/automation/optimization.py
+++ b/trains/automation/optimization.py
@@ -383,7 +383,8 @@ class SearchStrategy(object):
 
         If returns ``False``, the job was aborted / completed, and should be taken off the current job list
 
-        If there is a budget limitation, this call should update ``self.budget.compute_time.update`` / ``self.budget.iterations.update``
+        If there is a budget limitation, this call should update
+        ``self.budget.compute_time.update`` / ``self.budget.iterations.update``
 
         :param TrainsJob job: A ``TrainsJob`` object to monitor.
 
diff --git a/trains/automation/optuna/optuna.py b/trains/automation/optuna/optuna.py
index a0d4f963..27e8c161 100644
--- a/trains/automation/optuna/optuna.py
+++ b/trains/automation/optuna/optuna.py
@@ -1,9 +1,8 @@
-from time import sleep, time
+from time import sleep
 from typing import Any, Optional, Sequence
 
 from ..optimization import Objective, SearchStrategy
-from ..parameters import (
-    DiscreteParameterRange, UniformParameterRange, RandomSeed, UniformIntegerParameterRange, Parameter, )
+from ..parameters import (DiscreteParameterRange, Parameter, UniformIntegerParameterRange, UniformParameterRange)
 from ...task import Task
 
 try:
diff --git a/trains/backend_api/api_proxy.py b/trains/backend_api/api_proxy.py
index 2fc02d5b..d0f7ba0f 100644
--- a/trains/backend_api/api_proxy.py
+++ b/trains/backend_api/api_proxy.py
@@ -57,10 +57,10 @@ class ExtApiServiceProxy(ApiServiceProxy):
         for module_path in self._get_services_modules():
             try:
                 return importlib.import_module(name, package=module_path)
-            except ModuleNotFoundError:
+            except ImportError:
                 pass
 
-        raise ModuleNotFoundError(
+        raise ImportError(
             "No module '{}' in all predefined services module paths".format(name)
         )
 
diff --git a/trains/backend_config/config.py b/trains/backend_config/config.py
index ac469705..52d344b2 100644
--- a/trains/backend_config/config.py
+++ b/trains/backend_config/config.py
@@ -311,8 +311,8 @@ class Config(object):
         try:
             return ConfigFactory.parse_file(file_path)
         except ParseSyntaxException as ex:
-            msg = "Failed parsing {0} ({1.__class__.__name__}): (at char {1.loc}, line:{1.lineno}, col:{1.column})".format(
-                file_path, ex)
+            msg = "Failed parsing {0} ({1.__class__.__name__}): " \
+                  "(at char {1.loc}, line:{1.lineno}, col:{1.column})".format(file_path, ex)
             six.reraise(
                 ConfigurationError,
                 ConfigurationError(msg, file_path=file_path),
diff --git a/trains/backend_interface/metrics/interface.py b/trains/backend_interface/metrics/interface.py
index cc9aed89..0fa626b1 100644
--- a/trains/backend_interface/metrics/interface.py
+++ b/trains/backend_interface/metrics/interface.py
@@ -65,8 +65,8 @@ class Metrics(InterfaceBase):
         """ Write events to the backend, uploading any required files.
 
         :param events: A list of event objects
-        :param async_enable: If True, upload is performed asynchronously and an AsyncResult object is returned, otherwise a
-            blocking call is made and the upload result is returned.
+        :param async_enable: If True, upload is performed asynchronously and an AsyncResult object is returned,
+            otherwise a blocking call is made and the upload result is returned.
         :param callback: A optional callback called when upload was completed in case async is True
         :return: .backend_api.session.CallResult if async is False otherwise AsyncResult. Note that if no events
             were sent, None will be returned.
diff --git a/trains/backend_interface/task/args.py b/trains/backend_interface/task/args.py
index 97b58bc3..2555c754 100644
--- a/trains/backend_interface/task/args.py
+++ b/trains/backend_interface/task/args.py
@@ -226,7 +226,8 @@ class _Arguments(object):
             # if we have an int, we should cast to float, because it is more generic
             if var_type == int:
                 var_type = float
-            elif var_type == type(None):  # noqa: E721 - do not change! because isinstance(var_type, type(None)) === False
+            elif var_type == type(None):  # noqa: E721 - do not change!
+                # because isinstance(var_type, type(None)) === False
                 var_type = str
             # now we should try and cast the value if we can
             try:
diff --git a/trains/binding/artifacts.py b/trains/binding/artifacts.py
index 2a813c4f..d7e46b2f 100644
--- a/trains/binding/artifacts.py
+++ b/trains/binding/artifacts.py
@@ -473,7 +473,7 @@ class Artifacts(object):
             try:
                 with open(local_filename, 'wt') as f:
                     f.write(artifact_object)
-            except Exception as ex:
+            except Exception:
                 # cleanup and raise exception
                 os.unlink(local_filename)
                 raise
@@ -495,7 +495,7 @@ class Artifacts(object):
             try:
                 with open(local_filename, 'wb') as f:
                     pickle.dump(artifact_object, f)
-            except Exception as ex:
+            except Exception:
                 # cleanup and raise exception
                 os.unlink(local_filename)
                 raise
diff --git a/trains/binding/frameworks/__init__.py b/trains/binding/frameworks/__init__.py
index 09f41003..cdf66209 100644
--- a/trains/binding/frameworks/__init__.py
+++ b/trains/binding/frameworks/__init__.py
@@ -109,7 +109,7 @@ class WeightsFileHandler(object):
 
     @classmethod
     def add_pre_callback(cls, callback_function):
-        # type: (Callable[[Union[str, WeightsFileHandler.CallbackType], WeightsFileHandler.ModelInfo], Optional[WeightsFileHandler.ModelInfo]]) -> int
+        # type: (Callable[[Union[str, WeightsFileHandler.CallbackType], WeightsFileHandler.ModelInfo], Optional[WeightsFileHandler.ModelInfo]]) -> int  # noqa
         """
         Add a pre-save/load callback for weights files and return its handle. If the callback was already added,
         return the existing handle.
@@ -127,7 +127,7 @@ class WeightsFileHandler(object):
 
     @classmethod
     def add_post_callback(cls, callback_function):
-        # type: (Callable[[Union[str, WeightsFileHandler.CallbackType], WeightsFileHandler.ModelInfo], WeightsFileHandler.ModelInfo]) -> int
+        # type: (Callable[[Union[str, WeightsFileHandler.CallbackType], WeightsFileHandler.ModelInfo], WeightsFileHandler.ModelInfo]) -> int  # noqa
         """
         Add a post-save/load callback for weights files and return its handle. If the callback was already added,
         return the existing handle.
diff --git a/trains/binding/frameworks/fastai_bind.py b/trains/binding/frameworks/fastai_bind.py
index 1f51ee84..b166b3be 100644
--- a/trains/binding/frameworks/fastai_bind.py
+++ b/trains/binding/frameworks/fastai_bind.py
@@ -42,7 +42,7 @@ class PatchFastai(object):
         try:
             PatchFastai.__metrics_names = ["train_loss"] if recorder.no_val else ["train_loss", "valid_loss"]
             PatchFastai.__metrics_names += recorder.metrics_names
-        except Exception as ex:
+        except Exception:
             pass
 
     @staticmethod
@@ -84,7 +84,7 @@ class PatchFastai(object):
                 iteration = kwargs.get("iteration", 0)
                 for name, val in stats_report.items():
                     logger.report_scalar(title="model_stats_gradients", series=name, value=val, iteration=iteration)
-            except Exception as ex:
+            except Exception:
                 pass
 
     @staticmethod
diff --git a/trains/binding/import_bind.py b/trains/binding/import_bind.py
index 4f093f57..da8fcc41 100644
--- a/trains/binding/import_bind.py
+++ b/trains/binding/import_bind.py
@@ -74,5 +74,6 @@ class PostImportHookPatching(object):
 
     @staticmethod
     def remove_on_import(name, func):
-        if name in PostImportHookPatching._post_import_hooks and func in PostImportHookPatching._post_import_hooks[name]:
+        if name in PostImportHookPatching._post_import_hooks and \
+                func in PostImportHookPatching._post_import_hooks[name]:
             PostImportHookPatching._post_import_hooks[name].remove(func)
diff --git a/trains/config/__init__.py b/trains/config/__init__.py
index 43030de3..3486b3c3 100644
--- a/trains/config/__init__.py
+++ b/trains/config/__init__.py
@@ -19,7 +19,7 @@ def get_cache_dir():
     cache_base_dir = Path(  # noqa: F405
         expandvars(
             expanduser(
-                TRAINS_CACHE_DIR.get() or
+                TRAINS_CACHE_DIR.get() or  # noqa: F405
                 config.get("storage.cache.default_base_dir") or
                 DEFAULT_CACHE_DIR  # noqa: F405
             )
diff --git a/trains/task.py b/trains/task.py
index 376f3b1d..63158137 100644
--- a/trains/task.py
+++ b/trains/task.py
@@ -2375,7 +2375,7 @@ class Task(_Task):
                     relative_file_name = filename.relative_to(offline_folder).as_posix()
                     zf.write(filename.as_posix(), arcname=relative_file_name)
             print('TRAINS Task: Offline session stored in {}'.format(zip_file))
-        except Exception as ex:
+        except Exception:
             pass
 
         # delete locking object (lock file)
diff --git a/trains/utilities/deferred.py b/trains/utilities/deferred.py
index 442aed00..93d75119 100644
--- a/trains/utilities/deferred.py
+++ b/trains/utilities/deferred.py
@@ -61,9 +61,9 @@ class DeferredExecution(object):
     def defer_execution(self, condition_or_attr_name=True):
         """
         Deferred execution decorator, designed to wrap class functions for classes containing a deferred execution pool.
-        :param condition_or_attr_name: Condition controlling whether wrapped function should be deferred. True by default.
-            If a callable is provided, it will be called with the class instance (self) as first argument.
-            If a string is provided, a class instance (self) attribute by that name is evaluated.
+        :param condition_or_attr_name: Condition controlling whether wrapped function should be deferred.
+            True by default. If a callable is provided, it will be called with the class instance (self)
+            as first argument. If a string is provided, a class instance (self) attribute by that name is evaluated.
         :return:
         """
         def decorator(func):
diff --git a/trains/utilities/plotly_reporter.py b/trains/utilities/plotly_reporter.py
index af4d4a85..f1d77a9e 100644
--- a/trains/utilities/plotly_reporter.py
+++ b/trains/utilities/plotly_reporter.py
@@ -147,7 +147,8 @@ def create_2d_scatter_series(np_row_wise, title="Scatter", series_name="Series",
     :param layout_config: optional dictionary for layout configuration, passed directly to plotly
     :return: Plotly chart dict.
     """
-    plotly_obj = _plotly_scatter_layout_dict(title=title, xaxis_title=xtitle, yaxis_title=ytitle, comment=comment)  # noqa: F841
+    plotly_obj = _plotly_scatter_layout_dict(  # noqa: F841
+        title=title, xaxis_title=xtitle, yaxis_title=ytitle, comment=comment)
 
     assert np_row_wise.ndim == 2, "Expected a 2D numpy array"
     assert np_row_wise.shape[1] == 2, "Expected two columns X/Y e.g. [(x0,y0), (x1,y1) ...]"
@@ -481,7 +482,8 @@ def create_plotly_table(table_plot, title, series, layout_config=None):
     """
     if not pd:
         raise UsageError(
-            "pandas is required in order to support reporting tables using CSV or a URL, please install the pandas python package"
+            "pandas is required in order to support reporting tables using CSV or a URL, "
+            "please install the pandas python package"
         )
     index_added = not isinstance(table_plot.index, pd.RangeIndex)
     headers_values = list([col] for col in table_plot.columns)
diff --git a/trains/utilities/pyhocon/config_parser.py b/trains/utilities/pyhocon/config_parser.py
index a9b95efc..5e4a112f 100755
--- a/trains/utilities/pyhocon/config_parser.py
+++ b/trains/utilities/pyhocon/config_parser.py
@@ -87,8 +87,9 @@ class ConfigFactory(object):
         :param resolve: if true, resolve substitutions
         :type resolve: boolean
         :param unresolved_value: assigned value value to unresolved substitution.
-        If overriden with a default value, it will replace all unresolved value to the default value.
-        If it is set to to pyhocon.STR_SUBSTITUTION then it will replace the value by its substitution expression (e.g., ${x})
+            If overriden with a default value, it will replace all unresolved value to the default value.
+            If it is set to to pyhocon.STR_SUBSTITUTION then it will replace the value by its
+            substitution expression (e.g., ${x})
         :type unresolved_value: class
         :return: Config object
         :type return: Config
@@ -112,8 +113,9 @@ class ConfigFactory(object):
         :param resolve: if true, resolve substitutions
         :type resolve: boolean
         :param unresolved_value: assigned value value to unresolved substitution.
-        If overriden with a default value, it will replace all unresolved value to the default value.
-        If it is set to to pyhocon.STR_SUBSTITUTION then it will replace the value by its substitution expression (e.g., ${x})
+            If overriden with a default value, it will replace all unresolved value to the default value.
+            If it is set to to pyhocon.STR_SUBSTITUTION then it will replace the value by
+            its substitution expression (e.g., ${x})
         :type unresolved_value: boolean
         :return: Config object or []
         :type return: Config or list
@@ -141,8 +143,9 @@ class ConfigFactory(object):
         :param resolve: if true, resolve substitutions
         :type resolve: boolean
         :param unresolved_value: assigned value value to unresolved substitution.
-        If overriden with a default value, it will replace all unresolved value to the default value.
-        If it is set to to pyhocon.STR_SUBSTITUTION then it will replace the value by its substitution expression (e.g., ${x})
+            If overriden with a default value, it will replace all unresolved value to the default value.
+            If it is set to to pyhocon.STR_SUBSTITUTION then it will replace the value by
+            its substitution expression (e.g., ${x})
         :type unresolved_value: boolean
         :return: Config object
         :type return: Config
@@ -234,8 +237,9 @@ class ConfigParser(object):
         :param resolve: if true, resolve substitutions
         :type resolve: boolean
        :param unresolved_value: assigned value value to unresolved substitution.
-        If overriden with a default value, it will replace all unresolved value to the default value.
-        If it is set to to pyhocon.STR_SUBSTITUTION then it will replace the value by its substitution expression (e.g., ${x})
+            If overriden with a default value, it will replace all unresolved value to the default value.
+            If it is set to to pyhocon.STR_SUBSTITUTION then it will replace the value by
+            its substitution expression (e.g., ${x})
         :type unresolved_value: boolean
         :return: a ConfigTree or a list
         """
@@ -434,7 +438,8 @@
         config = config_expr.parseString(content, parseAll=True)[0]
 
         if resolve:
-            allow_unresolved = resolve and unresolved_value is not DEFAULT_SUBSTITUTION and unresolved_value is not MANDATORY_SUBSTITUTION
+            allow_unresolved = resolve and unresolved_value is not DEFAULT_SUBSTITUTION and \
+                unresolved_value is not MANDATORY_SUBSTITUTION
             has_unresolved = cls.resolve_substitutions(config, allow_unresolved)
             if has_unresolved and unresolved_value is MANDATORY_SUBSTITUTION:
                 raise ConfigSubstitutionException(
@@ -489,8 +494,8 @@
                     if len(prop_path) > 1 and config.get(substitution.variable, None) is not None:
                         continue  # If value is present in latest version, don't do anything
                     if prop_path[0] == key:
-                        if isinstance(
-                                previous_item, ConfigValues) and not accept_unresolved:  # We hit a dead end, we cannot evaluate
+                        if isinstance(previous_item, ConfigValues) and not accept_unresolved:
+                            # We hit a dead end, we cannot evaluate
                             raise ConfigSubstitutionException(
                                 "Property {variable} cannot be substituted. Check for cycles.".format(
                                     variable=substitution.variable
diff --git a/trains/utilities/pyhocon/config_tree.py b/trains/utilities/pyhocon/config_tree.py
index d133e0b2..138a9770 100755
--- a/trains/utilities/pyhocon/config_tree.py
+++ b/trains/utilities/pyhocon/config_tree.py
@@ -515,7 +515,8 @@ class ConfigValues(object):
                 tok_type = determine_type(token)
                 if first_tok_type is not tok_type:
                     raise ConfigWrongTypeException(
-                        "Token '{token}' of type {tok_type} (index {index}) must be of type {req_tok_type} (line: {line}, col: {col})".format(
+                        "Token '{token}' of type {tok_type} (index {index}) must be of type {req_tok_type} "
+                        "(line: {line}, col: {col})".format(
                             token=token,
                             index=index + 1,
                             tok_type=tok_type.__name__,
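
Background on the ModuleNotFoundError to ImportError change (illustration only, not part of the patch): ModuleNotFoundError was added in Python 3.6 as a subclass of ImportError, so on 2.7/3.5 the name does not exist and an "except ModuleNotFoundError:" clause raises NameError as soon as an import actually fails. Catching ImportError behaves the same on 3.6+ and keeps the code importable and working on the older interpreters. A minimal sketch of the pattern; the try_import helper and the module name below are hypothetical, not taken from the patch:

    import importlib


    def try_import(name):
        """Return the imported module, or None if it is not installed."""
        try:
            return importlib.import_module(name)
        except ImportError:  # also matches ModuleNotFoundError on Python 3.6+
            return None


    # Behaves identically on Python 2.7, 3.5 and 3.6+.
    print(try_import("some_optional_dependency"))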