diff --git a/clearml/automation/optimization.py b/clearml/automation/optimization.py
index 75f14628..080142fe 100644
--- a/clearml/automation/optimization.py
+++ b/clearml/automation/optimization.py
@@ -610,7 +610,7 @@ class SearchStrategy(object):
:param int top_k: The number of Tasks (experiments) to return.
:param all_metrics: Default False, only return the objective metric on the metrics dictionary.
If True, return all scalar metrics of the experiment
- :param all_hyper_parameters: Default False. If True, return all the hyper-parameters from all the sections.
+ :param all_hyper_parameters: Default False. If True, return all the hyperparameters from all the sections.
:param only_completed: return only completed Tasks. Default False.
:return: A list of dictionaries ({task_id: '', hyper_parameters: {}, metrics: {}}), ordered by performance,
@@ -791,7 +791,7 @@ class SearchStrategy(object):
def _validate_base_task(self):
# type: () -> ()
"""
- Check the base task exists and contains the requested Objective metric and hyper parameters.
+ Check the base task exists and contains the requested Objective metric and hyperparameters.
"""
# check if the task exists
try:
@@ -929,7 +929,7 @@ class SearchStrategy(object):
class GridSearch(SearchStrategy):
"""
- Grid search strategy controller. Full grid sampling of every hyper-parameter combination.
+ Grid search strategy controller. Full grid sampling of every hyperparameter combination.
"""
def __init__(
@@ -1001,7 +1001,7 @@ class GridSearch(SearchStrategy):
class RandomSearch(SearchStrategy):
"""
- Random search strategy controller. Random uniform sampling of hyper-parameters.
+ Random search strategy controller. Random uniform sampling of hyperparameters.
"""
# Number of already chosen random samples before assuming we covered the entire hyper-parameter space
@@ -1105,7 +1105,7 @@ class HyperParameterOptimizer(object):
):
# type: (...) -> ()
"""
- Create a new hyper-parameter controller. The newly created object will launch and monitor the new experiments.
+ Create a new hyperparameter controller. The newly created object will launch and monitor the new experiments.
:param str base_task_id: The Task ID to be used as template experiment to optimize.
:param list hyper_parameters: The list of Parameter objects to optimize over.
@@ -1120,7 +1120,7 @@ class HyperParameterOptimizer(object):
- ``min_global`` - Minimize the min value of *all* reported values for the specific title/series scalar.
- ``max_global`` - Maximize the max value of *all* reported values for the specific title/series scalar.
- :param class.SearchStrategy optimizer_class: The SearchStrategy optimizer to use for the hyper-parameter search
+ :param class.SearchStrategy optimizer_class: The SearchStrategy optimizer to use for the hyperparameter search
:param int max_number_of_concurrent_tasks: The maximum number of concurrent Tasks (experiments) running at the
same time.
:param str execution_queue: The execution queue to use for launching Tasks (experiments).
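
For context, a minimal hedged sketch of wiring these constructor arguments together (the task ID, parameter names such as 'General/learning_rate', and the queue name are placeholders, not taken from this diff):

```python
from clearml.automation import (
    DiscreteParameterRange,
    HyperParameterOptimizer,
    RandomSearch,
    UniformParameterRange,
)

# Placeholder template-task ID; in practice this is an existing experiment
# that already reports the objective scalar.
optimizer = HyperParameterOptimizer(
    base_task_id='<template_task_id>',
    hyper_parameters=[
        UniformParameterRange('General/learning_rate', min_value=1e-4, max_value=1e-1),
        DiscreteParameterRange('General/batch_size', values=[32, 64, 128]),
    ],
    objective_metric_title='validation',
    objective_metric_series='accuracy',
    objective_metric_sign='max',          # or 'min', 'min_global', 'max_global'
    optimizer_class=RandomSearch,         # GridSearch samples the full grid instead
    max_number_of_concurrent_tasks=2,
    execution_queue='default',            # queue an agent is listening on
)
optimizer.start()
optimizer.wait()
optimizer.stop()
```
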
@@ -1516,7 +1516,7 @@ class HyperParameterOptimizer(object):
:param int top_k: The number of Tasks (experiments) to return.
:param all_metrics: Default False, only return the objective metric on the metrics dictionary.
If True, return all scalar metrics of the experiment
- :param all_hyper_parameters: Default False. If True, return all the hyper-parameters from all the sections.
+ :param all_hyper_parameters: Default False. If True, return all the hyperparameters from all the sections.
:param only_completed: return only completed Tasks. Default False.
:return: A list of dictionaries ({task_id: '', hyper_parameters: {}, metrics: {}}), ordered by performance,
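
A hedged usage sketch of the call documented above; the method name is not visible in this hunk, so `get_top_experiments_details` is assumed, and `optimizer` stands for a HyperParameterOptimizer that has already been started:

```python
# Assumed method name (the hunk header hides the signature); `optimizer` is a
# started HyperParameterOptimizer instance.
top = optimizer.get_top_experiments_details(top_k=3, all_metrics=True, only_completed=True)
for entry in top:
    # Each entry is a dict: {'task_id': ..., 'hyper_parameters': {...}, 'metrics': {...}}
    print(entry['task_id'], entry['metrics'])
```
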
diff --git a/clearml/automation/parameters.py b/clearml/automation/parameters.py
index a32cee74..d88b2cfc 100644
--- a/clearml/automation/parameters.py
+++ b/clearml/automation/parameters.py
@@ -15,7 +15,7 @@ class RandomSeed(object):
def set_random_seed(seed=1337):
# type: (int) -> ()
"""
- Set global seed for all hyper-parameter strategy random number sampling.
+ Set global seed for all hyperparameter strategy random number sampling.
:param int seed: The random seed.
"""
@@ -26,7 +26,7 @@ class RandomSeed(object):
def get_random_seed():
# type: () -> int
"""
- Get the global seed for all hyper-parameter strategy random number sampling.
+ Get the global seed for all hyperparameter strategy random number sampling.
:return: The random seed.
"""
diff --git a/clearml/model.py b/clearml/model.py
index fa51b80a..7b9271c9 100644
--- a/clearml/model.py
+++ b/clearml/model.py
@@ -2160,7 +2160,7 @@ class OutputModel(BaseModel):
# type: (str) -> None
"""
Set the URI of the storage destination for uploaded model weight files.
- Supported storage destinations include S3, Google Cloud Storage), and file locations.
+ Supported storage destinations include S3, Google Cloud Storage, and file locations.
Using this method, file uploads are separate and then a link to each is stored in the model object.
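
A hedged sketch of the documented behavior, assuming this docstring belongs to OutputModel.set_upload_destination (the signature sits outside the hunk); the project, task, and bucket names are placeholders:

```python
from clearml import OutputModel, Task

task = Task.init(project_name='examples', task_name='upload destination demo')
output_model = OutputModel(task=task)
# Any supported destination works here: 's3://bucket/folder',
# 'gs://bucket/folder', or a file location such as '/mnt/shared/models'.
output_model.set_upload_destination(uri='s3://my-bucket/models')
```
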
diff --git a/docs/logger.md b/docs/logger.md
index 58842593..7c346020 100644
--- a/docs/logger.md
+++ b/docs/logger.md
@@ -819,7 +819,7 @@ def report_surface(self, title, series, matrix, iteration, xlabels=None, ylabels
### Images
Use to report an image and upload its contents to the bucket specified in the **ClearML** configuration file,
-or a [a default upload destination](#set-default-upload-destination), if you set a default.
+or a [default upload destination](#set-default-upload-destination), if you set a default.
First [get the current logger](#get-the-current-logger) and then use it (see an [example script](https://github.com/allegroai/clearml/blob/master/examples/manual_reporting.py)) with the following method.
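
A brief hedged sketch of the reporting call described here (the title, series, and file path are placeholders):

```python
from clearml import Logger

# Report a local image file; its contents are uploaded to the configured
# bucket or to the default upload destination, if one was set.
logger = Logger.current_logger()
logger.report_image(
    title='debug samples',
    series='input image',
    iteration=1,
    local_path='/tmp/sample.png',
)
```
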
@@ -931,7 +931,7 @@ def report_image(self, title, series, iteration, local_path=None, matrix=None, m
In order for **ClearML** to log a dictionary of parameters, use the `Task.connect` method.
-For example, to log the hyper-parameters learning_rate, batch_size, display_step, model_path, n_hidden_1, and n_hidden_2:
+For example, to log the hyperparameters learning_rate, batch_size, display_step, model_path, n_hidden_1, and n_hidden_2:
```python
# Create a dictionary of parameters
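# (Hedged continuation sketch, not part of the original snippet; the values
# below are placeholders, and `Task` is assumed to be imported from clearml
# earlier in the example.) Connecting the dictionary logs the hyperparameters
# named above to the experiment.
parameters_dict = {
    'learning_rate': 0.001,
    'batch_size': 100,
    'display_step': 10,
    'model_path': '/tmp/model.ckpt',
    'n_hidden_1': 256,
    'n_hidden_2': 256,
}
parameters_dict = Task.current_task().connect(parameters_dict)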
diff --git a/examples/frameworks/pytorch-lightning/pytorch_lightning_example.py b/examples/frameworks/pytorch-lightning/pytorch_lightning_example.py
index 337d829e..7c85a9e2 100644
--- a/examples/frameworks/pytorch-lightning/pytorch_lightning_example.py
+++ b/examples/frameworks/pytorch-lightning/pytorch_lightning_example.py
@@ -65,7 +65,7 @@ if __name__ == '__main__':
parser = LitClassifier.add_model_specific_args(parser)
args = parser.parse_args()
- Task.init(project_name="examples-internal", task_name="lightning checkpoint issue and argparser")
+ Task.init(project_name="examples", task_name="pytorch lightning MNIST")
# ------------
# data