Edit docstrings (#1084)

This commit is contained in:
pollfly 2023-07-25 12:19:24 +03:00 committed by GitHub
parent 09363b0d30
commit c451589298
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 24 additions and 24 deletions

View File

@ -610,7 +610,7 @@ class SearchStrategy(object):
:param int top_k: The number of Tasks (experiments) to return. :param int top_k: The number of Tasks (experiments) to return.
:param all_metrics: Default False, only return the objective metric on the metrics dictionary. :param all_metrics: Default False, only return the objective metric on the metrics dictionary.
If True, return all scalar metrics of the experiment If True, return all scalar metrics of the experiment
:param all_hyper_parameters: Default False. If True, return all the hyper-parameters from all the sections. :param all_hyper_parameters: Default False. If True, return all the hyperparameters from all the sections.
:param only_completed: return only completed Tasks. Default False. :param only_completed: return only completed Tasks. Default False.
:return: A list of dictionaries ({task_id: '', hyper_parameters: {}, metrics: {}}), ordered by performance, :return: A list of dictionaries ({task_id: '', hyper_parameters: {}, metrics: {}}), ordered by performance,
@ -929,7 +929,7 @@ class SearchStrategy(object):
class GridSearch(SearchStrategy): class GridSearch(SearchStrategy):
""" """
Grid search strategy controller. Full grid sampling of every hyper-parameter combination. Grid search strategy controller. Full grid sampling of every hyperparameter combination.
""" """
def __init__( def __init__(
@ -1001,7 +1001,7 @@ class GridSearch(SearchStrategy):
class RandomSearch(SearchStrategy): class RandomSearch(SearchStrategy):
""" """
Random search strategy controller. Random uniform sampling of hyper-parameters. Random search strategy controller. Random uniform sampling of hyperparameters.
""" """
# Number of already chosen random samples before assuming we covered the entire hyper-parameter space # Number of already chosen random samples before assuming we covered the entire hyper-parameter space
@ -1105,7 +1105,7 @@ class HyperParameterOptimizer(object):
): ):
# type: (...) -> () # type: (...) -> ()
""" """
Create a new hyper-parameter controller. The newly created object will launch and monitor the new experiments. Create a new hyperparameter controller. The newly created object will launch and monitor the new experiments.
:param str base_task_id: The Task ID to be used as template experiment to optimize. :param str base_task_id: The Task ID to be used as template experiment to optimize.
:param list hyper_parameters: The list of Parameter objects to optimize over. :param list hyper_parameters: The list of Parameter objects to optimize over.
@ -1120,7 +1120,7 @@ class HyperParameterOptimizer(object):
- ``min_global`` - Minimize the min value of *all* reported values for the specific title/series scalar. - ``min_global`` - Minimize the min value of *all* reported values for the specific title/series scalar.
- ``max_global`` - Maximize the max value of *all* reported values for the specific title/series scalar. - ``max_global`` - Maximize the max value of *all* reported values for the specific title/series scalar.
:param class.SearchStrategy optimizer_class: The SearchStrategy optimizer to use for the hyper-parameter search :param class.SearchStrategy optimizer_class: The SearchStrategy optimizer to use for the hyperparameter search
:param int max_number_of_concurrent_tasks: The maximum number of concurrent Tasks (experiments) running at the :param int max_number_of_concurrent_tasks: The maximum number of concurrent Tasks (experiments) running at the
same time. same time.
:param str execution_queue: The execution queue to use for launching Tasks (experiments). :param str execution_queue: The execution queue to use for launching Tasks (experiments).
@ -1516,7 +1516,7 @@ class HyperParameterOptimizer(object):
:param int top_k: The number of Tasks (experiments) to return. :param int top_k: The number of Tasks (experiments) to return.
:param all_metrics: Default False, only return the objective metric on the metrics dictionary. :param all_metrics: Default False, only return the objective metric on the metrics dictionary.
If True, return all scalar metrics of the experiment If True, return all scalar metrics of the experiment
:param all_hyper_parameters: Default False. If True, return all the hyper-parameters from all the sections. :param all_hyper_parameters: Default False. If True, return all the hyperparameters from all the sections.
:param only_completed: return only completed Tasks. Default False. :param only_completed: return only completed Tasks. Default False.
:return: A list of dictionaries ({task_id: '', hyper_parameters: {}, metrics: {}}), ordered by performance, :return: A list of dictionaries ({task_id: '', hyper_parameters: {}, metrics: {}}), ordered by performance,

View File

@ -15,7 +15,7 @@ class RandomSeed(object):
def set_random_seed(seed=1337): def set_random_seed(seed=1337):
# type: (int) -> () # type: (int) -> ()
""" """
Set global seed for all hyper-parameter strategy random number sampling. Set global seed for all hyperparameter strategy random number sampling.
:param int seed: The random seed. :param int seed: The random seed.
""" """
@ -26,7 +26,7 @@ class RandomSeed(object):
def get_random_seed(): def get_random_seed():
# type: () -> int # type: () -> int
""" """
Get the global seed for all hyper-parameter strategy random number sampling. Get the global seed for all hyperparameter strategy random number sampling.
:return: The random seed. :return: The random seed.
""" """
@ -35,14 +35,14 @@ class RandomSeed(object):
class Parameter(RandomSeed): class Parameter(RandomSeed):
""" """
The base hyper-parameter optimization object. The base hyperparameter optimization object.
""" """
_class_type_serialize_name = 'type' _class_type_serialize_name = 'type'
def __init__(self, name): def __init__(self, name):
# type: (Optional[str]) -> () # type: (Optional[str]) -> ()
""" """
Create a new Parameter for hyper-parameter optimization Create a new Parameter for hyperparameter optimization
:param str name: The new Parameter name. This is the parameter name that will be passed to a Task. :param str name: The new Parameter name. This is the parameter name that will be passed to a Task.
""" """
@ -125,7 +125,7 @@ class UniformParameterRange(Parameter):
""" """
Create a parameter to be sampled by the SearchStrategy Create a parameter to be sampled by the SearchStrategy
:param str name: The parameter name. Match the Task hyper-parameter name. :param str name: The parameter name. Match the Task hyperparameter name.
:param float min_value: The minimum sample to use for uniform random sampling. :param float min_value: The minimum sample to use for uniform random sampling.
:param float max_value: The maximum sample to use for uniform random sampling. :param float max_value: The maximum sample to use for uniform random sampling.
:param float step_size: If not ``None``, set step size (quantization) for value sampling. :param float step_size: If not ``None``, set step size (quantization) for value sampling.
@ -172,7 +172,7 @@ class UniformParameterRange(Parameter):
class LogUniformParameterRange(UniformParameterRange): class LogUniformParameterRange(UniformParameterRange):
""" """
Logarithmic uniform randomly sampled hyper-parameter object. Logarithmic uniform randomly sampled hyperparameter object.
""" """
def __init__( def __init__(
@ -188,7 +188,7 @@ class LogUniformParameterRange(UniformParameterRange):
""" """
Create a parameter to be sampled by the SearchStrategy Create a parameter to be sampled by the SearchStrategy
:param str name: The parameter name. Match the Task hyper-parameter name. :param str name: The parameter name. Match the Task hyperparameter name.
:param float min_value: The minimum exponent sample to use for uniform random sampling. :param float min_value: The minimum exponent sample to use for uniform random sampling.
:param float max_value: The maximum exponent sample to use for uniform random sampling. :param float max_value: The maximum exponent sample to use for uniform random sampling.
:param float base: The base used to raise the sampled exponent. :param float base: The base used to raise the sampled exponent.
@ -228,7 +228,7 @@ class UniformIntegerParameterRange(Parameter):
""" """
Create a parameter to be sampled by the SearchStrategy. Create a parameter to be sampled by the SearchStrategy.
:param str name: The parameter name. Match the task hyper-parameter name. :param str name: The parameter name. Match the task hyperparameter name.
:param int min_value: The minimum sample to use for uniform random sampling. :param int min_value: The minimum sample to use for uniform random sampling.
:param int max_value: The maximum sample to use for uniform random sampling. :param int max_value: The maximum sample to use for uniform random sampling.
:param int step_size: The default step size is ``1``. :param int step_size: The default step size is ``1``.
@ -272,7 +272,7 @@ class UniformIntegerParameterRange(Parameter):
class DiscreteParameterRange(Parameter): class DiscreteParameterRange(Parameter):
""" """
Discrete randomly sampled hyper-parameter object. Discrete randomly sampled hyperparameter object.
""" """
def __init__(self, name, values=()): def __init__(self, name, values=()):
@ -280,7 +280,7 @@ class DiscreteParameterRange(Parameter):
""" """
Uniformly sample values from a list of discrete options. Uniformly sample values from a list of discrete options.
:param str name: The parameter name. Match the task hyper-parameter name. :param str name: The parameter name. Match the task hyperparameter name.
:param list values: The list/tuple of valid parameter values to sample from. :param list values: The list/tuple of valid parameter values to sample from.
""" """
super(DiscreteParameterRange, self).__init__(name=name) super(DiscreteParameterRange, self).__init__(name=name)

View File

@ -2160,7 +2160,7 @@ class OutputModel(BaseModel):
# type: (str) -> None # type: (str) -> None
""" """
Set the URI of the storage destination for uploaded model weight files. Set the URI of the storage destination for uploaded model weight files.
Supported storage destinations include S3, Google Cloud Storage), and file locations. Supported storage destinations include S3, Google Cloud Storage, and file locations.
Using this method, file uploads are separate and then a link to each is stored in the model object. Using this method, file uploads are separate and then a link to each is stored in the model object.

View File

@ -12,7 +12,7 @@ Using the **ClearML** [Logger](https://github.com/allegroai/clearml/blob/master/
* [Surface diagrams](#surface-diagrams) * [Surface diagrams](#surface-diagrams)
* [Images](#images) * [Images](#images)
* Track hyper-parameters and OS environment variables * Track hyperparameters and OS environment variables
* Logging experiment parameter [dictionaries](#logging-experiment-parameter-dictionaries) * Logging experiment parameter [dictionaries](#logging-experiment-parameter-dictionaries)
* Specifying [environment variables](#specifying-environment-variables-to-track) to track * Specifying [environment variables](#specifying-environment-variables-to-track) to track
@ -819,7 +819,7 @@ def report_surface(self, title, series, matrix, iteration, xlabels=None, ylabels
### Images ### Images
Use to report an image and upload its contents to the bucket specified in the **ClearML** configuration file, Use to report an image and upload its contents to the bucket specified in the **ClearML** configuration file,
or a [a default upload destination](#set-default-upload-destination), if you set a default. or a [default upload destination](#set-default-upload-destination), if you set a default.
First [get the current logger](#get-the-current-logger) and then use it (see an [example script](https://github.com/allegroai/clearml/blob/master/examples/manual_reporting.py)) with the following method. First [get the current logger](#get-the-current-logger) and then use it (see an [example script](https://github.com/allegroai/clearml/blob/master/examples/manual_reporting.py)) with the following method.
@ -925,13 +925,13 @@ def report_image(self, title, series, iteration, local_path=None, matrix=None, m
</tbody> </tbody>
</table> </table>
## Hyper-parameters and Environment Variables ## Hyperparameters and Environment Variables
### Logging Experiment Parameter Dictionaries ### Logging Experiment Parameter Dictionaries
In order for **ClearML** to log a dictionary of parameters, use the `Task.connect` method. In order for **ClearML** to log a dictionary of parameters, use the `Task.connect` method.
For example, to log the hyper-parameters <code>learning_rate</code>, <code>batch_size</code>, <code>display_step</code>, <code>model_path</code>, <code>n_hidden_1</code>, and <code>n_hidden_2</code>: For example, to log the hyperparameters <code>learning_rate</code>, <code>batch_size</code>, <code>display_step</code>, <code>model_path</code>, <code>n_hidden_1</code>, and <code>n_hidden_2</code>:
```python ```python
# Create a dictionary of parameters # Create a dictionary of parameters

View File

@ -3,7 +3,7 @@ try:
from lightning.pytorch.demos.boring_classes import DemoModel, BoringDataModule from lightning.pytorch.demos.boring_classes import DemoModel, BoringDataModule
except ImportError: except ImportError:
import sys import sys
print("Module 'lightning' not installed (only available for Python 3.8+") print("Module 'lightning' not installed (only available for Python 3.8+)")
sys.exit(0) sys.exit(0)
from clearml import Task from clearml import Task

View File

@ -65,7 +65,7 @@ if __name__ == '__main__':
parser = LitClassifier.add_model_specific_args(parser) parser = LitClassifier.add_model_specific_args(parser)
args = parser.parse_args() args = parser.parse_args()
Task.init(project_name="examples-internal", task_name="lightning checkpoint issue and argparser") Task.init(project_name="examples", task_name="pytorch lightning MNIST")
# ------------ # ------------
# data # data