Add Python syntax highlighting (#68)
parent 7c8fd227d3, commit f94cda0c98
The example code attempts to import `OptimizerOptuna` for the search strategy. If `clearml.automation.optuna` is not
installed, it attempts to import `OptimizerBOHB`. If `clearml.automation.hpbandster` is not installed, it uses
`RandomSearch` for the search strategy.

```python
aSearchStrategy = None

if not aSearchStrategy:
    logging.getLogger().warning(
        'Apologies, it seems you do not have \'optuna\' or \'hpbandster\' installed, '
        'we will be using RandomSearch strategy instead')
    aSearchStrategy = RandomSearch
```
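The fallback chain described above can be pictured as a small import cascade. The following is a minimal sketch of that logic, not a verbatim copy of the example script; it assumes the `clearml.automation.optuna` and `clearml.automation.hpbandster` sub-packages named in the text, and `RandomSearch` from `clearml.automation`:

```python
import logging

aSearchStrategy = None

# 1st choice: Optuna-based optimizer (requires the `optuna` package)
try:
    from clearml.automation.optuna import OptimizerOptuna
    aSearchStrategy = OptimizerOptuna
except ImportError:
    pass

# 2nd choice: BOHB optimizer (requires the `hpbandster` package)
if not aSearchStrategy:
    try:
        from clearml.automation.hpbandster import OptimizerBOHB
        aSearchStrategy = OptimizerBOHB
    except ImportError:
        pass

# final fallback: plain random sampling of the search space
if not aSearchStrategy:
    from clearml.automation import RandomSearch
    logging.getLogger().warning('optuna/hpbandster not found, falling back to RandomSearch')
    aSearchStrategy = RandomSearch
```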

## Define a Callback

When the optimization starts, a callback is provided that is called each time a job (experiment) completes and reports the best performing set of hyperparameters found so far. In the script, the `job_complete_callback` function receives the ID of the best performing job in `top_performance_job_id`.

```python
def job_complete_callback(
    job_id,                  # type: str
    objective_value,         # type: float
    objective_iteration,     # type: int
    job_parameters,          # type: dict
    top_performance_job_id   # type: str
):
    print('Job completed!', job_id, objective_value, objective_iteration, job_parameters)
    if job_id == top_performance_job_id:
        print('WOOT WOOT we broke the record! Objective reached {}'.format(objective_value))
```

## Initialize the Optimization Task

We set the Task type to optimizer, and create a new experiment (and Task object) each time the optimizer runs (`reuse_last_task_id=False`).

When the code runs, it creates an experiment named **Automatic Hyper-Parameter Optimization** that is associated with
the project **Hyper-Parameter Optimization**, which can be seen in the **ClearML Web UI**.

```python
# Connecting CLEARML
task = Task.init(project_name='Hyper-Parameter Optimization',
                 task_name='Automatic Hyper-Parameter Optimization',
                 task_type=Task.TaskTypes.optimizer,
                 reuse_last_task_id=False)
```

## Set Up the Arguments

The experiment to optimize must have run at least once so that it is stored in **ClearML Server**, and, therefore, can be cloned.

Since the arguments dictionary is connected to the Task, after the code runs once, the `template_task_id` can be changed
to optimize a different experiment; see [tuning experiments](../../../webapp/webapp_exp_tuning.md).

```python
# experiment template to optimize in the hyper-parameter optimization
args = {
    'template_task_id': None,
}

if not args['template_task_id']:
    args['template_task_id'] = Task.get_task(
        project_name='examples', task_name='Keras HP optimization base').id
```
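The dictionary above is what gets connected to the Task. A minimal sketch of that connection is shown below; the `run_as_service` key is an assumption here (it is read by the service section further down), and the call used is the standard `Task.connect()`:

```python
# experiment template to optimize in the hyper-parameter optimization
args = {
    'template_task_id': None,   # ID of the experiment (Task) to clone and optimize
    'run_as_service': False,    # read later to decide whether to enqueue into the services queue
}
# connecting the dictionary stores it with the Task and makes it editable in the ClearML Web UI
args = task.connect(args)
```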

## Instantiate the Optimizer Object

Instantiate an [automation.optimization.HyperParameterOptimizer](../../../references/sdk/hpo_optimization_hyperparameteroptimizer.md)
object, setting the optimization parameters, beginning with the ID of the experiment to optimize.

```python
an_optimizer = HyperParameterOptimizer(
    # This is the experiment we want to optimize
    base_task_id=args['template_task_id'],
```

Set the hyperparameter ranges to sample, instantiating them as **ClearML** automation objects using [automation.parameters.UniformIntegerParameterRange](../../../references/sdk/hpo_parameters_uniformintegerparameterrange.md)
and [automation.parameters.DiscreteParameterRange](../../../references/sdk/hpo_parameters_discreteparameterrange.md).

```python
    hyper_parameters=[
        UniformIntegerParameterRange('layer_1', min_value=128, max_value=512, step_size=128),
        UniformIntegerParameterRange('layer_2', min_value=128, max_value=512, step_size=128),
        DiscreteParameterRange('batch_size', values=[96, 128, 160]),
        DiscreteParameterRange('epochs', values=[30]),
    ],
```
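Only integer and discrete ranges appear in this example. If a continuous hyperparameter were added, it could use `automation.parameters.UniformParameterRange`; the `dropout` entry below is a hypothetical illustration, not part of the example:

```python
    hyper_parameters=[
        # ...the integer and discrete ranges shown above...
        # hypothetical continuous range, sampled uniformly between 0.0 and 0.5
        UniformParameterRange('dropout', min_value=0.0, max_value=0.5),
    ],
```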

Set the metric to optimize and the optimization objective.

```python
    objective_metric_title='val_acc',
    objective_metric_series='val_acc',
    objective_metric_sign='max',
```
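Here the metric is a validation accuracy, so the sign is `max`. If the tracked scalar were a loss, the same three arguments would switch to a minimization objective; the `val_loss` title and series below are illustrative:

```python
    objective_metric_title='val_loss',
    objective_metric_series='val_loss',
    objective_metric_sign='min',
```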

Set the number of concurrent Tasks.

```python
    max_number_of_concurrent_tasks=2,
```
Set the optimization strategy, see [Set the search strategy for optimization](#set-the-search-strategy-for-optimization).

```python
    optimizer_class=aSearchStrategy,
```
Specify the queue to use for remote execution. This is overridden if the optimizer runs as a service.

```python
    execution_queue='1xGPU',
```
Specify the remaining parameters, including the time limit per Task (minutes), the period for checking the optimization (minutes), the maximum number of jobs to launch, and the minimum and maximum number of iterations for each Task.

```python
    # Optional: Limit the execution time of a single experiment, in minutes.
    # (this is optional, and if using OptimizerBOHB, it is ignored)
    time_limit_per_job=10.,
    # (This is optional, unless using OptimizerBOHB where this is a must)
    max_iteration_per_job=30,

) # done creating HyperParameterOptimizer
```
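All of the snippets above are arguments of a single constructor call. As a reference, here is a condensed sketch that assembles only the parameters shown in this walkthrough (the full example passes additional arguments that are omitted here):

```python
an_optimizer = HyperParameterOptimizer(
    base_task_id=args['template_task_id'],   # the experiment to optimize
    hyper_parameters=[
        UniformIntegerParameterRange('layer_1', min_value=128, max_value=512, step_size=128),
        UniformIntegerParameterRange('layer_2', min_value=128, max_value=512, step_size=128),
        DiscreteParameterRange('batch_size', values=[96, 128, 160]),
        DiscreteParameterRange('epochs', values=[30]),
    ],
    objective_metric_title='val_acc',        # metric to optimize
    objective_metric_series='val_acc',
    objective_metric_sign='max',             # maximize validation accuracy
    max_number_of_concurrent_tasks=2,        # concurrent experiments
    optimizer_class=aSearchStrategy,         # search strategy selected earlier
    execution_queue='1xGPU',                 # queue for the cloned experiments
    time_limit_per_job=10.,                  # minutes per single experiment
    max_iteration_per_job=30,
)
```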

<a class="tr_top_negative" name="service"></a>

## Running as a Service

The optimization can run as a service, if the `run_as_service` argument is set to `true`. For more information about
running as a service, see [ClearML Agent services container](../../../clearml_agent.md#services-mode)
on the "Concepts and Architecture" page.

```python
# if we are running as a service, just enqueue ourselves into the services queue and let it run the optimization
if args['run_as_service']:
    # if this code is executed by `clearml-agent` the function call does nothing.
    # if executed locally, the local process will be terminated, and a remote copy will be executed instead
    task.execute_remotely(queue_name='services', exit_process=True)
```

## Optimize

The optimizer is ready. Set the report period and start it, providing the callback method to report the best performance.

```python
# report every 12 seconds, this is way too often, but we are testing here :)
an_optimizer.set_report_period(0.2)
# start the optimization process, callback function to be called every time an experiment is completed
# this function returns immediately
an_optimizer.start(job_complete_callback=job_complete_callback)
```

Now that it is running:
1. Set a time limit for optimization
1. Wait until the optimization completes
1. Print the best performance
1. Stop the optimizer.

```python
# set the time limit for the optimization process (1.5 hours)
an_optimizer.set_time_limit(in_minutes=90.0)
# wait until process is done (notice we are controlling the optimization process in the background)
an_optimizer.wait()
an_optimizer.stop()

print('We are done, good bye')
```
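The "print the best performance" step in the list above sits between the wait and the stop calls. A minimal sketch using the SDK's `get_top_experiments()`; the `top_k` value and the print format are illustrative:

```python
# fetch the best performing experiments found by the optimizer
top_exp = an_optimizer.get_top_experiments(top_k=3)
print('Top performing experiment IDs:', [t.id for t in top_exp])
```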