Mirror of https://github.com/clearml/clearml (synced 2025-02-07 13:23:40 +00:00)
Make HPO example more readable
commit 14e38e8a46
parent 1b3574a7ca
@@ -5,23 +5,15 @@ from clearml.automation import (
     DiscreteParameterRange, HyperParameterOptimizer, RandomSearch,
     UniformIntegerParameterRange)
 
-aSearchStrategy = None
-
-if not aSearchStrategy:
-    try:
-        from clearml.automation.optuna import OptimizerOptuna
-        aSearchStrategy = OptimizerOptuna
-    except ImportError as ex:
-        pass
-
-if not aSearchStrategy:
-    try:
-        from clearml.automation.hpbandster import OptimizerBOHB
-        aSearchStrategy = OptimizerBOHB
-    except ImportError as ex:
-        pass
-
-if not aSearchStrategy:
-    logging.getLogger().warning(
-        'Apologies, it seems you do not have \'optuna\' or \'hpbandster\' installed, '
-        'we will be using RandomSearch strategy instead')
+# trying to load Bayesian optimizer package
+try:
+    from clearml.automation.optuna import OptimizerOptuna  # noqa
+    aSearchStrategy = OptimizerOptuna
+except ImportError as ex:
+    try:
+        from clearml.automation.hpbandster import OptimizerBOHB  # noqa
+        aSearchStrategy = OptimizerBOHB
+    except ImportError as ex:
+        logging.getLogger().warning(
+            'Apologies, it seems you do not have \'optuna\' or \'hpbandster\' installed, '
+            'we will be using RandomSearch strategy instead')
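For reference, this hunk replaces three repeated `if not aSearchStrategy:` blocks with a single try/except cascade. Below is a minimal standalone sketch of the resulting pattern, assembled from the added lines above; assigning RandomSearch in the innermost except is an assumption made here so the sketch always defines aSearchStrategy (the diff's warning only announces that RandomSearch will be used):

import logging

from clearml.automation import RandomSearch

# Prefer Optuna, then BOHB; fall back to random search when neither
# optional dependency is installed.
try:
    from clearml.automation.optuna import OptimizerOptuna  # noqa
    aSearchStrategy = OptimizerOptuna
except ImportError:
    try:
        from clearml.automation.hpbandster import OptimizerBOHB  # noqa
        aSearchStrategy = OptimizerBOHB
    except ImportError:
        logging.getLogger().warning(
            'Apologies, it seems you do not have \'optuna\' or \'hpbandster\' installed, '
            'we will be using RandomSearch strategy instead')
        aSearchStrategy = RandomSearch  # assumed fallback, not shown in this hunk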
@@ -59,6 +51,10 @@ if not args['template_task_id']:
     args['template_task_id'] = Task.get_task(
         project_name='examples', task_name='Keras HP optimization base').id
 
+# Set default queue name for the Training tasks themselves.
+# later can be overridden in the UI
+execution_queue = '1xGPU'
+
 # Example use case:
 an_optimizer = HyperParameterOptimizer(
     # This is the experiment we want to optimize
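This hunk hoists the queue name into a module-level execution_queue variable, so the value is defined once and reused in the optimizer call below. A hypothetical extension (not part of this commit) would read the queue from an environment variable so the same script can target different agent pools without edits; CLEARML_EXEC_QUEUE is an illustrative name, not a variable ClearML itself reads:

import os

# Hypothetical override: fall back to the example's '1xGPU' queue
# when no environment variable is set.
execution_queue = os.environ.get('CLEARML_EXEC_QUEUE', '1xGPU')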
@@ -89,7 +85,7 @@ an_optimizer = HyperParameterOptimizer(
     # more are coming soon...
     optimizer_class=aSearchStrategy,
     # Select an execution queue to schedule the experiments for execution
-    execution_queue='1xGPU',
+    execution_queue=execution_queue,
     # Optional: Limit the execution time of a single experiment, in minutes.
     # (this is optional, and if using OptimizerBOHB, it is ignored)
     time_limit_per_job=10.,
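Taken together, the optimizer is now constructed against the shared variable rather than a hard-coded queue name. A condensed sketch of such a HyperParameterOptimizer call follows, using the classes imported in the first hunk; the hyperparameter names ('layer_1', 'batch_size'), the 'epoch_accuracy' metric, and the placeholder task ID are illustrative assumptions and must match values actually reported by the base (template) task:

from clearml.automation import (
    DiscreteParameterRange, HyperParameterOptimizer, RandomSearch,
    UniformIntegerParameterRange)

an_optimizer = HyperParameterOptimizer(
    base_task_id='<template_task_id>',  # e.g. args['template_task_id']
    hyper_parameters=[
        UniformIntegerParameterRange('layer_1', min_value=128, max_value=512, step_size=128),
        DiscreteParameterRange('batch_size', values=[96, 128, 160]),
    ],
    objective_metric_title='epoch_accuracy',  # scalar reported by the base task
    objective_metric_series='epoch_accuracy',
    objective_metric_sign='max',              # maximize the reported accuracy
    optimizer_class=RandomSearch,             # or aSearchStrategy resolved above
    execution_queue='1xGPU',                  # or the execution_queue variable
    time_limit_per_job=10.,                   # minutes; ignored by OptimizerBOHB
)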