mirror of
https://github.com/clearml/clearml
synced 2025-04-08 06:34:37 +00:00
Add LogUniformParameterRange for hyperparameter optimization with optuna (#462)
* Add LogUniformParameterRange for hyperparameter optimization with optuna * Add self.base and changed OptimizerOptuna to correctly create log uniform hyperparams. * Add docstring to LogUniformParameterRange class. Co-authored-by: Pereyra, Diego <9011013@ulta.com>
This commit is contained in:
parent
4ee044f020
commit
1f82b0c401
@ -1,4 +1,5 @@
|
||||
from .parameters import UniformParameterRange, DiscreteParameterRange, UniformIntegerParameterRange, ParameterSet
|
||||
from .parameters import (UniformParameterRange, DiscreteParameterRange, UniformIntegerParameterRange, ParameterSet,
|
||||
LogUniformParameterRange)
|
||||
from .optimization import GridSearch, RandomSearch, HyperParameterOptimizer, Objective
|
||||
from .job import ClearmlJob
|
||||
from .controller import PipelineController
|
||||
@ -6,5 +7,5 @@ from .scheduler import TaskScheduler
|
||||
from .trigger import TriggerScheduler
|
||||
|
||||
__all__ = ["UniformParameterRange", "DiscreteParameterRange", "UniformIntegerParameterRange", "ParameterSet",
|
||||
"GridSearch", "RandomSearch", "HyperParameterOptimizer", "Objective", "ClearmlJob", "PipelineController",
|
||||
"TaskScheduler", "TriggerScheduler"]
|
||||
"LogUniformParameterRange", "GridSearch", "RandomSearch", "HyperParameterOptimizer", "Objective",
|
||||
"ClearmlJob", "PipelineController", "TaskScheduler", "TriggerScheduler"]
|
||||
|
@ -2,7 +2,8 @@ from time import sleep
|
||||
from typing import Any, Optional, Sequence
|
||||
|
||||
from ..optimization import Objective, SearchStrategy
|
||||
from ..parameters import (DiscreteParameterRange, Parameter, UniformIntegerParameterRange, UniformParameterRange)
|
||||
from ..parameters import (DiscreteParameterRange, Parameter, UniformIntegerParameterRange, UniformParameterRange,
|
||||
LogUniformParameterRange)
|
||||
from ...task import Task
|
||||
|
||||
try:
|
||||
@ -193,7 +194,10 @@ class OptimizerOptuna(SearchStrategy):
|
||||
# type: () -> dict
|
||||
cs = {}
|
||||
for p in self._hyper_parameters:
|
||||
if isinstance(p, UniformParameterRange):
|
||||
if isinstance(p, LogUniformParameterRange):
|
||||
hp_type = 'suggest_float'
|
||||
hp_params = dict(low=p.base**p.min_value, high=p.base**p.max_value, log=True, step=None)
|
||||
elif isinstance(p, UniformParameterRange):
|
||||
if p.include_max and p.step_size:
|
||||
hp_type = 'suggest_discrete_uniform'
|
||||
hp_params = dict(low=p.min_value, high=p.max_value, q=p.step_size)
|
||||
|
@ -171,6 +171,54 @@ class UniformParameterRange(Parameter):
|
||||
return [{self.name: v} for v in values]
|
||||
|
||||
|
||||
class LogUniformParameterRange(UniformParameterRange):
    """
    Hyper-parameter sampled uniformly in log space.

    The underlying :class:`UniformParameterRange` samples an *exponent* in
    ``[min_value, max_value]``; this class then returns ``base ** exponent``,
    so the resulting values are spread logarithmically.
    """

    def __init__(
            self,
            name,  # type: str
            min_value,  # type: float
            max_value,  # type: float
            base=10,  # type: float
            step_size=None,  # type: Optional[float]
            include_max_value=True  # type: bool
    ):
        # type: (...) -> ()
        """
        Create a parameter to be sampled by the SearchStrategy

        :param str name: The parameter name. Match the Task hyper-parameter name.
        :param float min_value: The minimum exponent sample to use for uniform random sampling.
        :param float max_value: The maximum exponent sample to use for uniform random sampling.
        :param float base: The base used to raise the sampled exponent.
        :param float step_size: If not ``None``, set step size (quantization) for value sampling.
        :param bool include_max_value: Range includes the ``max_value``

            The values are:

            - ``True`` - The range includes the ``max_value`` (Default)
            - ``False`` - Does not include.

        """
        # Delegate exponent sampling to the uniform parent; we only keep the base.
        super().__init__(name, min_value, max_value, step_size=step_size, include_max_value=include_max_value)
        self.base = base

    def get_value(self):
        """
        Return uniformly logarithmic sampled value based on object sampling definitions.

        :return: {self.name: random value self.base^[self.min_value, self.max_value)}
        """
        # Parent samples the exponent; raise the base to it for the final value.
        exponent_dict = super().get_value()
        base = self.base
        return {self.name: base ** exponent for exponent in exponent_dict.values()}

    def to_list(self):
        # Expand the parent's exponent grid into log-spaced concrete values.
        raised = []
        for entry in super().to_list():
            raised.append({self.name: self.base ** entry[self.name]})
        return raised
|
||||
|
||||
|
||||
class UniformIntegerParameterRange(Parameter):
|
||||
"""
|
||||
Uniform randomly sampled integer Hyper-Parameter object.
|
||||
|
Loading…
Reference in New Issue
Block a user