From 6129e09b507dc0bd18642ceaee383823f241a422 Mon Sep 17 00:00:00 2001
From: allegroai <>
Date: Thu, 2 Jul 2020 01:25:55 +0300
Subject: [PATCH] Fix Hyper-Parameter optimization get_top_experiments() order

---
 trains/automation/optimization.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/trains/automation/optimization.py b/trains/automation/optimization.py
index 959dfa87..4f15c543 100644
--- a/trains/automation/optimization.py
+++ b/trains/automation/optimization.py
@@ -174,7 +174,7 @@ class Objective(object):
             series = hashlib.md5(str(self.series).encode('utf-8')).hexdigest()
             self._metric = title, series
         return '{}last_metrics.{}.{}.{}'.format(
-            '-' if self.sign < 0 else '', self._metric[0], self._metric[1],
+            '-' if self.sign > 0 else '', self._metric[0], self._metric[1],
             ('min_value' if self.sign < 0 else 'max_value') if self.extremum else 'value')
@@ -1316,7 +1316,9 @@ class HyperParameterOptimizer(object):
             df = pd.DataFrame(table, index=index)
             df.sort_values(by='objective', ascending=bool(self.objective_metric.sign < 0), inplace=True)
             df.index.name = 'task id'
-            task_logger.report_table("summary", "job", 0, table_plot=df)
+            task_logger.report_table(
+                "summary", "job", 0, table_plot=df,
+                extra_layout={"title": "objective: {}".format(title)})
 
             # if we should leave, stop everything now.
             if timeout < 0:
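
Note on the first hunk: the sign flip controls the order_by field that get_top_experiments() sends to the backend. Assuming the trains API convention that a leading '-' on an order_by field requests descending order, the sketch below illustrates the intended behavior; order_by_field and the metric_field placeholder are simplified stand-ins for illustration, not the library's actual code.

    # Minimal sketch, assuming a leading '-' in an order_by field means
    # "sort descending" (as elsewhere in the trains Task query API).
    def order_by_field(sign, metric_field='last_metrics.<title-hash>.<series-hash>.value'):
        # sign > 0 (maximize): '-' prefix -> descending -> largest objective first
        # sign < 0 (minimize): no prefix  -> ascending  -> smallest objective first
        return ('-' if sign > 0 else '') + metric_field

    print(order_by_field(+1))  # -last_metrics.<title-hash>.<series-hash>.value
    print(order_by_field(-1))  # last_metrics.<title-hash>.<series-hash>.value

With this convention, get_top_experiments() returns the best-performing tasks first for both maximization and minimization objectives; before the fix the '-' prefix was applied on minimization, which reversed the order.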