I'm using Keras Tuner's BayesianOptimization to search for the best hyperparameters for my model, and I'm also using the TensorBoard callback to visualize the performance of each model/trial.
However, the tuner's trials are named/labelled oddly (e.g. trial_1dc4838863f2e4e8a84f0e415ee1db33). Is there a way to have the tuner name the trials simply "trial_1", "trial_2", etc., without the trailing string of digits and letters?
I couldn't find anything in the Keras Tuner documentation on how to do this, or whether there is a parameter for it when creating the Tuner instance.
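For context, here is a minimal sketch of the kind of setup described above (build_model, the dummy data, and the directory names are placeholders for illustration, not my actual project code):

import numpy as np
import tensorflow as tf
from kerastuner.tuners import BayesianOptimization

def build_model(hp):
    # Placeholder hypermodel with a single tunable Dense layer.
    model = tf.keras.Sequential([
        tf.keras.layers.Dense(hp.Int('units', 32, 256, step=32), activation='relu'),
        tf.keras.layers.Dense(1)
    ])
    model.compile(optimizer='adam', loss='mse')
    return model

# Dummy data, just to make the sketch runnable.
x_train, y_train = np.random.rand(100, 8), np.random.rand(100)
x_val, y_val = np.random.rand(20, 8), np.random.rand(20)

tuner = BayesianOptimization(
    build_model,
    objective='val_loss',
    max_trials=5,
    directory='tuner_logs',
    project_name='my_project')

# Each trial then shows up (in the tuner directory and in the TensorBoard logs)
# as trial_<random hash>, e.g. trial_1dc4838863f2e4e8a84f0e415ee1db33.
tuner.search(x_train, y_train,
             epochs=3,
             validation_data=(x_val, y_val),
             callbacks=[tf.keras.callbacks.TensorBoard('tb_logs')])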
I was able to work around this by overriding the BayesianOptimization and BayesianOptimizationOracle classes. It simply names each trial "0", "1", "2", and so on. It would be nice if this were more flexible, though, since I may end up doing the same for other hypertuner methods.
from kerastuner.engine import trial as trial_lib
from kerastuner.tuners.bayesian import BayesianOptimization, BayesianOptimizationOracle


class CustomBayesianOptimizationOracle(BayesianOptimizationOracle):

    def __init__(self,
                 objective,
                 max_trials,
                 num_initial_points=None,
                 alpha=1e-4,
                 beta=2.6,
                 seed=None,
                 hyperparameters=None,
                 allow_new_entries=True,
                 tune_new_entries=True):
        super(CustomBayesianOptimizationOracle, self).__init__(
            objective=objective,
            max_trials=max_trials,
            num_initial_points=num_initial_points,
            alpha=alpha,
            beta=beta,
            seed=seed,
            hyperparameters=hyperparameters,
            tune_new_entries=tune_new_entries,
            allow_new_entries=allow_new_entries)
        # Sequential counter used as the trial id instead of the random hash.
        self.trial_id = '0'

    def create_trial(self, tuner_id):
        """Create a new `Trial` to be run by the `Tuner`.

        A `Trial` corresponds to a unique set of hyperparameters to be run
        by `Tuner.run_trial`.

        Args:
            tuner_id: An ID that identifies the `Tuner` requesting a
                `Trial`. `Tuners` that should run the same trial (for instance,
                when running a multi-worker model) should have the same ID.

        Returns:
            A `Trial` object containing a set of hyperparameter values to run
            in a `Tuner`.
        """
        # Allow for multi-worker DistributionStrategy within a Trial.
        if tuner_id in self.ongoing_trials:
            return self.ongoing_trials[tuner_id]

        if self.max_trials and len(self.trials) >= self.max_trials:
            status = trial_lib.TrialStatus.STOPPED
            values = None
        else:
            response = self._populate_space(self.trial_id)
            status = response['status']
            values = response['values'] if 'values' in response else None

        hyperparameters = self.hyperparameters.copy()
        hyperparameters.values = values or {}
        trial = trial_lib.Trial(
            hyperparameters=hyperparameters,
            trial_id=self.trial_id,
            status=status)

        if status == trial_lib.TrialStatus.RUNNING:
            self.ongoing_trials[tuner_id] = trial
            self.trials[self.trial_id] = trial
            self._save_trial(trial)
            self.save()

        # Advance the sequential counter for the next trial.
        self.trial_id = str(int(self.trial_id) + 1)
        return trial


class CustomBayesianOptimization(BayesianOptimization):

    def __init__(self,
                 hypermodel,
                 objective,
                 max_trials,
                 num_initial_points=2,
                 seed=None,
                 hyperparameters=None,
                 tune_new_entries=True,
                 allow_new_entries=True,
                 **kwargs):
        oracle = CustomBayesianOptimizationOracle(
            objective=objective,
            max_trials=max_trials,
            num_initial_points=num_initial_points,
            seed=seed,
            hyperparameters=hyperparameters,
            tune_new_entries=tune_new_entries,
            allow_new_entries=allow_new_entries)
        # Call the grandparent's __init__ directly so the custom oracle is used
        # instead of the one BayesianOptimization would otherwise create itself.
        super(BayesianOptimization, self).__init__(
            oracle=oracle,
            hypermodel=hypermodel,
            **kwargs)
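With the two classes above in place, the custom tuner is a drop-in replacement for the stock one. A short usage sketch (reusing the placeholder build_model and dummy data from the first snippet):

tuner = CustomBayesianOptimization(
    build_model,
    objective='val_loss',
    max_trials=5,
    directory='tuner_logs',
    project_name='my_project_sequential')

tuner.search(x_train, y_train,
             epochs=3,
             validation_data=(x_val, y_val),
             callbacks=[tf.keras.callbacks.TensorBoard('tb_logs_sequential')])

# Trial folders and TensorBoard runs are now labelled "0", "1", "2", ...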