Using KerasTuner
Installation
pip install keras-tuner [--upgrade]
import keras_tuner as kt
from tensorflow import keras
When writing the model-building function, use hp to refer to the hyperparameters.
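For instance, a minimal sketch of such a function (the full example appears below; the layer sizes here are only placeholders):

from tensorflow import keras
from tensorflow.keras import layers

def build_model(hp):
    model = keras.Sequential()
    model.add(layers.Flatten())
    # hp.Int registers an integer hyperparameter named "units"
    model.add(layers.Dense(hp.Int("units", min_value=32, max_value=512, step=32),
                           activation="relu"))
    model.add(layers.Dense(10, activation="softmax"))
    model.compile(optimizer="adam",
                  loss="categorical_crossentropy",
                  metrics=["accuracy"])
    return model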
tuner = kt.RandomSearch(build_model, objective="val_loss", max_trials=5)
The line above uses random search to try five different models, comparing them by validation loss.
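The search call below assumes x_train, y_train, x_val, and y_val already exist; for a runnable setup, one could load MNIST (an assumed dataset, not part of the original notes):

from tensorflow import keras

(x, y), _ = keras.datasets.mnist.load_data()
x = x.astype("float32") / 255.0        # scale pixels to [0, 1]
y = keras.utils.to_categorical(y, 10)  # one-hot labels for categorical_crossentropy
x_train, y_train = x[:-10000], y[:-10000]
x_val, y_val = x[-10000:], y[-10000:]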
tuner.search(x_train, y_train, epochs=5, validation_data=(x_val, y_val))
best_model = tuner.get_best_models()[0]
search() explores the hyperparameter space, and the resulting models are ranked by the objective metric, so get_best_models()[0] returns the best one.
tuner.search_space_summary()
View the hyperparameter search space.
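Once a search has run, the companion call tuner.results_summary() prints the completed trials ranked by the objective:

tuner.results_summary()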
Example
from tensorflow import keras
from tensorflow.keras import layers

def build_model(hp):
    model = keras.Sequential()
    model.add(layers.Flatten())
    # the number of layers is itself a hyperparameter
    for i in range(hp.Int("num_layers", 2, 20)):
        model.add(
            layers.Dense(
                units=hp.Int("units_" + str(i), min_value=32, max_value=512, step=32),
                activation="relu",
            )
        )
    model.add(layers.Dense(10, activation="softmax"))
    model.compile(
        optimizer=keras.optimizers.Adam(hp.Choice("learning_rate", [1e-2, 1e-3, 1e-4])),
        loss="categorical_crossentropy",
        metrics=["accuracy"],
    )
    return model
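A quick sanity check is to call the function with an empty set of hyperparameters, which builds a model from each parameter's default value (this pattern follows the KerasTuner getting-started guide):

import keras_tuner as kt

model = build_model(kt.HyperParameters())  # uses defaults, e.g. the minimum of each hp.Int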
Alternatively, subclass the HyperModel class to make the model definition easy to share and reuse:
from keras_tuner import HyperModel

class MyHyperModel(HyperModel):
    def __init__(self, classes):
        self.classes = classes

    def build(self, hp):
        model = keras.Sequential()
        model.add(layers.Flatten())
        model.add(
            layers.Dense(
                units=hp.Int("units", min_value=32, max_value=512, step=32),
                activation="relu",
            )
        )
        model.add(layers.Dense(self.classes, activation="softmax"))
        model.compile(
            optimizer=keras.optimizers.Adam(
                hp.Choice("learning_rate", values=[1e-2, 1e-3, 1e-4])
            ),
            loss="categorical_crossentropy",
            metrics=["accuracy"],
        )
        return model
hypermodel = MyHyperModel(classes=10)
tuner = kt.RandomSearch(
    hypermodel,
    objective="val_accuracy",
    max_trials=3,
    overwrite=True,
    directory="my_dir",
    project_name="helloworld",
)
tuner.search(x_train, y_train, epochs=2, validation_data=(x_val, y_val))
Overriding just the build method like this makes models easy to share and reuse.
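For example, the same class can be reused for a different task just by changing the constructor argument (an illustrative variable name):

hypermodel_5 = MyHyperModel(classes=5)  # same search logic, 5 output classes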
The core methods are hp.Int(name, min_value, max_value, step, default)
and hp.Choice(name, values), where values is an iterable of candidate values.
A HyperParameters container can also be instantiated directly:

from keras_tuner import HyperParameters

hp = HyperParameters()
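Calling a definition method on such a standalone object registers the hyperparameter and returns its current value; the object can then be passed to a tuner through the hyperparameters argument to pre-define part of the search space. A minimal sketch of the standalone behavior:

units = hp.Int("units", min_value=32, max_value=512, step=32)
print(units)  # 32: with no explicit default, the minimum value is returned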