keras动态调整学习率

Keras 可随着 epoch 及 loss 的变化情况动态调整学习率，主要利用 Callback 与 ReduceLROnPlateau 来实现。

import keras.backend as K

from keras.callbacks import Callback, TensorBoard, ReduceLROnPlateau, ModelCheckpoint



class LrReducer(Callback):

  """Polynomial-decay learning-rate scheduler.

  After each epoch, sets the optimizer's learning rate to
  ``base_lr * (1 - epoch / max_epoch) ** power``, floored at 1e-5 and
  capped at the optimizer's current rate (so it composes with other
  schedulers, e.g. ReduceLROnPlateau, without ever raising the rate).
  """

  def __init__(self, base_lr=0.01, max_epoch=150, power=0.9, verbose=1):

    # Bug fix: the original called super(Callback, self).__init__(),
    # which starts the MRO lookup *after* Callback and skips Callback's
    # own __init__. The first argument to super() must be this class.
    super(LrReducer, self).__init__()

    self.base_lr = base_lr      # initial learning rate for the decay schedule

    self.max_epoch = max_epoch  # epoch at which the polynomial term reaches 0

    self.power = power          # polynomial decay exponent

    self.verbose = verbose     # if truthy, print the new rate each epoch



  def on_epoch_end(self, epoch, logs=None):
    # Mutable-default fix: logs=None instead of logs={} (shared dict).

    lr_now = K.get_value(self.model.optimizer.lr)

    # Clamp the decay fraction at 0: for epoch >= max_epoch the base
    # would be negative and a fractional power would yield a complex
    # number, crashing the min() comparison below.
    decay_frac = max(0.0, 1.0 - epoch / float(self.max_epoch))

    new_lr = max(0.00001, min(self.base_lr * decay_frac ** self.power, lr_now))

    K.set_value(self.model.optimizer.lr, new_lr)

    if self.verbose:

        print(" - learning rate: %10f" % (new_lr))



def callbacks(checkpointdir):
  """Assemble the training callback list.

  Returns best-model checkpointing (by max iou_score), loss-plateau LR
  reduction, and the polynomial LrReducer schedule.
  """
  checkpoint = ModelCheckpoint(
      checkpointdir, monitor='iou_score', save_best_only=True, mode='max')
  plateau = ReduceLROnPlateau(
      monitor='loss', factor=0.99, verbose=1, patience=0, min_lr=0.00001)
  return [checkpoint, plateau, LrReducer()]

你可能感兴趣的:(AI,keras,python)