Custom activation functions in TensorFlow / Keras: registering a leaky ReLU by name

import tensorflow as tf
from keras.layers import Dense, Dropout, Activation
from keras.models import Sequential
from keras.utils.generic_utils import get_custom_objects
class Lrelu(Activation):
    """Thin Activation wrapper so the custom function is serialized under the name 'lrelu'."""

    def __init__(self, activation, **kwargs):
        super(Lrelu, self).__init__(activation, **kwargs)
        self.__name__ = 'lrelu'


def lrelu(x, leak=0.2):
    """Leaky ReLU: identity for x >= 0, slope `leak` for x < 0."""
    f1 = 0.5 * (1 + leak)
    f2 = 0.5 * (1 - leak)
    return f1 * x + f2 * tf.abs(x)


# Register the activation so layers can refer to it by name, e.g. Activation('lrelu').
get_custom_objects().update({'lrelu': Lrelu(lrelu)})
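The formula is just leaky ReLU written without a branch: for x >= 0 it reduces to 0.5*(1+leak)*x + 0.5*(1-leak)*x = x, and for x < 0 to 0.5*(1+leak)*x - 0.5*(1-leak)*x = leak*x. Below is a minimal sanity check, a sketch assuming TensorFlow 2.x with eager execution; the sample values and the comparison against tf.nn.leaky_relu are only for illustration:

x = tf.constant([-2.0, -0.5, 0.0, 1.0, 3.0])
print(lrelu(x, leak=0.2).numpy())              # [-0.4 -0.1  0.   1.   3. ]
print(tf.nn.leaky_relu(x, alpha=0.2).numpy())  # same values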

def _get_sae(inputs, hidden, output):
    """SAE(Auto-Encoders)
    Build SAE Model.

    # Arguments
        inputs: Integer, number of input units.
        hidden: Integer, number of hidden units.
        output: Integer, number of output units.
    # Returns
        model: Model, nn model.
    """

    model = Sequential()
    model.add(Dense(hidden, input_dim=inputs, name='hidden'))
    model.add(Activation('lrelu'))  # custom activation, resolved by name via get_custom_objects
    model.add(Dropout(0.2))
    model.add(Dense(output, activation='tanh'))

    return model
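
A minimal usage sketch, assuming the registration code above has already run; the layer sizes, optimizer, and the random NumPy data are placeholders for illustration only:

import numpy as np

model = _get_sae(inputs=12, hidden=400, output=1)
model.compile(loss='mse', optimizer='rmsprop')

X = np.random.rand(256, 12).astype('float32')  # dummy inputs
y = np.random.rand(256, 1).astype('float32')   # dummy targets in [0, 1]
model.fit(X, y, batch_size=32, epochs=2, validation_split=0.1)

Because 'lrelu' is registered through get_custom_objects(), the string 'lrelu' resolves when the model is built, and a model saved with this activation should also be loadable by name rather than by passing custom_objects manually.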
