Tensorflow 自定义高斯激活函数

Gaussian non-linear activation function

tensorflow自带了几个常用的激活函数,但是最近手头的项目,激活函数用的既不是relu,也不是sigmoid,而是一个很生僻的函数,高斯激活函数(Gaussian activation function)。

自定义高斯函数形式:f(x) = exp( - (x^2) / (sigma^2) )

实验中的 sigma = 0.5

 

自定义激活函数代码如下

新建文件gaussian_activation.py

#-*- encoding:utf-8 -*-
#!/usr/bin/env python

import numpy as np
import tensorflow as tf
import math
from tensorflow.python.framework import ops

def gaussian(x, sigma=0.5):
    """Gaussian activation f(x) = exp(-x^2 / sigma^2).

    Args:
        x: scalar input value.
        sigma: width parameter; defaults to 0.5 (sigma^2 = 0.25),
            matching the original hard-coded constant.

    Returns:
        float in (0, 1], with f(0) == 1.
    """
    return math.exp(-(x * x) / (sigma * sigma))

def gaussian_grad(x, sigma=0.5):
    """Derivative of the Gaussian activation: f'(x) = (-2/sigma^2) * x * f(x).

    Args:
        x: scalar input value.
        sigma: width parameter; defaults to 0.5, for which the
            coefficient -2/sigma^2 equals the original -8.

    Returns:
        float gradient value; gaussian_grad(0) == 0.
    """
    sigma_sq = sigma * sigma
    return (-2.0 / sigma_sq) * x * math.exp(-(x * x) / sigma_sq)

# Element-wise (vectorized) versions of the scalar activation and its
# gradient, so they can be applied to whole numpy arrays.
gaussian_np = np.vectorize(gaussian)
gaussian_grad_np = np.vectorize(gaussian_grad)


def gaussian_np_32(x):
    """Vectorized activation, cast to float32 for TensorFlow interop."""
    return gaussian_np(x).astype(np.float32)


def gaussian_grad_np_32(x):
    """Vectorized gradient, cast to float32 for TensorFlow interop."""
    return gaussian_grad_np(x).astype(np.float32)

def gaussian_grad_tf(x, name=None):
    """Wrap the numpy gradient as a TensorFlow graph op via tf.py_func.

    Args:
        x: input Tensor.
        name: optional op name.

    Returns:
        float32 Tensor holding the element-wise Gaussian gradient.
    """
    with ops.name_scope(name, "gaussian_grad_tf", [x]) as scope:
        outputs = tf.py_func(gaussian_grad_np_32,
                             [x],
                             [tf.float32],
                             name=scope,
                             stateful=False)
    # py_func returns a list of output tensors; we declared exactly one.
    return outputs[0]

def my_py_func(func, inp, Tout, stateful=False, name=None, my_grad_func=None):
    """tf.py_func with a custom gradient attached.

    Registers `my_grad_func` under a freshly generated gradient name and
    remaps the PyFunc op types onto it while the op is being built.

    Args:
        func: numpy function to wrap.
        inp: list of input Tensors.
        Tout: list of output dtypes.
        stateful: whether the op is stateful (defaults to False).
        name: optional op name.
        my_grad_func: gradient function with signature (op, grad) -> Tensor.

    Returns:
        The list of output Tensors produced by tf.py_func.
    """
    # A unique name per call avoids duplicate-registration errors.
    grad_name = "PyFuncGrad" + str(np.random.randint(0, 1E+8))
    tf.RegisterGradient(grad_name)(my_grad_func)
    graph = tf.get_default_graph()
    overrides = {"PyFunc": grad_name, "PyFuncStateless": grad_name}
    with graph.gradient_override_map(overrides):
        return tf.py_func(func, inp, Tout, stateful=stateful, name=name)

def _gaussian_grad(op, pred_grad):
    """Gradient function registered for the Gaussian py_func op.

    Args:
        op: the forward op; op.inputs[0] is the activation input Tensor.
        pred_grad: incoming gradient from downstream ops.

    Returns:
        Tensor: pred_grad * f'(x), propagated to the input.
    """
    x = op.inputs[0]
    # BUG FIX: the original called gaussian_grad(x) directly, but that is a
    # plain-Python function using math.exp and cannot accept a tf.Tensor
    # (it raises TypeError when the gradient graph is built). Use the
    # tf.py_func wrapper so the gradient is computed as a graph op.
    cur_grad = gaussian_grad_tf(x)
    next_grad = pred_grad * cur_grad
    return next_grad

def gaussian_activation(x, name=None):
    """Apply the Gaussian activation f(x) = exp(-x^2 / 0.25) element-wise.

    Built as a py_func op with a custom registered gradient, so it is
    usable inside a trainable TensorFlow graph.

    Args:
        x: input Tensor.
        name: optional op name.

    Returns:
        float32 Tensor of activations, same shape as x.
    """
    with ops.name_scope(name, "gaussian_activator", [x]) as scope:
        outputs = my_py_func(gaussian_np_32,
                             [x],
                             [tf.float32],
                             stateful=False,
                             name=scope,
                             my_grad_func=_gaussian_grad)
    # my_py_func returns a list; we declared a single float32 output.
    return outputs[0]


使用自定义高斯激活函数:

import numpy as np
import tensorflow as tf

import gaussian_activation

# Example: run the custom activation on a small constant tensor.
a = tf.constant([-2, 5])

with tf.Session() as sess:
    b = gaussian_activation.gaussian_activation(a)
    # FIX: use the print() function (Python 3 compatible) instead of the
    # Python-2-only print statement.
    print(sess.run(b))

 

你可能感兴趣的:(深度学习)