import numpy as np
def softmax(x):
    # Subtract the column-wise max for numerical stability (the result is
    # unchanged), then normalise along axis 0 so each column sums to 1.
    ex = np.exp(x - np.max(x, axis=0, keepdims=True))
    return ex / np.sum(ex, axis=0, keepdims=True)
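
# Quick sanity check (hypothetical usage, not from the original): with the
# normalisation along axis 0, every column of the output sums to 1.
#   w = softmax(np.random.random((10, 5)))
#   np.allclose(w.sum(axis=0), 1.0)  # -> True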
# Compute the L2 norm of each vector along the last axis.
def norm(x):
    return np.sqrt(np.sum(np.square(x), axis=-1, keepdims=True))
# Squash non-linearity: shrinks each vector's norm into [0, 1) while
# preserving its direction. Note: the 0.5 in the denominator is a variant;
# the original squash of Sabour et al. (2017) uses 1.
def squash(x):
    s_squared_norm = np.sum(np.square(x), axis=-1, keepdims=True)
    scale = np.sqrt(s_squared_norm) / (0.5 + s_squared_norm)
    return scale * x
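
# Worked check (a sketch, assuming the 0.5 variant above): the output norm is
# ||squash(x)|| = ||x||^2 / (0.5 + ||x||^2), which lies in [0, 1) and
# approaches 1 as ||x|| grows. For example:
#   v = squash(np.array([[3.0, 4.0]]))   # ||x|| = 5
#   norm(v)                              # -> 25 / 25.5 ≈ 0.980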
# Demo: 10 random 5-dimensional vectors with components in [-1, 1).
a = 2 * np.random.random((10, 5)) - 1
# Weight each vector by its own norm, then normalise across the 10 vectors
# (axis 0) so the weights in each dimension sum to 1.
c = a * norm(a)
c = softmax(c)
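
# Verification sketch (added for illustration, not part of the original):
# confirm the shape, that the softmax weights sum to 1 down each column,
# and that squashed vectors have norms strictly below 1.
assert c.shape == (10, 5)
assert np.allclose(c.sum(axis=0), 1.0)
v = squash(c)
assert np.all(norm(v) < 1.0)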