References:
https://morvanzhou.github.io/tutorials/machine-learning/keras/2-1-regressor/#%E5%8F%AF%E8%A7%86%E5%8C%96%E7%BB%93%E6%9E%9C
https://blog.csdn.net/cymy001/article/details/78647933
#!/usr/bin/env python3
import numpy as np
np.random.seed(0)
from keras.models import Sequential
from keras.layers import Dense, Activation
from keras.optimizers import SGD
import matplotlib.pyplot as plt
X = np.linspace(-1, 1, 1000)
np.random.shuffle(X)
Y = np.square(X) + np.random.normal(0, 0.05, (1000,))
#80% training 20% testing
X_train, Y_train = X[:800], Y[:800]
X_test, Y_test = X[800:], Y[800:]
# Optional: preview the raw training/testing data
# fig = plt.figure(figsize=(4, 3))
# plt.subplot(1, 2, 1)
# plt.scatter(X_train, Y_train)
# plt.subplot(1, 2, 2)
# plt.scatter(X_test, Y_test)
# plt.show()
model = Sequential()
model.add(Dense(units=10, input_dim=1))
model.add(Activation('tanh'))
model.add(Dense(units=1))
model.add(Activation('tanh'))
# To change the learning rate, pass an SGD instance instead of the string 'sgd'
# (see the sketch after the script), e.g.:
# sgd = SGD(lr=0.3)
model.compile(loss='mse', optimizer='sgd')
print("training........")
for step in range(10001):
    cost = model.train_on_batch(X_train, Y_train)
    if step % 2000 == 0:
        print("train cost:", cost)
print("Testing.........")
# cost = model.evaluate(X_test, Y_test, batch_size=40)
# print("test cost:", cost)
W, b = model.layers[0].get_weights()
print('Weights=', W, '\nbiases=', b)
# plotting the prediction
plt.scatter(X_test, Y_test, c='b')
X_test = np.sort(X_test)  # X_test was shuffled earlier; sort it so plt.plot draws a clean curve instead of a tangle
Y_pred = model.predict(X_test)
plt.scatter(X_test, Y_pred, c='r')
plt.plot(X_test, Y_pred,'r')
plt.show()
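The two commented-out ideas above (a custom SGD learning rate and model.evaluate on the test split) can be wired up as follows. This is only a minimal sketch, assuming the same Keras 2 API as the script (SGD(lr=...); newer Keras spells it learning_rate), with the data reshaped to column vectors so the array shapes are unambiguous:

# Minimal sketch: compile with an explicit SGD learning rate and score the
# model on the held-out 20%. Assumes the Keras 2 API (SGD takes lr).
import numpy as np
np.random.seed(0)
from keras.models import Sequential
from keras.layers import Dense, Activation
from keras.optimizers import SGD

# Same toy data as above: y = x^2 plus Gaussian noise, as (n, 1) column vectors
X = np.linspace(-1, 1, 1000).reshape(-1, 1)
np.random.shuffle(X)
Y = np.square(X) + np.random.normal(0, 0.05, (1000, 1))
X_train, Y_train = X[:800], Y[:800]
X_test, Y_test = X[800:], Y[800:]

model = Sequential()
model.add(Dense(units=10, input_dim=1))
model.add(Activation('tanh'))
model.add(Dense(units=1))
model.add(Activation('tanh'))

sgd = SGD(lr=0.3)  # larger step size than the default 0.01
model.compile(loss='mse', optimizer=sgd)

model.fit(X_train, Y_train, epochs=100, batch_size=40, verbose=0)

# evaluate() returns the mean squared error on the test split
test_cost = model.evaluate(X_test, Y_test, batch_size=40, verbose=0)
print("test cost:", test_cost)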
About the network structure: the script above stacks two fully connected layers with tanh activations:
model = Sequential()
model.add(Dense(units=10, input_dim=1))
model.add(Activation('tanh'))
model.add(Dense(units=1))
model.add(Activation('tanh'))
If there is only a single layer, the model can only manage an essentially linear fit, since Dense(units=1) computes just tanh(w*x + b), which is monotonic in x and cannot bend into the parabola. For example:
model.add(Dense(units=1, input_dim=1))
model.add(Activation('tanh'))
Result:
With two layers and relu as the activation function:
model.add(Dense(units=10, input_dim=1))
model.add(Activation('relu'))
model.add(Dense(units=1))
model.add(Activation('relu'))
Increasing the units and the number of fully connected layers:
model.add(Dense(units=10, input_dim=1))
model.add(Activation('relu'))
model.add(Dense(units=50))
model.add(Activation('relu'))
model.add(Dense(units=1))
model.add(Activation('relu'))
Switching to the tanh activation function:
model.add(Dense(units=10, input_dim=1))
model.add(Activation('tanh'))
model.add(Dense(units=50))
model.add(Activation('tanh'))
model.add(Dense(units=1))
model.add(Activation('tanh'))
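For a quick side-by-side check of these variants, the architectures above can be rebuilt from a list of layer widths and trained on the same data. The helper build_model and the variant names below are hypothetical, for illustration only (not from the original post), again assuming the Keras 2 API:

# Hypothetical helper for comparing the architectures discussed above on the
# same x^2 toy data; not part of the original post.
import numpy as np
np.random.seed(0)
from keras.models import Sequential
from keras.layers import Dense, Activation

X = np.linspace(-1, 1, 1000).reshape(-1, 1)
np.random.shuffle(X)
Y = np.square(X) + np.random.normal(0, 0.05, (1000, 1))
X_train, Y_train = X[:800], Y[:800]
X_test, Y_test = X[800:], Y[800:]

def build_model(layer_sizes, activation):
    # layer_sizes lists every Dense layer's width, ending with the 1-unit output
    model = Sequential()
    model.add(Dense(units=layer_sizes[0], input_dim=1))
    model.add(Activation(activation))
    for units in layer_sizes[1:]:
        model.add(Dense(units=units))
        model.add(Activation(activation))
    model.compile(loss='mse', optimizer='sgd')
    return model

variants = {
    'single layer, tanh': build_model([1], 'tanh'),
    'two layers, relu':   build_model([10, 1], 'relu'),
    'three layers, relu': build_model([10, 50, 1], 'relu'),
    'three layers, tanh': build_model([10, 50, 1], 'tanh'),
}

for name, model in variants.items():
    model.fit(X_train, Y_train, epochs=100, batch_size=40, verbose=0)
    mse = model.evaluate(X_test, Y_test, batch_size=40, verbose=0)
    print(name, 'test mse:', mse)

Driving the architecture from a list of widths keeps the four snippets above in one place and makes it easy to try other depths or activations.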