# Final demonstration
# Load the dataset
training_data_include_y, training_x, y = get_training_data("./ML/02/lr2_data.txt")
# Get the number of samples and features
sample_count, feature_count = training_x.shape
# Define the learning rate alpha
alpha = 0.01
# Initialize theta
theta = init_theta(feature_count)
# Run gradient descent to obtain the final parameters theta and the cost history
result_theta, Jthetas = gradient_descending(training_x, y, theta, alpha)
# Print the learned parameters
print("w:{}".format(result_theta[0][0]), "b:{}".format(result_theta[1][0]))
showJTheta(Jthetas)
showlinercurve(result_theta, training_data_include_y)
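The driver above calls several helpers (get_training_data, init_theta, showJTheta, showlinercurve) that are not defined in this excerpt. Below is a minimal sketch of plausible implementations, assuming a comma-separated data file with one feature column followed by the target, and a bias column of ones appended to the design matrix so that theta packs both the slope w and the intercept b; the original course code may differ in these details.

import numpy as np
import matplotlib.pyplot as plt

def get_training_data(path):
    # Assumed format: each line holds "x,y". Build the design matrix [x, 1]
    # so theta = [w, b] covers both slope and intercept.
    data = np.loadtxt(path, delimiter=",")
    x_raw, y = data[:, :1], data[:, 1:]
    X = np.hstack([x_raw, np.ones_like(x_raw)])  # append a bias column of ones
    return data, X, y

def init_theta(feature_count):
    # Start all parameters at 1; zeros or small random values also work.
    return np.ones((feature_count, 1))

def showJTheta(Jthetas):
    # Plot the recorded (iteration, cost) pairs to check convergence.
    steps, costs = zip(*Jthetas)
    plt.plot(steps, costs)
    plt.xlabel("iteration")
    plt.ylabel("J(theta)")
    plt.show()

def showlinercurve(theta, training_data):
    # Scatter the raw samples and overlay the fitted line y = w*x + b.
    x, y = training_data[:, 0], training_data[:, 1]
    plt.scatter(x, y, s=10)
    plt.plot(x, theta[0][0] * x + theta[1][0], color="red")
    plt.show()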
# Define the gradient descent routine
import numpy as np

def gradient_descending(X, y, theta, alpha):
    Jthetas = []  # record the cost J(θ) over time to verify that gradient descent is converging
    gradient = generate_gradient(X, theta, y)  # compute the initial gradient
    index = 0
    # Stop once every component of the gradient is at most 1e-5 in absolute value
    while not np.all(np.absolute(gradient) <= 1e-5):
        theta = theta - alpha * gradient
        gradient = generate_gradient(X, theta, y)  # compute the new gradient
        # Compute the cost: the squared difference between true and predicted values, (y^i - h(x^i))^2
        Jtheta = (X.dot(theta) - y).T.dot(X.dot(theta) - y)
        if (index + 1) % 10 == 0:
            Jthetas.append((index, Jtheta[0][0]))  # record the cost every 10 iterations
        index += 1
    return theta, Jthetas
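The loop above relies on generate_gradient, which is also not shown in this excerpt. For the squared-error cost used here, the gradient with respect to theta is proportional to X^T(X·theta - y); a common choice divides by the sample count m to keep the step size independent of the dataset size. The sketch below is one such assumed implementation, not the original course code; the exact scaling (1/m, 2/m, or none) depends on how the cost is normalized.

import numpy as np

def generate_gradient(X, theta, y):
    # Gradient of the averaged squared-error cost: (1/m) * X^T (X·theta - y).
    m = X.shape[0]
    return X.T.dot(X.dot(theta) - y) / m

With this 1/m scaling and alpha = 0.01, the stopping rule "all gradient components below 1e-5" terminates for reasonably scaled data; an unscaled gradient would need a smaller learning rate.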