Fitting a Quadratic Function with MindSpore


Reference: https://blog.csdn.net/baidu_37157624/article/details/117315897
Note: this is my assignment for an Intelligent Control course, so it may contain mistakes; corrections are welcome. If you repost it, please adapt it as needed and credit the source.

# by YYC
# 2021.10.05

import time

import matplotlib.pyplot as plt
import numpy as np

from mindspore import context
from mindspore import dataset as ds
from mindspore import nn, Tensor, Model
from mindspore.train.callback import LossMonitor

# Run in graph mode on the CPU
context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
 
def get_data(num, a=2.0, b=-4.0, c=3.0):
    """Generate noisy samples of the quadratic z = a*x^2 + b*x + c."""
    for _ in range(num):
        x = np.random.uniform(-1.0, 1.0)
        noise = np.random.normal(0, 0.03)
        z = a * x ** 2 + b * x + c + noise
        # Yield the feature vector [x^2, x] and the label [z] as float32 arrays
        yield np.array([x ** 2, x]).astype(np.float32), np.array([z]).astype(np.float32)
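
# Quick sanity check of the generator (illustrative addition, not part of the
# original script): each sample is a ([x^2, x], [z]) pair, which is what lets a
# single Dense(2, 1) layer recover a and b as its weights and c as its bias.
sample_features, sample_label = next(get_data(1))
print('sample features:', sample_features, 'sample label:', sample_label)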
 
def create_dataset(num_data, batch_size=16, repeat_size=1):
    """Build the training dataset pipeline: generate samples, then batch and repeat."""
    input_data = ds.GeneratorDataset(list(get_data(num_data)), column_names=['x', 'z'])
    input_data = input_data.batch(batch_size)
    input_data = input_data.repeat(repeat_size)
    return input_data
 
data_number = 1600
batch_number = 16
repeat_number = 2


ds_train = create_dataset(data_number, batch_size=batch_number, repeat_size=repeat_number)
dict_datasets = next(ds_train.create_dict_iterator())
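
# Optional inspection of one batch (added for illustration): with batch_size=16
# the feature column 'x' should have shape (16, 2) and the label column 'z'
# shape (16, 1).
print("x batch shape:", dict_datasets['x'].shape)
print("z batch shape:", dict_datasets['z'].shape)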
 
class LinearNet(nn.Cell):
    def __init__(self):
        super(LinearNet, self).__init__()
        # Fully connected layer mapping the 2 input features [x^2, x] to 1 output,
        # with weights and bias both initialized to the constant 0.02
        self.fc = nn.Dense(2, 1, 0.02, 0.02)

    def construct(self, x):
        x = self.fc(x)
        return x
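
# Illustrative check (not in the original post): because the features are
# [x^2, x], the single Dense layer computes w1*x^2 + w2*x + b, so fitting the
# quadratic reduces to linear regression on two engineered features. A dummy
# forward pass confirms the expected output shape.
_demo_net = LinearNet()
_demo_out = _demo_net(Tensor(np.zeros((4, 2), dtype=np.float32)))
print('demo output shape:', _demo_out.shape)   # expected: (4, 1)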

eval_data = list(get_data(500))
def plot_model_and_datasets(net, eval_data):
    """Plot the fitted curve (blue) against the target quadratic (green)."""
    weight = net.trainable_params()[0].asnumpy()   # shape (1, 2): estimates of [a, b]
    bias = net.trainable_params()[1].asnumpy()     # shape (1,): estimate of c
    x = np.arange(-10, 10, 0.1)
    y = x * x * weight[0][0] + x * weight[0][1] + bias[0]
    # x1, y1 = zip(*eval_data)
    x_target = x
    y_target = 2 * x_target * x_target - 4 * x_target + 3

    plt.axis([-20, 20, -20, 25])
    # plt.scatter(x1, y1, color="red", s=5)
    plt.plot(x, y, color="blue")
    plt.plot(x_target, y_target, color="green")
    plt.show()
    time.sleep(0.2)
    
net = LinearNet()
model_params = net.trainable_params()
print('Number of trainable parameter tensors: {}'.format(len(model_params)))
# Print the initial (untrained) parameter values
for net_param in net.trainable_params():
    print(net_param, net_param.asnumpy())
net_loss = nn.MSELoss()
 
optim = nn.Momentum(net.trainable_params(), learning_rate=0.005, momentum=0.9)
model = Model(net, net_loss, optim)
 
epoch = 1
# Train for one epoch; LossMonitor(8) prints the loss every 8 steps
model.train(epoch, ds_train, callbacks=[LossMonitor(8)], dataset_sink_mode=False)
 
# Print the learned parameter values after training
for net_param in net.trainable_params():
    print(net_param, net_param.asnumpy())
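
# Rough check of the fit (illustrative addition, assuming the default targets
# a=2, b=-4, c=3): the Dense weights should move toward [a, b] and the bias
# toward c as training progresses.
learned_w = net.trainable_params()[0].asnumpy()
learned_b = net.trainable_params()[1].asnumpy()
print('learned [a, b]:', learned_w[0], 'target: [2, -4]')
print('learned c:', learned_b[0], 'target: 3')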
    
plot_model_and_datasets(net, eval_data)
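
# Optional quantitative check (not in the original post): mean squared error of
# the trained net on the 500 held-out samples generated above as eval_data.
eval_x = np.array([sample[0] for sample in eval_data], dtype=np.float32)
eval_z = np.array([sample[1] for sample in eval_data], dtype=np.float32)
eval_pred = net(Tensor(eval_x)).asnumpy()
print('eval MSE:', np.mean((eval_pred - eval_z) ** 2))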

Fitting result plot:

[Figure 1: fitting result, learned curve (blue) vs. target quadratic (green)]
