Computing linearity with the end-point method and the least-squares method
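
Both methods below fit a straight line y = kx + b to a set of calibration points (x_i, y_i) and then quote the linearity error of the data relative to that line.

The end-point method draws the line through the first and last calibration points, so k = (y_n - y_1) / (x_n - x_1) and b = (y_1*x_n - y_n*x_1) / (x_n - x_1). The least-squares method minimizes the sum of squared residuals, which has the closed-form solution k = (n*Σ(x_i*y_i) - Σx_i*Σy_i) / (n*Σx_i² - (Σx_i)²) and b = (Σx_i²*Σy_i - Σx_i*Σ(x_i*y_i)) / (n*Σx_i² - (Σx_i)²).

In both cases the script then takes the maximum deviation L_max = max|y_i - (k*x_i + b)| and reports the linearity error as e_L = ±L_max / y_FS × 100%, where y_FS is the full-scale output (the largest measured y).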

import numpy as np


def end_group_method(data1, data2):
    """End-point method: fit a line through the first and last calibration points."""
    print("End-point method")
    # Sort the calibration points by the input x so that the two endpoints
    # really are the smallest and largest inputs.
    pairs = sorted(zip(data1, data2))
    data1 = [p[0] for p in pairs]
    data2 = [p[1] for p in pairs]
    # Slope and intercept of the line through (x_1, y_1) and (x_n, y_n).
    m_k = (data2[-1] - data2[0]) / (data1[-1] - data1[0])
    m_b = (data2[0] * data1[-1] - data2[-1] * data1[0]) / (data1[-1] - data1[0])
    print("y=%.4fx+%.4f" % (m_k, m_b))
    # Maximum deviation of the measured outputs from the fitted line.
    m_L = np.subtract(data2, np.multiply(m_k, data1) + m_b)
    L_max = max(np.abs(m_L))
    print(f"L_max={L_max:.4f}")
    # Linearity error as a percentage of the full-scale output.
    print(f"e_L = +-{L_max / data2[-1] * 100:.4f}%")
    return None


def least_square_method(data1, data2):
    """Ordinary least-squares fit y = kx + b over all calibration points."""
    print("Least-squares method")
    # Sort by x so the last point corresponds to the full-scale output.
    pairs = sorted(zip(data1, data2))
    data1 = [p[0] for p in pairs]
    data2 = [p[1] for p in pairs]
    n = len(data1)
    # Closed-form least-squares solution for slope k and intercept b.
    m_k = (n * sum(np.multiply(data1, data2)) - sum(data1) * sum(data2)) / (n * sum(np.power(data1, 2)) - sum(data1) ** 2)
    m_b = (sum(np.power(data1, 2)) * sum(data2) - sum(data1) * sum(np.multiply(data1, data2))) / (n * sum(np.power(data1, 2)) - sum(data1) ** 2)
    print("y=%.4fx+%.4f" % (m_k, m_b))
    # Maximum deviation of the measured outputs from the fitted line.
    m_L = np.subtract(data2, np.multiply(m_k, data1) + m_b)
    L_max = max(np.abs(m_L))
    print(f"L_max={L_max:.4f}")
    # Linearity error as a percentage of the full-scale output.
    print(f"e_L = +-{L_max / data2[-1] * 100:.4f}%")
    return None


# Example calibration data: sensor input x and measured output y (unsorted).
x = [0.9, 2.5, 3.3, 4.5, 6.7, 5.7]
y = [1.1, 1.6, 2.6, 3.2, 5.0, 4.0]

end_group_method(x, y)
print("")
least_square_method(x, y)
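
As a quick sanity check that is not part of the original script, the least-squares coefficients can be compared against NumPy's built-in degree-1 polynomial fit; this minimal sketch assumes the same x and y lists defined above.

# Optional cross-check: np.polyfit returns [slope, intercept] for a
# degree-1 least-squares fit, so it should reproduce the coefficients
# printed by least_square_method.
k_ref, b_ref = np.polyfit(x, y, 1)
print("")
print("np.polyfit check: y=%.4fx+%.4f" % (k_ref, b_ref))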
