Logistic regression usually uses cross-entropy as its loss function, and squared loss is rarely used. The usual reason is that squared loss is harder to optimize: composed with the sigmoid it is non-convex, and its gradient vanishes once the sigmoid saturates. TensorFlow makes the comparison cheap, since you only need to specify a loss function and an optimizer and it differentiates and minimizes automatically. Below, the same kind of synthetic sample is fitted twice, once with squared loss and once with cross-entropy.
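To make "harder to optimize" concrete: with p = sigmoid(z), the squared loss (p - y)^2 has gradient 2*(p - y)*p*(1 - p) with respect to the logit z, which collapses to zero whenever the sigmoid saturates, even on badly misclassified points, while the cross-entropy gradient is simply p - y. A quick NumPy check (illustrative only, not part of the scripts below):

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

# A confidently wrong prediction: true label 1, logit -10, so p ~ 4.5e-5.
y, z = 1.0, -10.0
p = sigmoid(z)
grad_squared = 2 * (p - y) * p * (1 - p)  # ~ -9.1e-5: almost no learning signal
grad_xent = p - y                         # ~ -1.0: full-strength signal
print grad_squared, grad_xent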
#!/usr/bin/env python
#-*- coding:utf8 -*-
import sys
import tensorflow as tf
import numpy as np
from sklearn.metrics import log_loss, roc_auc_score
reload(sys)
sys.setdefaultencoding('utf8')
# Generate phony data with NumPy: 5000 points with 2 features each.
x_data = np.float32(np.random.randn(2, 5000))  # random inputs
# True model: p = 1/(1+e^(-z)) with z = 0.1*x1 + 0.2*x2 + 0.1
y_data = 1.0 / (1.0 + np.exp(-(np.dot([0.100, 0.200], x_data) + 0.1)))
# Threshold the probabilities at 0.5 to get binary labels.
pos_idx = np.where(y_data >= 0.5)
neg_idx = np.where(y_data < 0.5)
y_data[pos_idx] = 1
y_data[neg_idx] = 0
print x_data[:, neg_idx]                # peek at the negative samples
print len(pos_idx[0]), len(neg_idx[0])  # class balance
'''
# Alternative labeling (unused): hard labels only for confident points,
# random labels for the ambiguous middle band.
pos_idx = np.where(y_data >= 0.75)
neg_idx = np.where(y_data < 0.25)
r_idx = np.where((0.25 <= y_data) & (y_data < 0.75))
y_data[pos_idx] = 1
y_data[neg_idx] = 0
y_data[r_idx] = np.random.randint(0, 2, size=len(r_idx[0]))
'''
# Build the model: a sigmoid over a linear score, i.e. logistic regression.
b = tf.Variable(tf.zeros([1]))
W = tf.Variable(tf.random_uniform([1, 2], 0.0, 1.0))
y = 1.0 / (1.0 + tf.exp(-(tf.matmul(W, x_data) + b)))
# Squared loss (mean squared error); the cross-entropy variant is commented out.
loss = tf.reduce_mean(tf.square(y - y_data))
#loss = tf.reduce_mean(-y_data*tf.log(y) - (1-y_data)*tf.log(1-y))
optimizer = tf.train.GradientDescentOptimizer(0.02)
train = optimizer.minimize(loss)
# Initialize the variables (deprecated alias of tf.global_variables_initializer).
init = tf.initialize_all_variables()
# Launch the graph.
sess = tf.Session()
sess.run(init)
# Train, reporting metrics every 100 steps.
for step in xrange(0, 8001):
    sess.run(train)
    if step % 100 == 0:
        y_pred_score = sess.run(y)
        y_pred = y_pred_score.copy()
        # Threshold the predicted scores at 0.5 to get hard predictions.
        pred_pos_idx = np.where(y_pred >= 0.5)
        pred_neg_idx = np.where(y_pred < 0.5)
        y_pred[pred_pos_idx] = 1
        y_pred[pred_neg_idx] = 0
        true_count = 0
        for y_real, y_p in zip(y_data, y_pred[0]):
            if y_real == y_p:
                true_count += 1
        # Equivalent to np.mean(y_pred[0] == y_data).
        accuracy = 1.0 * true_count / len(y_data)
        logloss = "logloss %.7f" % log_loss(y_data, y_pred_score[0])
        auc = "auc %.5f" % roc_auc_score(y_data, y_pred_score[0])
        print step, auc, logloss, accuracy, sess.run(W), sess.run(b)
Output of the squared-loss run (step, auc, logloss, accuracy, W, b):
0 auc 0.84310 logloss 0.5498822 0.7314 [[0.88772833 0.2901763 ]] [0.00158335]
100 auc 0.91221 logloss 0.4756731 0.8104 [[0.8546784 0.5003181]] [0.14588131]
200 auc 0.95146 logloss 0.4245441 0.8672 [[0.8245201 0.6734331]] [0.26428747]
300 auc 0.97257 logloss 0.3884590 0.9032 [[0.80238885 0.8174961 ]] [0.36127704]
400 auc 0.98398 logloss 0.3619717 0.9282 [[0.78898185 0.93967944]] [0.44154954]
500 auc 0.99047 logloss 0.3416965 0.9454 [[0.7831979 1.0454179]] [0.50902253]
600 auc 0.99420 logloss 0.3255716 0.9576 [[0.78350157 1.1386112 ]] [0.5666945]
700 auc 0.99641 logloss 0.3123266 0.967 [[0.788461 1.2220303]] [0.61678994]
800 auc 0.99773 logloss 0.3011573 0.9744 [[0.79689586 1.2976669 ]] [0.6609511]
900 auc 0.99854 logloss 0.2915378 0.9798 [[0.80788255 1.3669811 ]] [0.70039594]
1000 auc 0.99903 logloss 0.2831120 0.985 [[0.8207148 1.4310641]] [0.7360384]
1100 auc 0.99936 logloss 0.2756309 0.9876 [[0.83485824 1.4907497 ]] [0.7685724]
1200 auc 0.99956 logloss 0.2689148 0.9894 [[0.84990925 1.5466871 ]] [0.79853165]
1300 auc 0.99969 logloss 0.2628302 0.9912 [[0.86556476 1.5993922 ]] [0.8263329]
1400 auc 0.99978 logloss 0.2572758 0.9928 [[0.8815962 1.6492797]] [0.8523048]
1500 auc 0.99984 logloss 0.2521726 0.9934 [[0.8978319 1.6966869]] [0.8767097]
1600 auc 0.99987 logloss 0.2474581 0.9934 [[0.91414297 1.7418948 ]] [0.89975923]
1700 auc 0.99990 logloss 0.2430819 0.9938 [[0.9304329 1.7851367]] [0.92162657]
1800 auc 0.99992 logloss 0.2390026 0.995 [[0.9466299 1.826611 ]] [0.9424536]
1900 auc 0.99994 logloss 0.2351861 0.9954 [[0.962681 1.8664854]] [0.9623591]
2000 auc 0.99995 logloss 0.2316035 0.9954 [[0.97854763 1.9049044 ]] [0.98144174]
2100 auc 0.99996 logloss 0.2282306 0.9956 [[0.9942021 1.9419934]] [0.99978554]
2200 auc 0.99997 logloss 0.2250466 0.9956 [[1.0096254 1.9778619]] [1.0174617]
2300 auc 0.99998 logloss 0.2220335 0.9958 [[1.0248048 2.0126045]] [1.0345314]
2400 auc 0.99998 logloss 0.2191759 0.9958 [[1.0397328 2.0463061]] [1.0510473]
2500 auc 0.99998 logloss 0.2164601 0.9958 [[1.0544051 2.079041 ]] [1.0670553]
2600 auc 0.99999 logloss 0.2138742 0.9964 [[1.0688211 2.1108747]] [1.0825957]
2700 auc 0.99999 logloss 0.2114076 0.9966 [[1.0829816 2.1418688]] [1.0977031]
2800 auc 0.99999 logloss 0.2090511 0.9968 [[1.0968899 2.1720753]] [1.1124094]
2900 auc 0.99999 logloss 0.2067963 0.997 [[1.1105498 2.2015429]] [1.1267406]
3000 auc 0.99999 logloss 0.2046358 0.9974 [[1.1239661 2.2303147]] [1.140722]
3100 auc 0.99999 logloss 0.2025629 0.9974 [[1.137145 2.258431]] [1.154376]
3200 auc 1.00000 logloss 0.2005714 0.9974 [[1.1500918 2.2859278]] [1.1677219]
3300 auc 1.00000 logloss 0.1986561 0.9976 [[1.1628131 2.3128378]] [1.180778]
3400 auc 1.00000 logloss 0.1968119 0.9978 [[1.1753151 2.3391917]] [1.1935602]
3500 auc 1.00000 logloss 0.1950344 0.9978 [[1.1876047 2.3650174]] [1.2060833]
3600 auc 1.00000 logloss 0.1933193 0.9978 [[1.1996881 2.3903406]] [1.2183608]
3700 auc 1.00000 logloss 0.1916631 0.9978 [[1.2115718 2.4151847]] [1.2304051]
3800 auc 1.00000 logloss 0.1900622 0.998 [[1.2232622 2.4395726]] [1.2422276]
3900 auc 1.00000 logloss 0.1885134 0.998 [[1.2347653 2.4635239]] [1.2538384]
4000 auc 1.00000 logloss 0.1870140 0.998 [[1.2460873 2.487058 ]] [1.2652472]
4100 auc 1.00000 logloss 0.1855612 0.998 [[1.2572337 2.5101912]] [1.2764633]
4200 auc 1.00000 logloss 0.1841525 0.998 [[1.2682104 2.5329416]] [1.2874945]
4300 auc 1.00000 logloss 0.1827856 0.998 [[1.2790228 2.555324 ]] [1.298349]
4400 auc 1.00000 logloss 0.1814584 0.9982 [[1.289676 2.5773535]] [1.3090336]
4500 auc 1.00000 logloss 0.1801690 0.9982 [[1.3001752 2.599043 ]] [1.319555]
4600 auc 1.00000 logloss 0.1789154 0.9982 [[1.3105251 2.6204057]] [1.3299196]
4700 auc 1.00000 logloss 0.1776960 0.9982 [[1.3207307 2.641453 ]] [1.3401331]
4800 auc 1.00000 logloss 0.1765093 0.9982 [[1.3307962 2.6621962]] [1.3502012]
4900 auc 1.00000 logloss 0.1753536 0.9982 [[1.340726 2.6826468]] [1.3601291]
5000 auc 1.00000 logloss 0.1742276 0.9982 [[1.3505242 2.7028143]] [1.3699218]
5100 auc 1.00000 logloss 0.1731299 0.9984 [[1.3601946 2.7227085]] [1.3795837]
5200 auc 1.00000 logloss 0.1720594 0.9984 [[1.3697414 2.742338 ]] [1.3891193]
5300 auc 1.00000 logloss 0.1710150 0.9984 [[1.3791678 2.7617116]] [1.3985324]
5400 auc 1.00000 logloss 0.1699954 0.9984 [[1.3884777 2.780837 ]] [1.4078271]
5500 auc 1.00000 logloss 0.1689997 0.9984 [[1.3976743 2.7997227]] [1.4170076]
5600 auc 1.00000 logloss 0.1680270 0.9984 [[1.4067608 2.8183758]] [1.4260768]
5700 auc 1.00000 logloss 0.1670763 0.9984 [[1.4157403 2.8368032]] [1.4350383]
5800 auc 1.00000 logloss 0.1661467 0.9984 [[1.4246159 2.8550112]] [1.4438952]
5900 auc 1.00000 logloss 0.1652375 0.9984 [[1.4333901 2.8730073]] [1.4526504]
6000 auc 1.00000 logloss 0.1643478 0.9984 [[1.4420661 2.8907962]] [1.4613072]
6100 auc 1.00000 logloss 0.1634770 0.9986 [[1.450646 2.908384]] [1.4698681]
6200 auc 1.00000 logloss 0.1626244 0.9988 [[1.4591328 2.925777 ]] [1.4783362]
6300 auc 1.00000 logloss 0.1617893 0.9988 [[1.4675289 2.9429796]] [1.4867129]
6400 auc 1.00000 logloss 0.1609711 0.999 [[1.4758364 2.9599977]] [1.4950017]
6500 auc 1.00000 logloss 0.1601691 0.9994 [[1.4840575 2.976836 ]] [1.5032043]
6600 auc 1.00000 logloss 0.1593830 0.9994 [[1.4921948 2.993498 ]] [1.5113233]
6700 auc 1.00000 logloss 0.1586120 0.9994 [[1.5002501 3.0099897]] [1.5193605]
6800 auc 1.00000 logloss 0.1578557 0.9994 [[1.5082252 3.0263145]] [1.5273182]
6900 auc 1.00000 logloss 0.1571137 0.9994 [[1.5161222 3.0424771]] [1.5351983]
7000 auc 1.00000 logloss 0.1563854 0.9994 [[1.5239431 3.0584805]] [1.543003]
7100 auc 1.00000 logloss 0.1556704 0.9994 [[1.5316896 3.0743296]] [1.5507332]
7200 auc 1.00000 logloss 0.1549683 0.9994 [[1.5393633 3.0900269]] [1.5583913]
7300 auc 1.00000 logloss 0.1542787 0.9996 [[1.5469657 3.1055768]] [1.565979]
7400 auc 1.00000 logloss 0.1536013 0.9996 [[1.5544989 3.1209826]] [1.5734972]
7500 auc 1.00000 logloss 0.1529355 0.9996 [[1.5619645 3.1362479]] [1.5809485]
7600 auc 1.00000 logloss 0.1522811 0.9996 [[1.5693632 3.151375 ]] [1.5883337]
7700 auc 1.00000 logloss 0.1516378 0.9996 [[1.5766975 3.1663673]] [1.5956546]
7800 auc 1.00000 logloss 0.1510053 0.9996 [[1.5839676 3.1812282]] [1.6029121]
7900 auc 1.00000 logloss 0.1503831 0.9996 [[1.5911758 3.1959593]] [1.610108]
8000 auc 1.00000 logloss 0.1497711 0.9996 [[1.5983231 3.2105649]] [1.6172434]
The second script is identical except that the loss is switched to cross-entropy and the learning rate is lowered to 0.005:
#!/usr/bin/env python
#-*- coding:utf8 -*-
import sys
import tensorflow as tf
import numpy as np
from sklearn.metrics import log_loss, roc_auc_score
reload(sys)
sys.setdefaultencoding('utf8')
# Generate phony data with NumPy: 5000 points with 2 features each.
# (No random seed is fixed, so each script draws its own sample.)
x_data = np.float32(np.random.randn(2, 5000))  # random inputs
# True model: p = 1/(1+e^(-z)) with z = 0.1*x1 + 0.2*x2 + 0.1
y_data = 1.0 / (1.0 + np.exp(-(np.dot([0.100, 0.200], x_data) + 0.1)))
# Threshold the probabilities at 0.5 to get binary labels.
pos_idx = np.where(y_data >= 0.5)
neg_idx = np.where(y_data < 0.5)
y_data[pos_idx] = 1
y_data[neg_idx] = 0
print x_data[:, neg_idx]                # peek at the negative samples
print len(pos_idx[0]), len(neg_idx[0])  # class balance
'''
# Alternative labeling (unused): hard labels only for confident points,
# random labels for the ambiguous middle band.
pos_idx = np.where(y_data >= 0.75)
neg_idx = np.where(y_data < 0.25)
r_idx = np.where((0.25 <= y_data) & (y_data < 0.75))
y_data[pos_idx] = 1
y_data[neg_idx] = 0
y_data[r_idx] = np.random.randint(0, 2, size=len(r_idx[0]))
'''
# Build the model: a sigmoid over a linear score, i.e. logistic regression.
b = tf.Variable(tf.zeros([1]))
W = tf.Variable(tf.random_uniform([1, 2], 0.0, 1.0))
y = 1.0 / (1.0 + tf.exp(-(tf.matmul(W, x_data) + b)))
# Cross-entropy loss; the squared-loss variant is commented out.
#loss = tf.reduce_mean(tf.square(y - y_data))
loss = tf.reduce_mean(-y_data*tf.log(y) - (1-y_data)*tf.log(1-y))
optimizer = tf.train.GradientDescentOptimizer(0.005)
train = optimizer.minimize(loss)
# Initialize the variables (deprecated alias of tf.global_variables_initializer).
init = tf.initialize_all_variables()
# Launch the graph.
sess = tf.Session()
sess.run(init)
# Train, reporting metrics every 100 steps.
for step in xrange(0, 8001):
    sess.run(train)
    if step % 100 == 0:
        y_pred_score = sess.run(y)
        y_pred = y_pred_score.copy()
        # Threshold the predicted scores at 0.5 to get hard predictions.
        pred_pos_idx = np.where(y_pred >= 0.5)
        pred_neg_idx = np.where(y_pred < 0.5)
        y_pred[pred_pos_idx] = 1
        y_pred[pred_neg_idx] = 0
        true_count = 0
        for y_real, y_p in zip(y_data, y_pred[0]):
            if y_real == y_p:
                true_count += 1
        # Equivalent to np.mean(y_pred[0] == y_data).
        accuracy = 1.0 * true_count / len(y_data)
        logloss = "logloss %.7f" % log_loss(y_data, y_pred_score[0])
        auc = "auc %.5f" % roc_auc_score(y_data, y_pred_score[0])
        print step, auc, logloss, accuracy, sess.run(W), sess.run(b)
Output of the cross-entropy run (step, auc, logloss, accuracy, W, b):
0 auc 0.99897 logloss 0.4682144 0.8158 [[0.34303367 0.7908708 ]] [0.00082727]
100 auc 0.99933 logloss 0.4416109 0.853 [[0.38563004 0.8637445 ]] [0.07940046]
200 auc 0.99954 logloss 0.4191604 0.8764 [[0.4248231 0.9317635]] [0.15053903]
300 auc 0.99968 logloss 0.3999746 0.894 [[0.46114084 0.9956518 ]] [0.21527785]
400 auc 0.99977 logloss 0.3833886 0.9102 [[0.4950076 1.0559853]] [0.2744829]
500 auc 0.99983 logloss 0.3688995 0.927 [[0.52676624 1.1132267 ]] [0.32887986]
600 auc 0.99987 logloss 0.3561222 0.938 [[0.55669564 1.1677507 ]] [0.3790786]
700 auc 0.99990 logloss 0.3447581 0.9466 [[0.58502394 1.2198646 ]] [0.42559424]
800 auc 0.99992 logloss 0.3345734 0.9554 [[0.6119396 1.2698219]] [0.46886393]
900 auc 0.99993 logloss 0.3253824 0.9604 [[0.63759995 1.3178353 ]] [0.50925994]
1000 auc 0.99995 logloss 0.3170365 0.9646 [[0.66213787 1.3640852 ]] [0.5471017]
1100 auc 0.99995 logloss 0.3094152 0.9692 [[0.6856646 1.4087251]] [0.5826629]
1200 auc 0.99996 logloss 0.3024202 0.9728 [[0.70827585 1.4518868 ]] [0.6161814]
1300 auc 0.99997 logloss 0.2959703 0.976 [[0.73005396 1.4936855 ]] [0.6478632]
1400 auc 0.99997 logloss 0.2899982 0.979 [[0.75107 1.5342197]] [0.6778873]
1500 auc 0.99997 logloss 0.2844472 0.9822 [[0.7713857 1.5735816]] [0.7064112]
1600 auc 0.99997 logloss 0.2792698 0.985 [[0.7910553 1.611847 ]] [0.73357266]
1700 auc 0.99998 logloss 0.2744252 0.9864 [[0.810127 1.649087]] [0.7594935]
1800 auc 0.99998 logloss 0.2698789 0.988 [[0.8286436 1.6853638]] [0.7842816]
1900 auc 0.99998 logloss 0.2656010 0.9896 [[0.8466426 1.7207344]] [0.8080322]
2000 auc 0.99998 logloss 0.2615655 0.9906 [[0.8641579 1.75525 ]] [0.8308302]
2100 auc 0.99998 logloss 0.2577501 0.9928 [[0.8812199 1.7889571]] [0.85275155]
2200 auc 0.99998 logloss 0.2541350 0.9936 [[0.89785594 1.821899 ]] [0.87386405]
2300 auc 0.99999 logloss 0.2507030 0.994 [[0.9140909 1.8541148]] [0.8942286]
2400 auc 0.99999 logloss 0.2474387 0.9948 [[0.9299476 1.8856399]] [0.9139001]
2500 auc 0.99999 logloss 0.2443287 0.9952 [[0.9454467 1.9165078]] [0.93292785]
2600 auc 0.99999 logloss 0.2413610 0.9964 [[0.96060705 1.9467493 ]] [0.9513565]
2700 auc 0.99999 logloss 0.2385246 0.9966 [[0.9754458 1.976393 ]] [0.9692269]
2800 auc 0.99999 logloss 0.2358101 0.9962 [[0.989979 2.0054648]] [0.98657566]
2900 auc 0.99999 logloss 0.2332087 0.9968 [[1.0042214 2.0339892]] [1.0034361]
3000 auc 0.99999 logloss 0.2307126 0.9972 [[1.0181869 2.0619903]] [1.019839]
3100 auc 0.99999 logloss 0.2283147 0.9978 [[1.0318878 2.0894887]] [1.0358123]
3200 auc 0.99999 logloss 0.2260086 0.998 [[1.0453359 2.116505 ]] [1.0513806]
3300 auc 0.99999 logloss 0.2237885 0.9976 [[1.0585418 2.1430578]] [1.0665686]
3400 auc 0.99999 logloss 0.2216489 0.9978 [[1.0715163 2.1691654]] [1.0813973]
3500 auc 0.99999 logloss 0.2195851 0.998 [[1.0842687 2.1948445]] [1.0958863]
3600 auc 0.99999 logloss 0.2175926 0.9982 [[1.0968077 2.220111 ]] [1.1100539]
3700 auc 0.99999 logloss 0.2156672 0.9982 [[1.1091422 2.2449791]] [1.1239175]
3800 auc 0.99999 logloss 0.2138051 0.9984 [[1.1212792 2.2694638]] [1.1374918]
3900 auc 0.99999 logloss 0.2120030 0.9982 [[1.1332269 2.293578 ]] [1.1507918]
4000 auc 0.99999 logloss 0.2102575 0.9982 [[1.1449919 2.3173344]] [1.1638304]
4100 auc 0.99999 logloss 0.2085657 0.998 [[1.1565804 2.3407454]] [1.176621]
4200 auc 0.99999 logloss 0.2069248 0.998 [[1.1679993 2.3638217]] [1.1891742]
4300 auc 0.99999 logloss 0.2053322 0.998 [[1.179254 2.386575]] [1.2015009]
4400 auc 0.99999 logloss 0.2037857 0.9978 [[1.1903503 2.4090145]] [1.2136115]
4500 auc 0.99999 logloss 0.2022828 0.9978 [[1.2012931 2.4311516]] [1.2255152]
4600 auc 0.99999 logloss 0.2008216 0.9976 [[1.212088 2.4529939]] [1.237221]
4700 auc 0.99999 logloss 0.1994001 0.9976 [[1.2227392 2.4745512]] [1.2487369]
4800 auc 0.99999 logloss 0.1980165 0.9976 [[1.2332513 2.495832 ]] [1.2600706]
4900 auc 0.99999 logloss 0.1966692 0.9976 [[1.2436285 2.5168436]] [1.2712294]
5000 auc 0.99999 logloss 0.1953565 0.9978 [[1.2538753 2.5375948]] [1.28222]
5100 auc 0.99999 logloss 0.1940768 0.9976 [[1.263995 2.558093]] [1.293049]
5200 auc 0.99999 logloss 0.1928289 0.9972 [[1.273992 2.578345]] [1.303722]
5300 auc 0.99999 logloss 0.1916113 0.9972 [[1.283869 2.5983582]] [1.3142455]
5400 auc 0.99999 logloss 0.1904229 0.9972 [[1.2936301 2.618138 ]] [1.3246238]
5500 auc 0.99999 logloss 0.1892624 0.997 [[1.3032784 2.637692 ]] [1.3348626]
5600 auc 0.99999 logloss 0.1881288 0.997 [[1.3128169 2.6570244]] [1.3449669]
5700 auc 0.99999 logloss 0.1870210 0.997 [[1.3222483 2.6761425]] [1.3549405]
5800 auc 0.99999 logloss 0.1859380 0.997 [[1.3315759 2.6950512]] [1.364788]
5900 auc 0.99999 logloss 0.1848789 0.997 [[1.340802 2.713756]] [1.3745135]
6000 auc 0.99999 logloss 0.1838428 0.997 [[1.3499296 2.732262 ]] [1.384121]
6100 auc 0.99999 logloss 0.1828288 0.997 [[1.3589609 2.7505744]] [1.3936142]
6200 auc 0.99999 logloss 0.1818361 0.997 [[1.3678985 2.7686977]] [1.4029965]
6300 auc 0.99999 logloss 0.1808640 0.997 [[1.3767446 2.786637 ]] [1.4122709]
6400 auc 0.99999 logloss 0.1799118 0.997 [[1.3855016 2.8043952]] [1.4214407]
6500 auc 0.99999 logloss 0.1789787 0.997 [[1.3941717 2.8219776]] [1.4305096]
6600 auc 0.99999 logloss 0.1780641 0.997 [[1.4027568 2.839389 ]] [1.4394796]
6700 auc 0.99999 logloss 0.1771675 0.997 [[1.4112588 2.856632 ]] [1.4483538]
6800 auc 0.99999 logloss 0.1762881 0.997 [[1.4196796 2.8737109]] [1.4571344]
6900 auc 0.99999 logloss 0.1754254 0.997 [[1.4280216 2.89063 ]] [1.4658246]
7000 auc 0.99999 logloss 0.1745790 0.997 [[1.4362856 2.9073915]] [1.4744264]
7100 auc 0.99999 logloss 0.1737482 0.997 [[1.4444741 2.924 ]] [1.4829422]
7200 auc 0.99999 logloss 0.1729326 0.997 [[1.4525887 2.940458 ]] [1.4913741]
7300 auc 0.99999 logloss 0.1721317 0.997 [[1.4606307 2.956769 ]] [1.4997244]
7400 auc 0.99999 logloss 0.1713451 0.997 [[1.4686018 2.9729364]] [1.5079951]
7500 auc 0.99999 logloss 0.1705723 0.997 [[1.4765035 2.9889634]] [1.5161877]
7600 auc 0.99999 logloss 0.1698129 0.997 [[1.4843373 3.0048525]] [1.5243046]
7700 auc 0.99999 logloss 0.1690665 0.997 [[1.4921044 3.020606 ]] [1.5323476]
7800 auc 0.99999 logloss 0.1683328 0.997 [[1.4998068 3.0362282]] [1.5403181]
7900 auc 0.99999 logloss 0.1676113 0.9968 [[1.5074447 3.0517194]] [1.5482178]
8000 auc 0.99999 logloss 0.1669018 0.9968 [[1.5150204 3.0670843]] [1.5560484]
Final iteration with the squared loss:
8000 auc 1.00000 logloss 0.1497711 0.9996 [[1.5983231 3.2105649]] [1.6172434]
Final iteration with the cross-entropy loss:
8000 auc 0.99999 logloss 0.1669018 0.9968 [[1.5150204 3.0670843]] [1.5560484]
The two final results are close. The remaining gap also depends on the random initialization (and note the two runs use different learning rates, 0.02 vs 0.005, and independently drawn samples), so on this data it is hard to say which loss wins. Both runs do recover the true separating direction: the final parameters are roughly a common multiple of the generating values, e.g. weights [1.598, 3.211] with bias 1.617 is about 16 * [0.1, 0.2] and 16 * 0.1. Because the thresholded labels are perfectly separable, only this direction matters and the parameter norm keeps growing the longer training runs.
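One caveat on the hand-written cross-entropy above: it feeds y straight into tf.log, which produces NaN as soon as the sigmoid saturates to exactly 0 or 1. TensorFlow ships a numerically stable fused op for this case, tf.nn.sigmoid_cross_entropy_with_logits, which operates on the raw linear scores. A minimal sketch against the same W, b, x_data, y_data as in the scripts above (TF 1.x API; the float32 cast is needed because the op requires labels and logits to share a dtype):

# Numerically stable cross-entropy on the raw logits (no explicit sigmoid).
logits = tf.matmul(W, x_data) + b
labels = y_data.astype(np.float32).reshape(1, -1)
loss = tf.reduce_mean(
    tf.nn.sigmoid_cross_entropy_with_logits(labels=labels, logits=logits))
train = tf.train.GradientDescentOptimizer(0.005).minimize(loss)
# For evaluation, recover probabilities with tf.sigmoid(logits).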