# Neural networks and deep learning in R (part 1)
# Plot the ReLU activation function.
x <- seq(-1, 1, 0.1) # x values: arithmetic sequence from -1 to 1
relu <- function(x) pmax(x, 0) # ReLU(x) = max(x, 0), vectorized via pmax
plot(x, relu(x), type = "l") # draw the function as a line
text(0.6, 0.4, "ReLU(x)") # annotate the curve
# Perceptron model
# Install the neuralnet package only if it is missing, so re-running the
# script does not hit the network (and CRAN) every time.
if (!requireNamespace("neuralnet", quietly = TRUE)) {
  install.packages("neuralnet") # basic neural-network package
}
library(neuralnet)
#先看一个神经网络完成非线性回归的例子
# First, an example of a neural network performing nonlinear regression:
# learn y = log(x) from 50 random samples.
traininginput <- as.data.frame(runif(50, min = 1, max = 100)) # 50 random inputs in [1, 100]
trainingoutput <- log(traininginput) # target values: natural log of the inputs
traindata <- cbind(traininginput, trainingoutput) # column-bind input/output into the training set
colnames(traindata) <- c("input", "output") # name the training-data columns
# Network topology: input layer + one hidden layer with 10 neurons + output layer.
# Training stops once the error-function partial derivatives drop below 0.01.
net.log <- neuralnet(output ~ input, traindata, hidden = 10, threshold = 0.01)
print(net.log) # rich output: inter-layer weights, fit quality, etc.
$call
neuralnet(formula = output ~ input, data = traindata, hidden = 10,
threshold = 0.01)
$response
output
1 3.7636802
2 4.4644749
3 1.7436266
4 4.4739950
5 4.5343469
6 4.3922093
7 3.1241007
8 4.1268159
9 4.5423753
10 4.0774836
11 4.0416826
12 4.5609231
13 1.9013686
14 4.0447385
15 3.9645057
16 4.2647878
17 4.1592951
18 4.0911714
19 4.1509276
20 4.0576335
21 4.3243332
22 3.9223834
23 0.2055446
24 3.7957673
25 4.5556668
26 4.0531594
27 3.7693452
28 4.0053732
29 2.7642808
30 3.8549792
31 2.9460532
32 2.6809099
33 2.9693048
34 3.9097673
35 4.3624009
36 3.8901353
37 3.7667034
38 2.9365815
39 4.6030032
40 4.5424663
41 4.6006236
42 4.4700970
43 4.2985708
44 4.6039118
45 2.4092103
46 3.8176258
47 2.1870567
48 4.4869113
49 4.3311433
50 4.4543536
$covariate
[1,] 43.106777
[2,] 86.875399
[3,] 5.718043
[4,] 87.706411
[5,] 93.162653
[6,] 80.818776
[7,] 22.739436
[8,] 61.980258
[9,] 93.913611
[10,] 58.996821
[11,] 56.922039
[12,] 95.671757
[13,] 6.695051
[14,] 57.096255
[15,] 52.694217
[16,] 71.149822
[17,] 64.026377
[18,] 59.809912
[19,] 63.492872
[20,] 57.837277
[21,] 75.515146
[22,] 50.520712
[23,] 1.228194
[24,] 44.512379
[25,] 95.170192
[26,] 57.579083
[27,] 43.351669
[28,] 54.892306
[29,] 15.867625
[30,] 47.227636
[31,] 19.030695
[32,] 14.598370
[33,] 19.478373
[34,] 49.887342
[35,] 78.445248
[36,] 48.917503
[37,] 43.237292
[38,] 18.851293
[39,] 99.783536
[40,] 93.922153
[41,] 99.546369
[42,] 87.365201
[43,] 73.594536
[44,] 99.874243
[45,] 11.125173
[46,] 45.496064
[47,] 8.908953
[48,] 88.846600
[49,] 76.031160
[50,] 86.000542
$model.list
$model.list$response
[1] "output"
$model.list$variables
[1] "input"
$err.fct
function (x, y)
{
1/2 * (y - x)^2
}
attr(,"type")
[1] "sse"
$act.fct
function (x)
{
1/(1 + exp(-x))
}
attr(,"type")
[1] "logistic"
$linear.output
[1] TRUE
$data
input output
1 43.106777 3.7636802
2 86.875399 4.4644749
3 5.718043 1.7436266
4 87.706411 4.4739950
5 93.162653 4.5343469
6 80.818776 4.3922093
7 22.739436 3.1241007
8 61.980258 4.1268159
9 93.913611 4.5423753
10 58.996821 4.0774836
11 56.922039 4.0416826
12 95.671757 4.5609231
13 6.695051 1.9013686
14 57.096255 4.0447385
15 52.694217 3.9645057
16 71.149822 4.2647878
17 64.026377 4.1592951
18 59.809912 4.0911714
19 63.492872 4.1509276
20 57.837277 4.0576335
21 75.515146 4.3243332
22 50.520712 3.9223834
23 1.228194 0.2055446
24 44.512379 3.7957673
25 95.170192 4.5556668
26 57.579083 4.0531594
27 43.351669 3.7693452
28 54.892306 4.0053732
29 15.867625 2.7642808
30 47.227636 3.8549792
31 19.030695 2.9460532
32 14.598370 2.6809099
33 19.478373 2.9693048
34 49.887342 3.9097673
35 78.445248 4.3624009
36 48.917503 3.8901353
37 43.237292 3.7667034
38 18.851293 2.9365815
39 99.783536 4.6030032
40 93.922153 4.5424663
41 99.546369 4.6006236
42 87.365201 4.4700970
43 73.594536 4.2985708
44 99.874243 4.6039118
45 11.125173 2.4092103
46 45.496064 3.8176258
47 8.908953 2.1870567
48 88.846600 4.4869113
49 76.031160 4.3311433
50 86.000542 4.4543536
$exclude
NULL
$net.result
$net.result[[1]]
[,1]
[1,] 3.7611324
[2,] 4.4676382
[3,] 1.7450071
[4,] 4.4767355
[5,] 4.5333355
[6,] 4.3973251
[7,] 3.1264467
[8,] 4.1283555
[9,] 4.5407127
[10,] 4.0778398
[11,] 4.0412524
[12,] 4.5576071
[13,] 1.9011417
[14,] 4.0443723
[15,] 3.9627180
[16,] 4.2694251
[17,] 4.1616376
[18,] 4.0918471
[19,] 4.1530636
[20,] 4.0575438
[21,] 4.3296645
[22,] 3.9200950
[23,] 0.2058114
[24,] 3.7930785
[25,] 4.5528409
[26,] 4.0529725
[27,] 3.7667663
[28,] 4.0042391
[29,] 2.7649095
[30,] 3.8522756
[31,] 2.9476404
[32,] 2.6810941
[33,] 2.9710108
[34,] 3.9073649
[35,] 4.3677722
[36,] 3.8875888
[37,] 3.7641387
[38,] 2.9381196
[39,] 4.5951146
[40,] 4.5407961
[41,] 4.5930255
[42,] 4.4730158
[43,] 4.3036824
[44,] 4.5959112
[45,] 2.4079249
[46,] 3.8148925
[47,] 2.1852585
[48,] 4.4890086
[49,] 4.3365084
[50,] 4.4579205
$weights
$weights[[1]]
$weights[[1]][[1]]
[,1] [,2] [,3] [,4] [,5]
[1,] 0.44219842 3.240757 3.202168 -0.7877108 0.40986679
[2,] 0.03712114 3.187768 1.834951 0.1106201 -0.02969351
[,6] [,7] [,8] [,9] [,10]
[1,] -0.63775130 0.3209541 1.088130 -1.7936935 2.897821
[2,] 0.02159582 -0.2508112 -1.443933 0.7005957 3.665216
$weights[[1]][[2]]
[,1]
[1,] 0.97762799
[2,] 1.18062819
[3,] -0.66380527
[4,] -0.01894856
[5,] 1.22171422
[6,] -0.94174505
[7,] 1.95836189
[8,] -1.71540135
[9,] -1.58353932
[10,] 0.49503450
[11,] -0.11677912
$generalized.weights
$generalized.weights[[1]]
[,1]
[1,] -0.0022210615
[2,] -0.0007116606
[3,] -0.1330105041
[4,] -0.0006983659
[5,] -0.0006173567
[6,] -0.0008171924
[7,] -0.0066323721
[8,] -0.0012800595
[9,] -0.0006070058
[10,] -0.0013815810
[11,] -0.0014591160
[12,] -0.0005834740
[13,] -0.0863911855
[14,] -0.0014523614
[15,] -0.0016390983
[16,] -0.0010236995
[17,] -0.0012162685
[18,] -0.0013528213
[19,] -0.0012324843
[20,] -0.0014241447
[21,] -0.0009238604
[22,] -0.0017459440
[23,] 4.5417733837
[24,] -0.0021141160
[25,] -0.0005900886
[26,] -0.0014338833
[27,] -0.0022017184
[28,] -0.0015414536
[29,] -0.0129830198
[30,] -0.0019321744
[31,] -0.0092002434
[32,] -0.0152801351
[33,] -0.0088113650
[34,] -0.0017792936
[35,] -0.0008631226
[36,] -0.0018325209
[37,] -0.0022107137
[38,] -0.0093639779
[39,] -0.0005320635
[40,] -0.0006068891
[41,] -0.0005348975
[42,] -0.0007037923
[43,] -0.0009663184
[44,] -0.0005309836
[45,] -0.0266392482
[46,] -0.0020447419
[47,] -0.0432961417
[48,] -0.0006805536
[49,] -0.0009128212
[50,] -0.0007259494
$startweights
$startweights[[1]]
$startweights[[1]][[1]]
[,1] [,2] [,3] [,4] [,5]
[1,] -0.03350376 0.2343721 0.9467223 0.6239159 -0.4009166
[2,] 0.18488311 0.5129832 0.3752907 0.2825048 -0.6657770
[,6] [,7] [,8] [,9] [,10]
[1,] -0.5012463 -0.5600915 0.9140493 -1.970049 -0.1085638
[2,] 0.1647918 -1.1582757 -1.4669017 1.260016 0.6588312
$startweights[[1]][[2]]
[,1]
[1,] 0.6881827
[2,] 0.7268674
[3,] -0.9532505
[4,] -0.3083938
[5,] 0.9051658
[6,] -0.2535406
[7,] 0.7907308
[8,] -0.2203825
[9,] 1.0148172
[10,] 0.2076412
[11,] -0.4062244
$result.matrix
[,1]
error 2.551317e-04
reached.threshold 8.599877e-03
steps 1.757000e+03
Intercept.to.1layhid1 4.421984e-01
input.to.1layhid1 3.712114e-02
Intercept.to.1layhid2 3.240757e+00
input.to.1layhid2 3.187768e+00
Intercept.to.1layhid3 3.202168e+00
input.to.1layhid3 1.834951e+00
Intercept.to.1layhid4 -7.877108e-01
input.to.1layhid4 1.106201e-01
Intercept.to.1layhid5 4.098668e-01
input.to.1layhid5 -2.969351e-02
Intercept.to.1layhid6 -6.377513e-01
input.to.1layhid6 2.159582e-02
Intercept.to.1layhid7 3.209541e-01
input.to.1layhid7 -2.508112e-01
Intercept.to.1layhid8 1.088130e+00
input.to.1layhid8 -1.443933e+00
Intercept.to.1layhid9 -1.793694e+00
input.to.1layhid9 7.005957e-01
Intercept.to.1layhid10 2.897821e+00
input.to.1layhid10 3.665216e+00
Intercept.to.output 9.776280e-01
1layhid1.to.output 1.180628e+00
1layhid2.to.output -6.638053e-01
1layhid3.to.output -1.894856e-02
1layhid4.to.output 1.221714e+00
1layhid5.to.output -9.417451e-01
1layhid6.to.output 1.958362e+00
1layhid7.to.output -1.715401e+00
1layhid8.to.output -1.583539e+00
1layhid9.to.output 4.950345e-01
1layhid10.to.output -1.167791e-01
attr(,"class")
[1] "nn"
ls(net.log) #list the attributes stored on the fitted network; each can be read directly as net.log$<name>
[1] "act.fct" "call"
[3] "covariate" "data"
[5] "err.fct" "exclude"
[7] "generalized.weights" "linear.output"
[9] "model.list" "net.result"
[11] "response" "result.matrix"
[13] "startweights" "weights"
net.log$act.fct #inspect the act.fct attribute; the default activation is the logistic (sigmoid) function
function (x)
{
1/(1 + exp(-x))
}
attr(,"type")
[1] "logistic"
plot(net.log) # draw the fitted network (nodes plus edge weights)
# Evaluate the trained network: can it compute the logarithm of new inputs?
testdata <- as.data.frame((1:10)^2) # test inputs: the squares 1, 4, ..., 100
net.result <- compute(net.log, testdata) # run the trained network on the test data
ls(net.result) # show the components of the result object
[1] "net.result" "neurons"
# NOTE(review): this repeats the compute() call above under the plural
# name `net.results`; the code below reads `net.results`, which makes the
# earlier `net.result` object redundant — consider keeping just one.
net.results=compute(net.log,testdata) #run the trained network on the test data
ls(net.results) #show the components of the result object
[1] "net.result" "neurons"
# For easy comparison, place the input, the true value, and the network's
# prediction side by side in one data frame.
niceoutput <- cbind(testdata, log(testdata), as.data.frame(net.results$net.result))
colnames(niceoutput) <- c("input", "expected output", "neural net output") # name the result columns
print(niceoutput) # print the comparison table
input expected output neural net output
1 1 0.000000 0.03171921
2 4 1.386294 1.39359903
3 9 2.197225 2.19541861
4 16 2.772589 2.77326131
5 25 3.218876 3.22133011
6 36 3.583519 3.58289693
7 49 3.891820 3.88928457
8 64 4.158883 4.16121537
9 81 4.394449 4.39953416
10 100 4.605170 4.59701350
# Compute the mean squared error between predictions and true values.
error <- as.data.frame(net.results$net.result) - log(testdata) # prediction error per test point
mse <- sum(error^2) / nrow(error) # mean squared error over the test set
mse # display the result; a tiny MSE means the predictions are very accurate
[1] 0.0001173849