library(mlbench)
data("PimaIndiansDiabetes2",package = "mlbench")
ncol(PimaIndiansDiabetes2)
nrow(PimaIndiansDiabetes2)
str(PimaIndiansDiabetes2)
sapply(PimaIndiansDiabetes2,function(x) sum(is.na(x)))
temp<-(PimaIndiansDiabetes2)
temp$insulin<-NULL
temp$triceps<-NULL
temp<-na.omit(temp)
nrow(temp)
ncol(temp)
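The insulin and triceps columns account for most of the missing values, which is why they are dropped before na.omit(); a quick sketch of the trade-off in complete rows (base R only, no output reproduced here):
sum(complete.cases(PimaIndiansDiabetes2))
sum(complete.cases(PimaIndiansDiabetes2[,
    !(names(PimaIndiansDiabetes2) %in% c("insulin","triceps"))]))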
y<-(temp$diabetes)
temp$diabetes<-NULL
temp<-scale(temp)
y<-ifelse(y=="pos",1,-1)   # recode the class label as -1/+1 for use with sign()
temp<-cbind(temp,y)        # target becomes column 7; columns 1:6 are the scaled predictors
class(temp)
summary(temp)
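Before splitting, it is worth confirming that the recoded target really sits in column 7 as -1/+1; a minimal check:
colnames(temp)
table(temp[,7])   # counts of -1 (neg) and +1 (pos)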
set.seed(2016)
n=nrow(temp)
n_train<-600
n_test<-n-n_train
train<-sample(1:n,n_train,FALSE)
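A quick sanity check on the split (a sketch; the exact class counts depend on the seed):
length(train)          # should be 600
table(temp[train,7])   # class balance in the training rows
table(temp[-train,7])  # class balance in the 124 held-out rows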
require(RSNNS)
set.seed(2016)
X<-temp[train,1:6]
Y<-temp[train,7]
fitMLP<-mlp(x=X,y=Y,size = c(12,8),maxit = 1000,   # two hidden layers with 12 and 8 units
            initFunc = "Random_Weights",
            initFuncParams = c(-0.3,0.3),          # initial weights drawn from [-0.3, 0.3]
            learnFunc = "Std_Backpropagation",
            learnFuncParams = c(0.2,0),            # learning rate 0.2
            updateFunc = "Topological_Order",
            updateFuncParams = c(0),
            hiddenActFunc = "Act_Logistic",        # logistic activation in the hidden layers
            shufflePatterns = TRUE,
            linOut = TRUE)                         # linear output unit; sign() gives the class
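RSNNS keeps the per-epoch training error inside the fitted object, so convergence over the 1000 iterations can be inspected; a small sketch, assuming the fitted rsnns object exposes IterativeFitError as documented:
plotIterativeError(fitMLP)        # SSE on the training patterns per epoch
tail(fitMLP$IterativeFitError)    # final few error values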
predMLP<-sign(predict(fitMLP,temp[-train,1:6]))
table(predMLP,sign(temp[-train,7]),dnn=c("Predicted","Observed"))
         Observed
Predicted -1  1
       -1 60 13
       1  14 37
error_rate=(1-sum(predMLP==sign(temp[-train,7]))/124)
round(error_rate,3)
[1] 0.218
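The same confusion table also gives per-class rates; recomputing them from the predictions (simple arithmetic on the counts shown above, i.e. sensitivity 37/50 and specificity 60/74):
tab<-table(predMLP,sign(temp[-train,7]),dnn=c("Predicted","Observed"))
sensitivity<-tab["1","1"]/sum(tab[,"1"])
specificity<-tab["-1","-1"]/sum(tab[,"-1"])
round(c(sensitivity=sensitivity,specificity=specificity),3)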
detach("package:RSNNS",unload = TRUE)
library(AMORE)
net<-newff(n.neurons = c(6,12,8,1),        # 6 inputs, hidden layers of 12 and 8, 1 output
           learning.rate.global = 0.01,
           momentum.global = 0.5,
           error.criterium = "LMLS",       # least mean log squares error
           Stao = NA,
           hidden.layer = "sigmoid",
           output.layer = "purelin",       # linear output; classified later via sign()
           method = "ADAPTgdwm")           # adaptive gradient descent with momentum
X<-temp[train,1:6]   # the six scaled predictors only
Y<-temp[train,7]     # the -1/+1 target
fit<-train(net,P=X,T=Y,error.criterium="LMLS",
report=T,show.step=100,n.shows=5)
index.show: 1 LMLS 0.185166160641345
index.show: 2 LMLS 0.176012124255052
index.show: 3 LMLS 0.169291895233891
index.show: 4 LMLS 0.162183154823355
index.show: 5 LMLS 0.156368572375286
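AMORE's train() returns a list; assuming it exposes the updated network as $net and the reported errors as $Merror (as in the package documentation), the LMLS trajectory above can also be plotted:
str(fit,max.level = 1)
plot(fit$Merror,type = "l",xlab = "report",ylab = "LMLS error")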
pred<-sign(sim(fit$net,temp[-train,1:6]))
table(pred,sign(temp[-train,7]),
dnn=c("Predicted","Observed"))
         Observed
Predicted -1  1
       -1 66 15
       1   8 35
error_rate=(1-sum(pred==sign(temp[-train,7]))/124)
round(error_rate,3)
[1] 0.185
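Putting the two hold-out results side by side (counts taken from the two confusion tables above):
errors<-c(RSNNS_mlp = (13+14)/124, AMORE_newff = (15+8)/124)
round(errors,3)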
Stopping here for now...