以下面数据举例
- data_train <- data.frame(matrix(0,ncol=5,nrow=10))
- data_train$X1 <- as.numeric(1:10)
- data_train$X2 <- as.numeric(2:11)
- data_train$X3 <- as.numeric(3:12)
- data_train$X4 <- as.numeric(4:13)
- data_train$X5 <- as.numeric(5:14)
- library(nnet)
- nnet.sol <- nnet(X5~.,data=data_train,size=2,maxit=1000)
- predict(nnet.sol)
- #结果为 1 1 1 1 1 1 1 1 1 1
- library(AMORE)
- net <- newff(n.neurons=c(4,3,3,1), learning.rate.global=1e-2, momentum.global=0.5,
- error.criterium="LMS", Stao=NA, hidden.layer="tansig",
- output.layer="sigmoid", method="BATCHgdwm")
- result <- train(net, data_train[,1:4] , data_train[,5], error.criterium="LMS", report=TRUE, show.step=100, n.shows=10 )
- sim(result$net, data_train[,1:4])
- #结果为 1 1 1 1 1 1 1 1 1 1
请问是怎么回事?


雷达卡




京公网安备 11010802022788号







