- 阅读权限
- 255
- 威望
- 0 级
- 论坛币
- 41133 个
- 通用积分
- 2.0023
- 学术水平
- 7 点
- 热心指数
- 5 点
- 信用等级
- 5 点
- 经验
- 2201 点
- 帖子
- 198
- 精华
- 1
- 在线时间
- 36 小时
- 注册时间
- 2015-6-1
- 最后登录
- 2024-3-3
|
######################## USING TENSORFLOW (neural network)
# Occupancy detection with a two-hidden-layer perceptron (TF1-style graph API).
library(tensorflow)
np <- import("numpy")
tf <- import("tensorflow")

# Predictor columns and the binary target used for modeling
xFeatures <- c("Temperature", "Humidity", "Light", "CO2", "HumidityRatio")
yFeatures <- "Occupancy"

# Load train/test splits (CSV-formatted .txt files)
occupancy_train <- read.csv("/occupation_detection/datatraining.txt", stringsAsFactors = TRUE)
occupancy_test <- read.csv("/occupation_detection/datatest.txt", stringsAsFactors = TRUE)

# Subset to the modeling columns and coerce every column to numeric.
# apply() deliberately returns a numeric matrix here, which the
# tf$constant(unlist(...)) calls below expect.
occupancy_train <- apply(occupancy_train[, c(xFeatures, yFeatures)], 2, FUN = as.numeric)
occupancy_test <- apply(occupancy_test[, c(xFeatures, yFeatures)], 2, FUN = as.numeric)
# Data dimensions
nFeatures <- length(xFeatures)
nRow <- nrow(occupancy_train)

# Reset the default graph and start an interactive session (TF1 API)
tf$reset_default_graph()
sess <- tf$InteractiveSession()

# Network parameters
n_hidden_1 <- 5L  # 1st hidden layer width
n_hidden_2 <- 5L  # 2nd hidden layer width
n_input <- 5L     # 5 input attributes
n_classes <- 1L   # binary target -> single logit column

# Model parameters
learning_rate <- 0.001
training_epochs <- 10000L

# Graph inputs: the full training set is baked into the graph as constants
# (full-batch training; no placeholders/feeding are used in this script)
x <- tf$constant(unlist(occupancy_train[, xFeatures]), shape = c(nRow, n_input), dtype = np$float32)
y <- tf$constant(unlist(occupancy_train[, yFeatures]), dtype = "float32", shape = c(nRow, 1L))
# Create model
# Two ReLU hidden layers followed by a linear output layer.
# Returns raw logits; the caller applies sigmoid (or a logits-based loss).
multilayer_perceptron <- function(x, weights, biases) {
  # First hidden layer: affine transform + ReLU
  hidden1 <- tf$nn$relu(tf$add(tf$matmul(x, weights[["h1"]]), biases[["b1"]]))
  # Second hidden layer: affine transform + ReLU
  hidden2 <- tf$nn$relu(tf$add(tf$matmul(hidden1, weights[["h2"]]), biases[["b2"]]))
  # Output layer with linear activation (logits)
  tf$matmul(hidden2, weights[["out"]]) + biases[["out"]]
}
# Initialise hidden-layer weights & biases from a standard normal
weights <- list(
  "h1" = tf$Variable(tf$random_normal(c(n_input, n_hidden_1))),
  "h2" = tf$Variable(tf$random_normal(c(n_hidden_1, n_hidden_2))),
  "out" = tf$Variable(tf$random_normal(c(n_hidden_2, n_classes)))
)
biases <- list(
  "b1" = tf$Variable(tf$random_normal(c(1L, n_hidden_1))),
  "b2" = tf$Variable(tf$random_normal(c(1L, n_hidden_2))),
  "out" = tf$Variable(tf$random_normal(c(1L, n_classes)))
)

# Construct model: raw logits for the training inputs
pred <- multilayer_perceptron(x, weights, biases)

# Loss: mean sigmoid cross-entropy on the logits; optimizer: Adam
cost <- tf$reduce_mean(tf$nn$sigmoid_cross_entropy_with_logits(logits = pred, labels = y))
optimizer <- tf$train$AdamOptimizer(learning_rate = learning_rate)$minimize(cost)

# Initialize the global variables before training
init <- tf$global_variables_initializer()
sess$run(init)
# Training cycle: one full-batch gradient step per epoch,
# logging the current cost every 20 epochs
for (epoch in seq_len(training_epochs)) {
  sess$run(optimizer)
  if (epoch %% 20 == 0) {
    cat(epoch, "-", sess$run(cost), "\n")
  }
}
# Performance on Train: ROC of sigmoid(logits) against the actual labels
library(pROC)
ypred <- sess$run(tf$nn$sigmoid(multilayer_perceptron(x, weights, biases)))
roc_obj <- roc(occupancy_train[, yFeatures], as.numeric(ypred))

# Performance on Test: bake the test features into a constant and score them
nRowt <- nrow(occupancy_test)
xt <- tf$constant(unlist(occupancy_test[, xFeatures]), shape = c(nRowt, nFeatures), dtype = np$float32)
ypredt <- sess$run(tf$nn$sigmoid(multilayer_perceptron(xt, weights, biases)))
roc_objt <- roc(occupancy_test[, yFeatures], as.numeric(ypredt))

# Overlay train (green, dashed) and test (red, dot-dash) ROC curves
plot.roc(roc_obj, col = "green", lty = 2, lwd = 2)
plot.roc(roc_objt, add = TRUE, col = "red", lty = 4, lwd = 2)
复制代码
|
|