1. Kernel regression: first define kernel functions such as the Gaussian and Epanechnikov kernels, then choose a rule-of-thumb optimal bandwidth h, and compute the fit with the Nadaraya-Watson kernel estimator.
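For reference, the estimator implemented below is

$$\hat m(x) \;=\; \frac{\sum_{i=1}^{n} K_h(x - x_i)\,y_i}{\sum_{i=1}^{n} K_h(x - x_i)}, \qquad K_h(u) \;=\; \frac{1}{h}\,K\!\left(\frac{u}{h}\right),$$

where $K$ is either the Gaussian kernel $K(u)=\frac{1}{\sqrt{2\pi}}e^{-u^2/2}$ or the Epanechnikov kernel $K(u)=\frac{3}{4}(1-u^2)\,\mathbf 1\{|u|\le 1\}$, and the rule-of-thumb bandwidths used in the code are $h = 1.06\,\hat\sigma\,n^{-1/5}$ and $h = 2.34\,\hat\sigma\,n^{-1/5}$ respectively. The code is as follows: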
1.1 The Gaussian and Epanechnikov kernel functions
[hide]
# Gaussian kernel
kernalGaussian <- function(xData)
{
  # returns the scaled kernel values K_h(x) = K(x/h)/h
  if (ncol(xData) != 1)
  {
    stop('error input data')
  }
  stdX <- sd(xData)
  # rule-of-thumb (Silverman) bandwidth for the Gaussian kernel
  h <- 1.06*stdX*length(xData)^(-1/5)
  kernalX <- 1/(h*sqrt(2*pi)) * exp(-xData^2/(2*h^2))
  return(kernalX)
}
# Epanechnikov kernel
kernalEpanechnikov <- function(xData)
{
  if (ncol(xData) != 1)
  {
    stop('error input data')
  }
  stdX <- sd(xData)
  # rule-of-thumb bandwidth for the Epanechnikov kernel
  h <- 2.34*stdX*length(xData)^(-1/5)
  # indicator of the kernel's support |x/h| <= 1
  ind <- as.numeric(abs(xData/h) <= 1)
  kernalX <- 0.75/h*(1 - (xData/h)^2)*ind
  return(kernalX)
}
[/hide]
1.2 Testing the two kernel functions:
[hide]
# test the two kernel functions
testData1 <- as.matrix(seq(-10, 10, by = 0.5))
testData2 <- as.matrix(seq(-10, 10, length = 100))
kernalGaussian(testData1)
kernalEpanechnikov(testData2)
# plot the Gaussian kernel values
plot(kernalGaussian(testData2))
# plot the Epanechnikov kernel values
plot(kernalEpanechnikov(testData2))
[/hide]
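Note that plot(kernalGaussian(testData2)) plots the kernel values against their index; since testData2 is equally spaced the shape is the same, but plotting against x directly is clearer. A minimal variation using the same functions as above:

[hide]
# plot the kernel values against x rather than against the index
plot(testData2, kernalGaussian(testData2), type = 'l', col = 'red',
     xlab = 'x', ylab = 'kernel value')
lines(testData2, kernalEpanechnikov(testData2), col = 'green')
[/hide]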
1.3 The well-known Nadaraya-Watson kernel estimator
[hide]
##########################################################
# the Nadaraya-Watson kernel estimator
# by fantuanxiaot
kernalRegress <- function(xData, yData, kernalName)
{
  if (!is.matrix(xData) || !is.matrix(yData))
  {
    stop('error input the empirical data')
  }
  # returns the kernel-regression fitted values of y
  nData <- nrow(xData)
  if (nData != nrow(yData))
  {
    stop('error input the data')
  }
  if (!is.character(kernalName) || !(kernalName %in% c('Gaussian', 'Epanechnikov')))
  {
    stop('error input the kernal name')
  }
  yRegress <- matrix(NaN, nrow = nData, ncol = 1)
  for (i in c(1:nData))
  {
    x <- xData[i]
    # differences x - x_j for all sample points; note that the bandwidth
    # is recomputed inside the kernel function from this vector of
    # differences, so it varies with the target point x
    xXt <- matrix(x, nrow = nData, ncol = 1) - xData
    if (kernalName == 'Gaussian')
    {
      khX <- kernalGaussian(xXt)
    } else if (kernalName == 'Epanechnikov')
    {
      khX <- kernalEpanechnikov(xXt)
    }
    # weighted average: sum(K_h(x - x_j) * y_j) / sum(K_h(x - x_j))
    yRegress[i] <- sum(yData*khX)/sum(khX)
  }
  return(yRegress)
}
# test the kernel regression
x <- as.matrix(rnorm(100, mean = 0, sd = 0.03))
y <- 0.5*x + as.matrix(rnorm(100, mean = 0, sd = 0.01))
cbind(y, kernalRegress(x, y, 'Gaussian'), kernalRegress(x, y, 'Epanechnikov'))
# set up the axes with invisible points, then overlay the fits
plot(c(1:100), y, col = 'white')
lines(c(1:100), y, col = 'blue')
lines(c(1:100), kernalRegress(x, y, 'Gaussian'), col = 'red')
lines(c(1:100), kernalRegress(x, y, 'Epanechnikov'), col = 'green')
[/hide]
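As a sanity check (this comparison is my addition, not from the original post), base R's stats::ksmooth implements the same Nadaraya-Watson smoother with a normal kernel:

[hide]
# cross-check against stats::ksmooth; note that ksmooth scales its
# 'bandwidth' argument so the kernel quartiles sit at +/- 0.25*bandwidth,
# so the fits are comparable in shape rather than numerically identical
h <- 1.06 * sd(x) * length(x)^(-1/5)
fit <- ksmooth(as.vector(x), as.vector(y), kernel = 'normal', bandwidth = h)
plot(x, y, col = 'blue')
lines(fit, col = 'red')
[/hide]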
2. NNBR (nearest-neighbour regression)
A paper presenting the basic idea:
2.1 Univariate NNBR source code:
[hide]
# univariate regression
# NNBR: k-nearest-neighbour regression and forecasting
# for a univariate time series
setwd('D:/MyDriversMatlab/Mfiles13')
rm(list = ls())
nnbrRegress <- function(xData, lags, disType)
{
  # lags: the lag order p
  # disType: the neighbour-weighting scheme (1 = inverse distance,
  #          2 = linearly decreasing rank weights)
  if (!is.matrix(xData))
  {
    stop('error input the Current Data')
  }
  if (ncol(xData) != 1)
  {
    stop('error input data')
  }
  if ((nrow(xData) - lags) < 1)
  {
    stop('error input the Data')
  }
  # number of neighbours: a common rule of thumb
  k <- ceiling(sqrt(nrow(xData) - lags))
  # the feature vector formed by the most recent lags observations
  currentVector <- xData[c((nrow(xData)-lags+1):(nrow(xData)))]
  currentVector <- t(as.matrix(currentVector))
  Vectors <- NULL
  xOut <- NULL
  # build all historical feature vectors and their one-step-ahead targets
  i <- 1
  while (TRUE)
  {
    if ((i + lags - 1) == nrow(xData))
    {
      break
    }
    vectors <- t(as.matrix(xData[(i):(i+lags-1)]))
    xOut <- c(xOut, xData[i+lags])
    Vectors <- rbind(Vectors, vectors)
    i <- i + 1
  }
  # find the k feature vectors closest to the current one
  nV <- nrow(Vectors)
  currData <- matrix(rep(currentVector, nV), nrow = nV, byrow = TRUE)
  Distance <- sqrt(apply((Vectors - currData)^2, 1, sum))
  # indices of the k smallest distances
  Index <- order(Distance)
  Index <- Index[c(1:k)]
  xOut <- xOut[Index]
  DistanceOut <- Distance[Index]
  # forecast the out-of-sample value as a weighted average of the
  # neighbours' one-step-ahead targets
  if (disType == 1)
  {
    # inverse-distance weights: closer neighbours weigh more
    # (the original post weighted by the raw distances, which gives
    # the farthest neighbours the largest weights)
    Weight <- (1/DistanceOut)/sum(1/DistanceOut)
    xForecasting <- sum(Weight*xOut)
  } else
  {
    # linearly decreasing rank weights
    xForecasting <- sum((c(k:1)/k)/sum((c(k:1)/k))*xOut)
  }
  return(xForecasting)
}
data <- as.matrix(cumsum(matrix(rnorm(100), nrow = 100)))
data
nnbrRegress(data, 8, 1)
nnbrRegress(data, 8, 2)
# different lag orders
nnbrRegress(data, 10, 1)
nnbrRegress(data, 10, 2)
# different lag orders
nnbrRegress(data, 15, 1)
nnbrRegress(data, 15, 2)
# eof
[/hide]
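Since nnbrRegress returns a single one-step-ahead forecast, multi-step forecasts can be generated recursively by feeding each forecast back into the series. A minimal sketch (nnbrForecast and nSteps are my own hypothetical names, not from the original post):

[hide]
# recursive multi-step forecasting built on nnbrRegress (illustrative sketch)
nnbrForecast <- function(xData, lags, disType, nSteps)
{
  out <- numeric(nSteps)
  for (s in c(1:nSteps))
  {
    # forecast one step ahead, then append it as if it were observed
    out[s] <- nnbrRegress(xData, lags, disType)
    xData <- rbind(xData, out[s])
  }
  return(out)
}
nnbrForecast(data, 8, 1, 5)
[/hide]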
2.2 Multivariate NNBR source code:
[hide]
# D:\MyDriversMatlab\Mfiles13
setwd('D:/MyDriversMatlab/Mfiles13')
# nearest-neighbour (knn) regression
rm(list = ls())
nnbrRegressMultivariate <- function(xData, yData, testData)
{
  if (nrow(xData) != nrow(yData))
  {
    stop('error input data')
  }
  if (ncol(xData) != ncol(testData))
  {
    stop('error input data')
  }
  if (!is.matrix(xData) || !is.matrix(yData) || !is.matrix(testData))
  {
    stop('error input data')
  }
  # number of neighbours
  k <- ceiling(sqrt(nrow(xData)))
  yForecasting <- matrix(NaN, nrow = nrow(testData), ncol = 1)
  for (i in c(1:nrow(testData)))
  {
    x <- testData[i, ]
    xDataRep <- rep(x, nrow(xData))
    xDataRep <- matrix(xDataRep, nrow = nrow(xData), byrow = TRUE)
    # Euclidean distances from the test point to every training point
    Distance <- sqrt(apply((xData - xDataRep)^2, 1, sum))
    Index <- order(Distance)
    # keep the k nearest neighbours
    Index <- Index[c(1:k)]
    ys <- yData[Index]
    # inverse-distance weights over the k neighbours only
    # (the original post normalised over all distances, so the weight
    # vector did not match ys in length)
    DistanceOut <- Distance[Index]
    Weight <- (1/DistanceOut)/sum(1/DistanceOut)
    yForecasting[i] <- sum(ys*Weight)
  }
  return(yForecasting)
}
xData <- matrix(rnorm(100), ncol = 4)
yData <- as.matrix(apply(xData, 1, mean))
testData <- matrix(rnorm(40), ncol = 4)
nnbrRegressMultivariate(xData, yData, testData)
[/hide]
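For comparison (my addition, assuming the FNN package is installed), FNN::knn.reg performs k-nearest-neighbour regression with an unweighted average of the neighbours, so its predictions should be close to, but not identical with, the distance-weighted ones above:

[hide]
# cross-check with FNN::knn.reg (requires install.packages('FNN'));
# knn.reg averages the k neighbours without distance weighting
library(FNN)
k <- ceiling(sqrt(nrow(xData)))
fit <- knn.reg(train = xData, test = testData, y = yData, k = k)
cbind(nnbrRegressMultivariate(xData, yData, testData), fit$pred)
[/hide]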