随机森林回归实战(R 语言):使用 Boston 数据集构建随机森林模型
随机森林1. 使用Boston数据集进行随机森林模型构建2. 数据集划分3.构建自变量与因变量之间的公式4. 模型训练5. 寻找合适的ntree6. 查看变量重要性并绘图展示7. 偏依赖图:Partial Dependence PlotPDP图8. 训练集预测结果1. 使用Boston数据集进行随机森林模型构建
library(rio)
library(ggplot2)
library(magrittr)
library(randomForest)
library(tidyverse)
library(skimr)
library(DataExplorer)
library(caret)
library(varSelRF)
library(pdp)
library(iml)
# Load the Boston housing data shipped with the pdp package (506 obs., 16 vars)
# and take a first look at it.
data(boston)
boston <- as.data.frame(boston)  # original discarded the coercion result; keep it
skim(boston)                     # variable-by-variable overview
plot_missing(boston)             # visualize missing-value pattern
# na.roughfix()                  # option: impute NAs with column medians/modes
hist(boston$lstat, breaks = 50)  # distribution of the response variable
# 2. Train/test split -------------------------------------------------
set.seed(123)
trains <- createDataPartition(y = boston$lstat, p = 0.70, list = FALSE)
traindata <- boston[trains, ]
testdata  <- boston[-trains, ]

# 3. Build the model formula ------------------------------------------
# Response: lstat; predictors: the other 15 columns (boston has 16 columns,
# with lstat last, so columns 1:15 are the predictors).
colnames(boston)
form_reg <- as.formula(
  paste0("lstat ~ ", paste(colnames(traindata)[1:15], collapse = " + "))
)
form_reg
# 4. Model training: search for the best mtry -------------------------
# Default mtry is sqrt(p) for classification and p/3 for regression;
# here we scan mtry = 1..n and keep the value with the smallest OOB MSE.
set.seed(123)
n <- ncol(boston) - 5          # scan mtry = 1..11
errRate <- numeric(n)          # preallocate: mean OOB MSE for each mtry
for (i in seq_len(n)) {
  rf_train <- randomForest(
    form_reg,
    data = traindata,
    ntree = 1000,              # number of trees
    mtry = i,                  # variables tried at each split
    importance = TRUE          # compute variable importance
  )
  # NOTE: the original also passed `p = 0.8`, which is not a randomForest
  # argument and was silently ignored; it has been dropped here.
  errRate[i] <- mean(rf_train$mse)
  print(rf_train)
}
m <- which.min(errRate)        # mtry with the smallest mean OOB error
print(m)
# Result (from the original run): error decreases as mtry grows and is
# smallest at mtry = 11 (~10.12 MSE, ~79.2% variance explained).
# 5. Choose a suitable ntree ------------------------------------------
set.seed(123)
rf_train <- randomForest(
  form_reg,
  data = traindata,
  mtry = 11,                   # best value found by the mtry scan above
  ntree = 500,
  importance = TRUE,
  proximity = TRUE
)
plot(rf_train, main = "ERROR TREES")  # OOB error vs. number of trees

# 6. Variable importance ----------------------------------------------
imp <- importance(rf_train)    # renamed: `importance` would mask the function

# Plot method 1: bar chart of the first importance column (%IncMSE)
barplot(rf_train$importance[, 1],
        main = "输入变量重要性测度指标柱形图")
box()

# Plot method 2: built-in importance plots
varImpPlot(rf_train, main = "Variable Importance plot")
varImpPlot(rf_train, main = "Variable Importance plot", type = 1)  # %IncMSE
varImpPlot(rf_train,
           sort = TRUE,
           n.var = nrow(rf_train$importance),
           main = "Variable Importance plot",
           type = 2)                                               # IncNodePurity

hist(treesize(rf_train))       # node count of each tree in the forest
max(treesize(rf_train))
min(treesize(rf_train))
# "%IncMSE": increase in mean squared error after randomly permuting a
#   predictor — the larger the increase, the more important the variable.
# "IncNodePurity": increase in node purity (residual sum of squares) —
#   larger values also indicate higher importance.
# Both measure importance, but their rankings may differ somewhat.
# 7. Partial Dependence Plot (PDP) ------------------------------------
# A PDP shows whether the relationship between the target and a feature is
# linear, monotonic or more complex. Limitation: at most two features can
# realistically be shown at once (2-D media, human 3-D intuition).
partialPlot(x = rf_train,
            pred.data = traindata,
            x.var = "cmedv")

# Two-variable (interaction) PDP via the pdp package
rf_train %>%
  partial(pred.var = c("cmedv", "age"), chull = TRUE, progress = TRUE) %>%
  autoplot(contour = TRUE, legend.title = "SOS",
           option = "B", direction = -1) +
  theme_bw() +
  theme(text = element_text(size = 12, family = "serif"))

# Scatter plot of the response against a predictor
plot(lstat ~ cmedv, data = traindata)
# 8. Predictions on the training set ----------------------------------
# BUG FIX: the original plotted `trainpred` without ever computing it.
trainpred <- predict(rf_train, newdata = traindata)
plot(x = traindata$lstat, y = trainpred,
     xlab = "实际值", ylab = "预测值",
     main = "随机森林-实际值与预测值比较")
trainlinmod <- lm(trainpred ~ traindata$lstat)  # fitted regression line
abline(trainlinmod, col = "blue", lwd = 2.5, lty = "solid")
abline(a = 0, b = 1, col = "red", lwd = 2.5, lty = "dashed")  # ideal y = x line
legend("topleft",
       legend = c("Model", "Base"),   # fixed typo: "Mode1" -> "Model"
       col = c("blue", "red"),
       lwd = 2.5,
       lty = c("solid", "dashed"))

# Predictions on the test set
testpred <- predict(rf_train, newdata = testdata)
# Test-set error metrics (RMSE / R^2 / MAE via caret)
defaultSummary(data.frame(obs = testdata$lstat, pred = testpred))
plot(x = testdata$lstat, y = testpred,
     xlab = "实际值", ylab = "预测值",
     main = "随机森林-实际值与预测值比较")
testlinmod <- lm(testpred ~ testdata$lstat)
abline(testlinmod, col = "blue", lwd = 2.5, lty = "solid")
abline(a = 0, b = 1, col = "red", lwd = 2.5, lty = "dashed")
legend("topleft",
       legend = c("Model", "Base"),
       col = c("blue", "red"),
       lwd = 2.5,
       lty = c("solid", "dashed"))
# (原文末尾的批量转载/SEO 链接已移除)