Although feature engineering matters a great deal in practice, trees (and random forests) are very good at finding interaction terms of the form x*y on their own.
Here is a toy example of a regression with a two-way interaction. It compares a naive linear model with a single tree and with a bag of trees (a simpler alternative to a random forest).
As you can see, the tree-based models are quite good at picking up the interaction, while the linear model does poorly in this example.
# load rpart for the tree-based models
library(rpart)

# fake data with a strong x*y interaction
x <- rnorm(1000, sd=3)
y <- rnorm(1000, sd=3)
z <- x + y + 10*x*y + rnorm(1000, 0, 0.2)
dat <- data.frame(x, y, z)
# test and train split
test <- sample(1:nrow(dat), 200)
train <- (1:1000)[-test]
# bag-of-trees model: fit N rpart trees, each on a bootstrap resample of the data
boot_tree <- function(formula, dat, N=100){
  models <- list()
  for (i in 1:N){
    # resample rows with replacement and fit a tree to the bootstrap sample
    models[[i]] <- rpart(formula, dat[sample(nrow(dat), nrow(dat), replace=TRUE), ])
  }
  class(models) <- "boot_tree"
  models
}
# prediction method for bag of trees: predict() dispatches here via the S3 class
# set above; predictions from all trees are combined with a trimmed mean
predict.boot_tree <- function(object, newdata, ...){
  preds <- matrix(0, ncol=length(object), nrow=nrow(newdata))
  for (i in seq_along(object)){
    preds[,i] <- predict(object[[i]], newdata)
  }
  apply(preds, 1, function(x) mean(x, trim=0.1))
}
## Fit models and predict:
# linear model
model1 <- lm(z ~ x + y, data=dat[train,])
pred1 <- predict(model1, dat[test,])
# single regression tree
model2 <- rpart(z ~ x + y, data=dat[train,])
pred2 <- predict(model2, dat[test,])
# bag of trees, fit on the training rows only
model3 <- boot_tree(z ~ x + y, dat[train,])
pred3 <- predict(model3, dat[test,])
# common y-axis range so all three sets of predictions fit on one plot
ylim <- range(c(pred1, pred2, pred3))
# plot predictions against the true z on the test set
plot(dat$z[test], pred1, pch=19, xlab="Actual z",
     ylab="Predicted z", ylim=ylim)
points(dat$z[test], pred2, col="green", pch=19)
points(dat$z[test], pred3, col="blue", pch=19)
abline(0, 1, lwd=3, col="orange")  # perfect-prediction line
legend("topleft", pch=rep(19,3), col=c("black", "green", "blue"),
       legend=c("Linear", "Tree", "Bagged trees"))
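If you prefer a number to a picture, a minimal sketch like the one below (reusing pred1, pred2, and pred3 from above; the rmse helper is defined here only for this check and is not part of the original example) compares out-of-sample error. On this toy data the linear model's RMSE should come out far larger than that of the single tree or the bagged trees.
# out-of-sample root-mean-squared error for each model (rmse is a small helper)
rmse <- function(truth, pred) sqrt(mean((truth - pred)^2))
c(linear = rmse(dat$z[test], pred1),
  tree   = rmse(dat$z[test], pred2),
  bagged = rmse(dat$z[test], pred3))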