### Simulated example: CART vs. the Bayes classifier on a 2-D toy problem.
### Labels are noisy: P(y = 1) is alpha below the line 2*x1 + x2 = 1.5 and
### beta above it. The assumption alpha < 0.5 < beta makes the Bayes rule
### exactly "predict 1 above the line".
library(tree)

alpha <- 0.2
beta <- 0.8 # the following assumes alpha < 0.5 < beta

## Draw n points uniformly on [0,1]^2 with noisy binary labels.
## Returns a list with $x (n x 2 matrix) and $y (numeric 0/1 vector of length n).
rexemple <- function(n) {
  res <- list()
  res$x <- cbind(runif(n), runif(n))
  # numeric(n), not array(n): array(n) is a length-1 array holding the value n
  res$y <- numeric(n)
  u <- runif(n)
  # below_line: TRUE under the frontier 2*x1 + x2 = 1.5
  # (renamed from `T`, which shadowed the built-in TRUE alias)
  below_line <- 2 * res$x[, 1] + res$x[, 2] < 1.5
  # NOTE(review): the source text was garbled here ("u[T] 1.5;}"); the two
  # label assignments are reconstructed from the alpha/beta comment — confirm.
  res$y[below_line] <- as.numeric(u[below_line] < alpha)
  res$y[!below_line] <- as.numeric(u[!below_line] < beta)
  res
}

## Monte-Carlo estimate of a classifier's misclassification rate.
## `classifier` maps an m x 2 input matrix to 0/1 (or logical) labels.
## NOTE(review): this helper was lost in the garbled source but is called
## below; reconstructed to its obvious contract — confirm.
evaluateClassifError <- function(classifier, nTest = 20000) {
  testSet <- rexemple(nTest)
  mean(testSet$y != classifier(testSet$x))
}

## Bayes-optimal rule for this model (valid because alpha < 0.5 < beta):
## predict 1 exactly above the frontier. NOTE(review): reconstructed from
## the trailing "1.5;}" fragment of the garbled source — confirm.
bayesClassifier <- function(x) {
  2 * x[, 1] + x[, 2] >= 1.5
}

## Training sample.
## NOTE(review): `n` and `train` were undefined in the garbled source yet
## used below (tree.control(n, ...), plotting); n = 200 is a plausible
## training size for the plots — confirm.
n <- 200
train <- rexemple(n)

bayesError <- evaluateClassifError(bayesClassifier)

## Grow a maximal tree (mindev = 0, minsize = 2: split until leaves are
## pure), then prune it back to 10 terminal nodes.
y <- factor(train$y)
x1 <- train$x[, 1]
x2 <- train$x[, 2]
fullTree <- tree(y ~ x1 + x2,
                 control = tree.control(n, mindev = 0, minsize = 2))
cartClassifier <- prune.tree(fullTree, best = 10)

## Estimate the CART error on a large independent test set.
test <- rexemple(20000) # could also use LHS here
# predict() on a classification tree returns per-class probabilities;
# column 2 is P(y = "1"). (Generic predict(), not a direct method call.)
test$estimProba <- predict(cartClassifier,
                           data.frame(x1 = test$x[, 1], x2 = test$x[, 2]))
test$guessed <- test$estimProba[, 2] > 0.5
cartError <- mean(test$y != test$guessed)

## Plot: test points coloured by CART prediction, training points by true
## label, and the true frontier 2*x1 + x2 = 1.5 in green.
plot(test$x[, 1], test$x[, 2],
     col = c("pink", "cyan")[1 + test$guessed], pch = 19,
     xlim = c(-0.2, 1.2), ylim = c(-0.2, 1.2))
points(train$x[, 1], train$x[, 2],
       col = c("red", "blue")[1 + train$y], pch = 19)
lines(c(0, 1), c(1.5, -0.5), col = "green", lwd = 2)

## Overlay the axis-parallel CART splits read from the fitted tree's frame.
## The 5th frame column stores cut points as strings like "<0.42"; skip the
## leading "<" to recover the numeric threshold.
f <- cartClassifier$frame
lx <- c()
ly <- c()
for (j in seq_len(nrow(f))) {
  var <- f[j, 1]
  if (var == "x1" || var == "x2") {
    b <- as.double(substr(f[j, 5][1], 2, 5))
    if (var == "x1") {
      lx <- c(lx, b)
      lines(c(b, b), c(0, 1))
    } else {
      ly <- c(ly, b)
      lines(c(0, 1), c(b, b))
    }
  }
}

## Graph legend and result prints.
## Fix: the true frontier is drawn in green above, not black.
title("simulated data classification")
legend(0.8, 1.2, c("true frontier", "CART zone 1", "CART zone 0"),
       col = c("green", "cyan", "pink"), lty = 1)
cat("Bayes classification error:\t", bayesError,
    "\nCART classifier error:\t", cartError, "\n")

## To plot the partition tree, simply type
x11()
partition.tree(cartClassifier, col = "blue", cex = 2)
points(train$x[, 1], train$x[, 2],
       col = c("red", "blue")[1 + train$y], pch = 19)
lines(c(0, 1), c(1.5, -0.5), col = "green", lwd = 2)

### iris dataset
# data(iris)
# ir.tr <- tree(Species ~ ., iris)
# ir.tr
# summary(ir.tr)
# plot(ir.tr)
# text(ir.tr)