library('e1071')
### simulated example
alpha <- 0.2; beta <- 0.8; # the following assumes alpha < 0.5 < beta
rexemple <- function(n){
  # n uniform points on the unit square; P(y=1|x) is beta below the line 2*x1 + x2 = 1.5, alpha above it
  res <- list();
  res$x <- cbind(runif(n), runif(n));
  res$y <- numeric(n);
  u <- runif(n);
  inT <- 2 * res$x[,1] + res$x[,2] < 1.5;
  res$y[inT] <- as.numeric(u[inT] < beta);
  res$y[!inT] <- as.numeric(u[!inT] < alpha);
  res
}
train <- rexemple(200)  # training sample (size not given in the original; 200 is illustrative)
# reconstructed helpers: the Bayes rule predicts 1 exactly where P(y=1|x) = beta > 0.5
bayesClassifier <- function(x) as.numeric(2 * x[,1] + x[,2] < 1.5)
evaluateClassifError <- function(classifier, data = rexemple(20000)) mean(data$y != classifier(data$x))
bayesError <- evaluateClassifError(bayesClassifier)
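# quick sanity check (optional, not in the original): the empirical class-1 rate in each
# zone should approach beta below the frontier and alpha above it
chk <- rexemple(10000)
inT <- 2 * chk$x[,1] + chk$x[,2] < 1.5
c(mean(chk$y[inT]), mean(chk$y[!inT]))  # roughly 0.8 and 0.2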
model1 <- svm(train$x, as.factor(train$y), type = 'C-classification', kernel = 'polynomial', cost = 2, gamma = 0.5) # kernel choices: linear, polynomial, radial, sigmoid
summary(model1)
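# optional sketch (not in the original): cost and gamma can be grid-searched by
# cross-validation with e1071's tune.svm; the parameter ranges here are illustrative
tuned <- tune.svm(train$x, as.factor(train$y), kernel = 'polynomial',
                  cost = 2^(-1:3), gamma = 2^(-2:1))
summary(tuned)  # tuned$best.parameters holds the selected cost/gamma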
test <- rexemple(20000);
test$guessed <- predict(model1, test$x);
svmError <- mean(test$y != test$guessed)
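# optional addition: a confusion matrix gives more detail than the raw error rate
table(truth = test$y, predicted = test$guessed)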
#jpeg("illuSVM.jpg")
# test points colored by SVM prediction (the factor levels "0","1" index into the palette)
plot(test$x[,1], test$x[,2], col = c("pink", "cyan")[as.numeric(test$guessed)], pch = 19, xlim = c(-0.2, 1.2), ylim = c(-0.2, 1.2))
points(train$x[,1], train$x[,2], col = c("red", "blue")[1 + train$y], pch = 19)  # training points, colored by true label
lines(c(0, 1), c(1.5, -0.5), col = "green", lwd = 2)  # true frontier: 2*x1 + x2 = 1.5
# graph legend and result prints
title("simulated data classification")
legend(0.8, 1.2, c("true frontier", "SVM zone 1", "SVM zone 0"), col = c("green", "cyan", "pink"), lty=1);
cat("Bayes classification error:\t", bayesError, "\nSVM classifier error:\t", svmError, "\n")
#dev.off()
# a more idiomatic R way of doing approximately the same:
dtrain <- data.frame(x1 = train$x[,1], x2 = train$x[,2], y = as.factor(train$y))  # named 'dtrain' to avoid masking base::t
model2 <- svm(y ~ x2 + x1, data = dtrain, type = 'C-classification', kernel = 'polynomial', cost = 0.5, gamma = 1)
plot(model2, dtrain, grid = 200)
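# with the formula interface, new points go in as a data.frame whose column names match
# the training frame (a small usage sketch, reusing the test set generated above)
testdf <- data.frame(x1 = test$x[,1], x2 = test$x[,2])
mean(test$y != predict(model2, testdf))  # test error of the formula-interface fit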