# notebooks/ccn2019-svm.R
library(tidyverse)
library(caret)      # resampling, training, and evaluation
library(here)       # project-relative file paths
library(inspectdf)  # not used directly below
library(glmnet)     # not used directly below
library(ROSE)       # not used directly below; class balancing is done via caret's sampling = "up"

# Remove any stale copy of `seqs` before loading the preprocessed n-back sequences
if (exists("seqs")) rm(seqs)
load(here("notebooks/data/nback_seqs.Rd"))
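# Optional sanity check (assumes `correct` is a two-level factor with levels
# NO/YES, as the probability column used later implies): print the class
# counts to see the imbalance that motivates up-sampling during training.
table(seqs$correct)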

# Classify response correctness from the n-back level, stimulus features, and derived sequence statistics
f <- correct ~ n + stimulus_type + stimulus + t + s + v + l + vl + sl + tl + ul + ll

set.seed(654321)

seqs <- seqs %>% drop_na(rt, correct, tl, sl)

# Stratified 80/20 split on the outcome (f[[2]] is the formula's left-hand side, i.e., `correct`)
train.indices <- createDataPartition(seqs[[toString(f[[2]])]], p = 0.8, list = FALSE)

# Class balancing is handled per-fold via `sampling = "up"` in trainControl below
seqs.train <- seqs[train.indices, ]
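# Optional: confirm the stratified split preserved the outcome distribution;
# the two sets of proportions should be nearly identical.
prop.table(table(seqs.train$correct))
prop.table(table(seqs$correct))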

seqs.train.x <- model.matrix(f, seqs.train)[, -1]  # design matrix without the intercept column
seqs.train.y <- seqs.train[[toString(f[[2]])]]

seqs.test   <- seqs[-train.indices, ]
seqs.test.x <- model.matrix(f, seqs.test)[, -1]
seqs.test.observed_y <- seqs.test[[toString(f[[2]])]]
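# Guard against silent row drops: model.matrix() omits rows with NAs in any
# predictor, which would misalign the design matrices and their outcomes.
stopifnot(nrow(seqs.train.x) == length(seqs.train.y),
          nrow(seqs.test.x)  == length(seqs.test.observed_y))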

ctrl <- trainControl(method = "cv",
                     number = 10,
                     sampling = "up",    # up-sample the minority class within each fold
                     classProbs = TRUE,  # required for predict(type = "prob") and the ROC below
                     savePredictions = TRUE,
                     verboseIter = TRUE)

# Candidate SVM cost values; C must be strictly positive, so the grid starts above zero
tune <- expand.grid(C = seq(0.25, 5, by = 0.25))

model <- train(seqs.train.x,
               seqs.train.y,
               method = "svmLinear",
               preProc = c("center", "scale"),
               tuneGrid = tune,  # an explicit grid makes tuneLength unnecessary
               trControl = ctrl)

model$bestTune  # cost value selected by cross-validation
plot(model)     # CV performance across the cost grid
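# Full resampling profile behind bestTune: mean CV accuracy and kappa for
# every candidate cost value in the grid.
model$results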

# Hard class labels and class probabilities on the held-out set
seqs.test.y <- model %>% predict(seqs.test.x)
seqs.test.y_prob <- model %>% predict(seqs.test.x, type = "prob")

confusionMatrix(seqs.test.y, seqs.test.observed_y)
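# To use individual metrics programmatically, capture the object instead of
# just printing it; `overall` and `byClass` are standard confusionMatrix fields.
cm <- confusionMatrix(seqs.test.y, seqs.test.observed_y)
cm$overall["Accuracy"]
cm$byClass[c("Sensitivity", "Specificity")]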

# Model-agnostic (filter-based) variable importance
plot(varImp(model, scale = FALSE, useModel = FALSE))

library(pROC)

# ROC curve on the held-out set, with the AUC printed on the plot and a
# bootstrapped confidence interval of sensitivity
roc(seqs.test.observed_y,
    seqs.test.y_prob$YES,
    legacy.axes = TRUE,
    plot = TRUE,
    lwd = 2,
    col = "black",
    lty = 1,
    percent = TRUE,
    print.auc = TRUE,
    print.auc.x = 55,
    print.auc.y = 40,
    ci = TRUE,
    of = "se",
    boot.n = 200)
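# A numeric AUC with a DeLong confidence interval, independent of the plot
# above; assumes "YES" is the positive-class probability column, as in roc().
ci.auc(seqs.test.observed_y, seqs.test.y_prob$YES)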


# RT: leftover regression-metric snippet (not run; `y.test` is undefined here,
# and these metrics apply to a response-time model rather than this classifier)
# data.frame(
#   RMSE = RMSE(y.test, seqs.test$correct),
#   Rsquare = R2(y.test, seqs.test$correct)
# )