Reduce the computing time of the toy examples
wenjie2wang committed Apr 12, 2022
1 parent 600b00d commit ba606cb
Showing 3 changed files with 22 additions and 110 deletions.
4 changes: 2 additions & 2 deletions R/abclass.R
@@ -75,7 +75,7 @@
 ##' @param max_iter A positive integer specifying the maximum number of
 ##'     iteration. The default value is \code{10^5}.
 ##' @param epsilon A positive number specifying the relative tolerance that
-##'     determines convergence. The default value is \code{1e-5}.
+##'     determines convergence. The default value is \code{1e-3}.
 ##' @param standardize A logical value indicating if each column of the design
 ##'     matrix should be standardized internally to have mean zero and standard
 ##'     deviation equal to the sample size. The default value is \code{TRUE}.
@@ -120,7 +120,7 @@ abclass <- function(x, y,
                     lum_c = 1.0,
                     boost_umin = -5.0,
                     max_iter = 1e5,
-                    epsilon = 1e-4,
+                    epsilon = 1e-3,
                     standardize = TRUE,
                     varying_active_set = TRUE,
                     verbose = 0,
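
As a quick, hypothetical sketch (not part of this commit), the updated convergence defaults can be spelled out explicitly in a call to abclass(); the objects train_x, train_y, and test_x are the ones built in the toy example below, while the names fit and pred are placeholders introduced only for illustration:

## illustrative sketch: writing out the (new) defaults changes nothing
fit <- abclass(train_x, train_y,
               loss = "logistic",
               max_iter = 1e5,      # maximum number of iterations (unchanged default)
               epsilon = 1e-3,      # relative convergence tolerance (new default)
               standardize = TRUE)  # internal column standardization (unchanged default)
pred <- predict(fit, test_x)        # predicted labels for the test set
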
62 changes: 9 additions & 53 deletions inst/examples/ex-abclass.R
@@ -4,9 +4,9 @@ set.seed(123)
 ## toy examples for demonstration purpose
 ## reference: example 1 in Zhang and Liu (2014)
 ntrain <- 100 # size of training set
-ntest <- 1000 # size of testing set
-p0 <- 10 # number of actual predictors
-p1 <- 100 # number of random predictors
+ntest <- 100 # size of testing set
+p0 <- 5 # number of actual predictors
+p1 <- 5 # number of random predictors
 k <- 5 # number of categories
 
 n <- ntrain + ntest; p <- p0 + p1
@@ -24,60 +24,16 @@ y <- factor(paste0("label_", y))
 train_y <- y[train_idx]
 test_y <- y[- train_idx]
 
-### Regularization through elastic-net penalty
-## logistic deviance loss
-model1 <- abclass(train_x, train_y, nlambda = 10, nfolds = 3,
-                  loss = "logistic", lambda_min_ratio = 1e-4)
+## Regularization through ridge penalty
+model1 <- abclass(train_x, train_y, nlambda = 5, nfolds = 3,
+                  loss = "logistic", alpha = 0, lambda_min_ratio = 1e-2)
 pred1 <- predict(model1, test_x)
 table(test_y, pred1)
 mean(test_y == pred1) # accuracy
 
-## exponential loss approximating AdaBoost
-model2 <- abclass(train_x, train_y, nlambda = 10, nfolds = 3,
-                  loss = "boost", rel_tol = 1e-3)
+## groupwise regularization via group lasso
+model2 <- abclass(train_x, train_y, nlambda = 5, nfolds = 3,
+                  grouped = TRUE, loss = "boost")
 pred2 <- predict(model2, test_x)
 table(test_y, pred2)
 mean(test_y == pred2) # accuracy
-
-## hybrid hinge-boost loss
-model3 <- abclass(train_x, train_y, nlambda = 10, nfolds = 3,
-                  loss = "hinge-boost", rel_tol = 1e-3)
-pred3 <- predict(model3, test_x)
-table(test_y, pred3)
-mean(test_y == pred3) # accuracy
-
-## large-margin unified loss
-model4 <- abclass(train_x, train_y, nlambda = 10, nfolds = 3,
-                  loss = "lum", rel_tol = 1e-3)
-pred4 <- predict(model4, test_x)
-table(test_y, pred4)
-mean(test_y == pred4) # accuracy
-
-### groupwise regularization via group lasso
-## logistic deviance loss
-model1 <- abclass(train_x, train_y, nlambda = 10, nfolds = 3,
-                  grouped = TRUE, loss = "logistic", rel_tol = 1e-3)
-pred1 <- predict(model1, test_x)
-table(test_y, pred1)
-mean(test_y == pred1) # accuracy
-
-## exponential loss approximating AdaBoost
-model2 <- abclass(train_x, train_y, nlambda = 10, nfolds = 3,
-                  grouped = TRUE, loss = "boost", rel_tol = 1e-3)
-pred2 <- predict(model2, test_x)
-table(test_y, pred2)
-mean(test_y == pred2) # accuracy
-
-## hybrid hinge-boost loss
-model3 <- abclass(train_x, train_y, nlambda = 10, nfolds = 3,
-                  grouped = TRUE, loss = "hinge-boost", rel_tol = 1e-3)
-pred3 <- predict(model3, test_x)
-table(test_y, pred3)
-mean(test_y == pred3) # accuracy
-
-## large-margin unified loss
-model4 <- abclass(train_x, train_y, nlambda = 10, nfolds = 3,
-                  grouped = TRUE, loss = "lum", rel_tol = 1e-3)
-pred4 <- predict(model4, test_x)
-table(test_y, pred4)
-mean(test_y == pred4) # accuracy
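
To run the displayed example lines on their own, note that the actual data-generation code sits in the collapsed part of the diff above; the purely hypothetical stand-in below (Gaussian noise predictors and uniformly sampled labels, an assumption rather than the code from the file) is enough to make the snippet self-contained:

## hypothetical stand-in for the collapsed data-generation step
x <- matrix(rnorm(n * p), nrow = n, ncol = p)     # noise predictors
y <- sample.int(k, size = n, replace = TRUE) - 1  # integer class labels
y <- factor(paste0("label_", y))                  # convert to factor labels, as in the file
train_idx <- sample.int(n, size = ntrain)         # indices of the training rows
train_x <- x[train_idx, ]
test_x <- x[- train_idx, ]
## train_y, test_y, and the model fits then run exactly as shown above
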
66 changes: 11 additions & 55 deletions man/abclass.Rd

Some generated files (man/abclass.Rd) are not rendered by default.
