Commit 55267e0c authored by Armin Rauschenberger's avatar Armin Rauschenberger
Browse files

automation

parent f3ed33bc
Package: cornet Package: cornet
Version: 0.0.0 Version: 0.0.1
Title: Elastic Net for Dichotomised Outcomes Title: Elastic Net for Dichotomised Outcomes
Description: Implements lasso and ridge regression for dichotomised outcomes. Description: Implements lasso and ridge regression for dichotomised outcomes.
Depends: R (>= 3.0.0) Depends: R (>= 3.0.0)
Imports: glmnet Imports: glmnet
Suggests: knitr, testthat, RColorBrewer Suggests: knitr, testthat
Enhances: RColorBrewer
Authors@R: person("Armin","Rauschenberger",email="a.rauschenberger@vumc.nl",role=c("aut","cre")) Authors@R: person("Armin","Rauschenberger",email="a.rauschenberger@vumc.nl",role=c("aut","cre"))
VignetteBuilder: knitr VignetteBuilder: knitr
License: GPL-3 License: GPL-3
......
...@@ -2,6 +2,7 @@ ...@@ -2,6 +2,7 @@
#--- Workhorse function -------------------------------------------------------- #--- Workhorse function --------------------------------------------------------
#' @export #' @export
#' @aliases cornet-package
#' @title #' @title
#' Logistic regression with a continuous response #' Logistic regression with a continuous response
#' #'
...@@ -57,10 +58,29 @@ ...@@ -57,10 +58,29 @@
#' further arguments passed to \code{\link[glmnet]{glmnet}} #' further arguments passed to \code{\link[glmnet]{glmnet}}
#' #'
#' @details #' @details
#' - INCLUDE note on deviance (not comparable between lin and log models) #' This function fits a \code{"gaussian"} model for the numeric response,
#' - alpha: elastic net parameter\strong{:} #' and a \code{"binomial"} model for the binary response,
#' numeric between \eqn{0} (ridge) and \eqn{1} (lasso) #' meaning that the \code{glmnet} argument \code{family} is unavailable.
#' - do not use "family" #' Also if \code{type.measure} equals \code{"deviance"},
#' the loss is not comparable between linear and logistic regression.
#'
#' @return
#' Returns an object of class \code{cornet}, a list with multiple slots:
#' \itemize{
#' \item \code{"gaussian"}: fitted linear model, class \code{glmnet}
#' \item \code{"binomial"}: fitted logistic model, class \code{glmnet}
#' \item \code{"sigma"}: scaling parameters \code{sigma},
#' vector of length \code{nsigma}
#' \item \code{"pi"}: weighting parameters \code{pi},
#' vector of length \code{npi}
#' \item \code{cvm}: evaluation loss,
#' matrix with \code{nsigma} rows and \code{npi} columns
#' \item \code{sigma.min}: optimal scaling parameter,
#' positive scalar
#' \item \code{pi.min}: optimal weighting parameter,
#' scalar in unit interval
#' \item \code{cutoff}: threshold for dichotomisation
#' }
#' #'
#' @examples #' @examples
#' n <- 100; p <- 200 #' n <- 100; p <- 200
...@@ -68,14 +88,11 @@ ...@@ -68,14 +88,11 @@
#' X <- matrix(rnorm(n*p),nrow=n,ncol=p) #' X <- matrix(rnorm(n*p),nrow=n,ncol=p)
#' net <- cornet(y=y,cutoff=0,X=X) #' net <- cornet(y=y,cutoff=0,X=X)
#' #'
#' ### Add ... to all glmnet::glmnet calls !!! ###
#'
cornet <- function(y,cutoff,X,alpha=1,npi=101,pi=NULL,nsigma=99,sigma=NULL,nfolds=10,foldid=NULL,type.measure="deviance",...){ cornet <- function(y,cutoff,X,alpha=1,npi=101,pi=NULL,nsigma=99,sigma=NULL,nfolds=10,foldid=NULL,type.measure="deviance",...){
#--- temporary --- #--- temporary ---
# cutoff <- 0; npi <- 101; pi <- NULL; nsigma <- 99; sigma <- NULL; nfolds <- 10; foldid <- NULL; type.measure <- "deviance"; logistic <- TRUE # cutoff <- 0; npi <- 101; pi <- NULL; nsigma <- 99; sigma <- NULL; nfolds <- 10; foldid <- NULL; type.measure <- "deviance"; logistic <- TRUE
test <- list() test <- list()
test$sigma <- test$pi <- FALSE
test$combined <- TRUE test$combined <- TRUE
#--- checks --- #--- checks ---
...@@ -105,8 +122,8 @@ cornet <- function(y,cutoff,X,alpha=1,npi=101,pi=NULL,nsigma=99,sigma=NULL,nfold ...@@ -105,8 +122,8 @@ cornet <- function(y,cutoff,X,alpha=1,npi=101,pi=NULL,nsigma=99,sigma=NULL,nfold
#--- model fitting --- #--- model fitting ---
fit <- list() fit <- list()
fit$gaussian <- glmnet::glmnet(y=y,x=X,family="gaussian",alpha=alpha) fit$gaussian <- glmnet::glmnet(y=y,x=X,family="gaussian",alpha=alpha,...)
fit$binomial <- glmnet::glmnet(y=z,x=X,family="binomial",alpha=alpha) fit$binomial <- glmnet::glmnet(y=z,x=X,family="binomial",alpha=alpha,...)
#--- sigma sequence --- #--- sigma sequence ---
if(is.null(sigma)){ if(is.null(sigma)){
...@@ -136,12 +153,6 @@ cornet <- function(y,cutoff,X,alpha=1,npi=101,pi=NULL,nsigma=99,sigma=NULL,nfold ...@@ -136,12 +153,6 @@ cornet <- function(y,cutoff,X,alpha=1,npi=101,pi=NULL,nsigma=99,sigma=NULL,nfold
pred$y <- matrix(data=NA,nrow=n,ncol=length(fit$gaussian$lambda)) pred$y <- matrix(data=NA,nrow=n,ncol=length(fit$gaussian$lambda))
pred$z <- matrix(data=NA,nrow=n,ncol=length(fit$binomial$lambda)) pred$z <- matrix(data=NA,nrow=n,ncol=length(fit$binomial$lambda))
if(test$sigma){
pred$sigma <- matrix(data=NA,nrow=n,ncol=nsigma)
}
if(test$pi){
pred$pi <- matrix(data=NA,nrow=n,ncol=npi)
}
if(test$combined){ if(test$combined){
dimnames <- list(NULL,lab.sigma,lab.pi) dimnames <- list(NULL,lab.sigma,lab.pi)
pred$combined <- array(data=NA,dim=c(n,nsigma,npi),dimnames=dimnames) pred$combined <- array(data=NA,dim=c(n,nsigma,npi),dimnames=dimnames)
...@@ -157,35 +168,20 @@ cornet <- function(y,cutoff,X,alpha=1,npi=101,pi=NULL,nsigma=99,sigma=NULL,nfold ...@@ -157,35 +168,20 @@ cornet <- function(y,cutoff,X,alpha=1,npi=101,pi=NULL,nsigma=99,sigma=NULL,nfold
X1 <- X[foldid==k,,drop=FALSE] X1 <- X[foldid==k,,drop=FALSE]
# linear regression # linear regression
net <- glmnet::glmnet(y=y0,x=X0,family="gaussian",alpha=alpha) net <- glmnet::glmnet(y=y0,x=X0,family="gaussian",alpha=alpha,...)
temp_y <- stats::predict(object=net,newx=X1,type="response",s=fit$gaussian$lambda) temp_y <- stats::predict(object=net,newx=X1,type="response",s=fit$gaussian$lambda)
pred$y[foldid==k,seq_len(ncol(temp_y))] <- temp_y pred$y[foldid==k,seq_len(ncol(temp_y))] <- temp_y
cvm <- cornet:::.loss(y=y1,fit=temp_y,family="gaussian",type.measure="deviance")[[1]] cvm <- cornet:::.loss(y=y1,fit=temp_y,family="gaussian",type.measure="deviance")[[1]]
y_hat <- temp_y[,which.min(cvm)] y_hat <- temp_y[,which.min(cvm)]
# logistic regression # logistic regression
net <- glmnet::glmnet(y=z0,x=X0,family="binomial",alpha=alpha) net <- glmnet::glmnet(y=z0,x=X0,family="binomial",alpha=alpha,...)
temp_z <- stats::predict(object=net,newx=X1,type="response",s=fit$binomial$lambda) temp_z <- stats::predict(object=net,newx=X1,type="response",s=fit$binomial$lambda)
pred$z[foldid==k,seq_len(ncol(temp_z))] <- temp_z pred$z[foldid==k,seq_len(ncol(temp_z))] <- temp_z
cvm <- cornet:::.loss(y=z1,fit=temp_z,family="binomial",type.measure=type.measure)[[1]] cvm <- cornet:::.loss(y=z1,fit=temp_z,family="binomial",type.measure=type.measure)[[1]]
z_hat <- temp_z[,which.min(cvm)] z_hat <- temp_z[,which.min(cvm)]
# fusion (sigma) # combined regression
if(test$sigma){
for(i in seq_along(fit$sigma)){
#pred$sigma[foldid==k,i] <- stats::pnorm(q=y_hat,mean=cutoff,sd=fit$sigma[i])
}
}
# fusion (pi)
if(test$pi){
for(i in seq_along(fit$pi)){
#cont <- stats::pnorm(q=y_hat,mean=cutoff,sd=stats::sd(y))
#pred$pi[foldid==k,i] <- fit$pi[i]*cont + (1-fit$pi[i])*z_hat
}
}
# fusion (combined)
if(test$combined){ if(test$combined){
for(i in seq_along(fit$sigma)){ for(i in seq_along(fit$sigma)){
for(j in seq_along(fit$pi)){ for(j in seq_along(fit$pi)){
...@@ -194,27 +190,20 @@ cornet <- function(y,cutoff,X,alpha=1,npi=101,pi=NULL,nsigma=99,sigma=NULL,nfold ...@@ -194,27 +190,20 @@ cornet <- function(y,cutoff,X,alpha=1,npi=101,pi=NULL,nsigma=99,sigma=NULL,nfold
} }
} }
} }
} }
#--- evaluation --- #--- evaluation ---
# deviance (not comparable between Gaussian and binomial families) # linear loss
fit$gaussian$cvm <- cornet:::.loss(y=y,fit=pred$y,family="gaussian",type.measure="deviance")[[1]] fit$gaussian$cvm <- cornet:::.loss(y=y,fit=pred$y,family="gaussian",type.measure="deviance")[[1]]
fit$gaussian$lambda.min <- fit$gaussian$lambda[which.min(fit$gaussian$cvm)] fit$gaussian$lambda.min <- fit$gaussian$lambda[which.min(fit$gaussian$cvm)]
# logistic loss
fit$binomial$cvm <- cornet:::.loss(y=z,fit=pred$z,family="binomial",type.measure=type.measure)[[1]] fit$binomial$cvm <- cornet:::.loss(y=z,fit=pred$z,family="binomial",type.measure=type.measure)[[1]]
fit$binomial$lambda.min <- fit$binomial$lambda[which.min(fit$binomial$cvm)] fit$binomial$lambda.min <- fit$binomial$lambda[which.min(fit$binomial$cvm)]
if(test$sigma){ # combined loss
#fit$sigma.cvm <- cornet:::.loss(y=z,fit=pred$sigma,family="binomial",type.measure=type.measure)[[1]]
#fit$sigma.min1 <- fit$sigma[which.min(fit$sigma.cvm)]
}
if(test$pi){
#fit$pi.cvm <- cornet:::.loss(y=z,fit=pred$pi,family="binomial",type.measure=type.measure)[[1]] # trial
#fit$pi.min1 <- fit$pi[which.min(fit$pi.cvm)]
}
if(test$combined){ if(test$combined){
dimnames <- list(lab.sigma,lab.pi) dimnames <- list(lab.sigma,lab.pi)
fit$cvm <- matrix(data=NA,nrow=nsigma,ncol=npi,dimnames=dimnames) fit$cvm <- matrix(data=NA,nrow=nsigma,ncol=npi,dimnames=dimnames)
...@@ -306,7 +295,7 @@ plot.cornet <- function(x,...){ ...@@ -306,7 +295,7 @@ plot.cornet <- function(x,...){
k <- 100 k <- 100
levels <- stats::quantile(x$cvm,probs=seq(from=0,to=1,length.out=k+1)) levels <- stats::quantile(x$cvm,probs=seq(from=0,to=1,length.out=k+1))
## RColorBrewer # colours
if("RColorBrewer" %in% .packages(all.available=TRUE)){ if("RColorBrewer" %in% .packages(all.available=TRUE)){
pal <- rev(c("white",RColorBrewer::brewer.pal(n=9,name="Blues"))) pal <- rev(c("white",RColorBrewer::brewer.pal(n=9,name="Blues")))
col <- grDevices::colorRampPalette(colors=pal)(k) col <- grDevices::colorRampPalette(colors=pal)(k)
...@@ -390,7 +379,7 @@ predict.cornet <- function(object,newx,type="probability",...){ ...@@ -390,7 +379,7 @@ predict.cornet <- function(object,newx,type="probability",...){
.check(x=newx,type="matrix") .check(x=newx,type="matrix")
.check(x=type,type="string",values=c("probability","odds","log-odds")) .check(x=type,type="string",values=c("probability","odds","log-odds"))
# linear, logistic and mixed # linear and logistic
prob <- list() prob <- list()
link <- as.numeric(stats::predict(object=x$gaussian, link <- as.numeric(stats::predict(object=x$gaussian,
newx=newx,s=x$gaussian$lambda.min,type="response")) newx=newx,s=x$gaussian$lambda.min,type="response"))
...@@ -398,14 +387,7 @@ predict.cornet <- function(object,newx,type="probability",...){ ...@@ -398,14 +387,7 @@ predict.cornet <- function(object,newx,type="probability",...){
prob$binomial <- as.numeric(stats::predict(object=x$binomial, prob$binomial <- as.numeric(stats::predict(object=x$binomial,
newx=newx,s=x$binomial$lambda.min,type="response")) newx=newx,s=x$binomial$lambda.min,type="response"))
if(test$sigma){ # combined
#prob$sigma <- stats::pnorm(q=link,mean=x$cutoff,sd=x$sigma.min)
}
if(test$pi){
#prob$pi <- x$pi.min*prob$gaussian + (1-x$pi.min)*prob$binomial
}
if(test$combined){ if(test$combined){
cont <- stats::pnorm(q=link,mean=x$cutoff,sd=x$sigma.min) cont <- stats::pnorm(q=link,mean=x$cutoff,sd=x$sigma.min)
prob$combined <- x$pi.min*cont + (1-x$pi.min)*prob$binomial prob$combined <- x$pi.min*cont + (1-x$pi.min)*prob$binomial
...@@ -433,7 +415,7 @@ predict.cornet <- function(object,newx,type="probability",...){ ...@@ -433,7 +415,7 @@ predict.cornet <- function(object,newx,type="probability",...){
#' @export #' @export
#' @title #' @title
#' Comparison #' Performance measurement by cross-validation
#' #'
#' @description #' @description
#' Compares models for a continuous response with a cutoff value. #' Compares models for a continuous response with a cutoff value.
...@@ -492,18 +474,6 @@ predict.cornet <- function(object,newx,type="probability",...){ ...@@ -492,18 +474,6 @@ predict.cornet <- function(object,newx,type="probability",...){
# residual increase/decrease # residual increase/decrease
loss$resid.factor <- stats::median((rys-rxs)/rxs) loss$resid.factor <- stats::median((rys-rxs)/rxs)
if(FALSE){# tests
# equality deviance
loss$deviance["binomial"]==mean(res[,"binomial"])
loss$deviance["combined"]==mean(res[,"combined"])
# percentage decrease
#range((rys-rxs)/rxs)
stats::median((rys-rxs)/rxs)
mean((rys-rxs)/rxs)
(sum(rys)-sum(rxs))/sum(rxs)
(loss$deviance["combined"]-loss$deviance["binomial"])/loss$deviance["binomial"]
}
# paired test for each fold # paired test for each fold
loss$resid.pvalue <- numeric() loss$resid.pvalue <- numeric()
for(i in seq_len(nfolds)){ for(i in seq_len(nfolds)){
...@@ -569,17 +539,6 @@ predict.cornet <- function(object,newx,type="probability",...){ ...@@ -569,17 +539,6 @@ predict.cornet <- function(object,newx,type="probability",...){
return(list(y=y,X=X)) return(list(y=y,X=X))
} }
#--- start trial ---
if(FALSE){
n <- 1000
y_hat <- runif(n)
y <- y_hat > 0.9
y <- rbinom(n=n,size=1,prob=0.5)
foldid <- rep(1:10,length.out=n)
.loss(y=y,fit=y_hat,family="binomial",type.measure="auc",foldid=foldid)
}
#--- end trial ---
#--- Internal functions -------------------------------------------------------- #--- Internal functions --------------------------------------------------------
#' @title #' @title
...@@ -685,8 +644,7 @@ if(FALSE){ ...@@ -685,8 +644,7 @@ if(FALSE){
return(invisible(NULL)) return(invisible(NULL))
} }
# Import this function from the palasso package.
# Correct this function in the palasso package (search twice for "# typo").
.loss <- function (y,fit,family,type.measure,foldid=NULL){ .loss <- function (y,fit,family,type.measure,foldid=NULL){
if (!is.list(fit)) { if (!is.list(fit)) {
fit <- list(fit) fit <- list(fit)
...@@ -810,44 +768,3 @@ if(FALSE){ ...@@ -810,44 +768,3 @@ if(FALSE){
} }
return(foldid) return(foldid)
} }
#--- Lost and found ------------------------------------------------------------
# calibrate (for cornet)
#if(test$calibrate){
# fit$calibrate <- CalibratR::calibrate(actual=z,predicted=pred$y[,which.min(fit$gaussian$cvm)],nCores=1,model_idx=5)$calibration_models
#}
# calibrate (for predict.cornet)
#if(test$calibrate){
# prob$calibrate <- CalibratR::predict_calibratR(calibration_models=x$calibrate,new=link,nCores=1)$GUESS_2
#}
# Validate and complete the arguments destined for glmnet::glmnet().
#
# Collects the ... arguments, rejects any name that glmnet::glmnet()
# does not accept, warns about "family" (cornet fixes the family
# internally to "gaussian"/"binomial", so a user-supplied value would
# be ignored), and fills in defaults for "alpha" and "nlambda".
#
# Returns the completed argument list.
#
# Fix: the original assigned the default nlambda twice — once
# unconditionally and once inside the is.null(lambda) branch, leaving
# the inner check unreachable. The default now lives only in the
# branch where it matters (lambda not supplied); a user-supplied
# lambda still overrides nlambda with its length, as before.
.args <- function(...){
  args <- list(...)
  names <- names(formals(glmnet::glmnet))
  if(!is.null(args$family)){
    warning("Unexpected argument \"family\".",call.=FALSE)
  }
  if(any(!names(args) %in% names)){
    stop("Unexpected argument.",call.=FALSE)
  }
  if(is.null(args$alpha)){
    args$alpha <- 1 # lasso penalty by default
  }
  if(is.null(args$lambda)){
    # no lambda sequence supplied: let glmnet generate nlambda values
    if(is.null(args$nlambda)){
      args$nlambda <- 100
    }
  } else {
    # a user-supplied lambda sequence determines nlambda
    args$nlambda <- length(args$lambda)
  }
  return(args)
}
--- ---
pagetitle: palasso
output: github_document output: github_document
editor_options:
chunk_output_type: console
--- ---
<!-- Modify xxx.Rmd, not xxx.md! --> <!-- Modify xxx.Rmd, not xxx.md! -->
...@@ -13,13 +15,31 @@ knitr::opts_chunk$set( ...@@ -13,13 +15,31 @@ knitr::opts_chunk$set(
) )
``` ```
[![Travis-CI Build Status](https://travis-ci.org/rauschenberger/cornet.svg)](https://travis-ci.org/rauschenberger/cornet) [![Travis-CI Build Status](https://travis-ci.org/rauschenberger/cornet.svg)](https://travis-ci.org/rauschenberger/cornet)
[![AppVeyor build status](https://ci.appveyor.com/api/projects/status/github/rauschenberger/cornet?svg=true)](https://ci.appveyor.com/project/rauschenberger/cornet) [![AppVeyor build status](https://ci.appveyor.com/api/projects/status/github/rauschenberger/cornet?svg=true)](https://ci.appveyor.com/project/rauschenberger/cornet)
[![Coverage Status](https://codecov.io/github/rauschenberger/cornet/coverage.svg?branch=master)](https://codecov.io/github/rauschenberger/cornet)
## Scope
Lasso and ridge regression for dichotomised outcomes (extending [glmnet](https://CRAN.R-project.org/package=glmnet)).
## Installation
Install the current release from [CRAN](https://CRAN.R-project.org/package=cornet),
or the latest development version from [GitHub](https://github.com/rauschenberger/cornet):
```{r,eval=FALSE} ```{r,eval=FALSE}
install.packages("cornet")
#install.packages("devtools") #install.packages("devtools")
devtools::install_github("rauschenberger/cornet") devtools::install_github("rauschenberger/cornet")
``` ```
## Reference
A Rauschenberger and E Glaab (2019). "Lasso and ridge regression for dichotomised outcomes". Manuscript in preparation.
<!-- Modify xxx.Rmd, not xxx.md! --> <!-- Modify xxx.Rmd, not xxx.md! -->
[![Travis-CI Build Status](https://travis-ci.org/rauschenberger/cornet.svg)](https://travis-ci.org/rauschenberger/cornet) [![AppVeyor build status](https://ci.appveyor.com/api/projects/status/github/rauschenberger/cornet?svg=true)](https://ci.appveyor.com/project/rauschenberger/cornet) [![Travis-CI Build Status](https://travis-ci.org/rauschenberger/cornet.svg)](https://travis-ci.org/rauschenberger/cornet) [![AppVeyor build status](https://ci.appveyor.com/api/projects/status/github/rauschenberger/cornet?svg=true)](https://ci.appveyor.com/project/rauschenberger/cornet) [![Coverage Status](https://codecov.io/github/rauschenberger/cornet/coverage.svg?branch=master)](https://codecov.io/github/rauschenberger/cornet)
Scope
-----
Lasso and ridge regression for dichotomised outcomes (extending [glmnet](https://CRAN.R-project.org/package=glmnet)).
Installation
------------
Install the current release from [CRAN](https://CRAN.R-project.org/package=cornet), or the latest development version from [GitHub](https://github.com/rauschenberger/cornet):
``` r ``` r
install.packages("cornet")
#install.packages("devtools") #install.packages("devtools")
devtools::install_github("rauschenberger/cornet") devtools::install_github("rauschenberger/cornet")
``` ```
Reference
---------
A Rauschenberger and E Glaab (2019). "Lasso and ridge regression for dichotomised outcomes". Manuscript in preparation.
<!-- Generated by pkgdown: do not edit by hand -->
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Articles • cornet</title>
<!-- jquery -->
<script src="https://code.jquery.com/jquery-3.1.0.min.js" integrity="sha384-nrOSfDHtoPMzJHjVTdCopGqIqeYETSXhZDFyniQ8ZHcVy08QesyHcnOUpMpqnmWq" crossorigin="anonymous"></script>
<!-- Bootstrap -->
<link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-BVYiiSIFeK1dGmJRAkycuHAHRg32OmUcww7on3RYdg4Va+PmSTsz/K68vbdEjh4u" crossorigin="anonymous">
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js" integrity="sha384-Tc5IQib027qvyjSMfHjOMaLkfuWVxZxUPnCJA7l2mCWNIpG9mGCD8wGNIcPD7Txa" crossorigin="anonymous"></script>
<!-- Font Awesome icons -->
<link href="https://maxcdn.bootstrapcdn.com/font-awesome/4.6.3/css/font-awesome.min.css" rel="stylesheet" integrity="sha384-T8Gy5hrqNKT+hzMclPo118YTQO6cYprQmhrYwIiQ/3axmI1hQomh7Ud2hPOy8SP1" crossorigin="anonymous">
<!-- clipboard.js -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/clipboard.js/1.7.1/clipboard.min.js" integrity="sha384-cV+rhyOuRHc9Ub/91rihWcGmMmCXDeksTtCihMupQHSsi8GIIRDG0ThDc3HGQFJ3" crossorigin="anonymous"></script>
<!-- sticky kit -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/sticky-kit/1.1.3/sticky-kit.min.js" integrity="sha256-c4Rlo1ZozqTPE2RLuvbusY3+SU1pQaJC0TjuhygMipw=" crossorigin="anonymous"></script>
<!-- pkgdown -->
<link href="../pkgdown.css" rel="stylesheet">
<script src="../pkgdown.js"></script>
<meta property="og:title" content="Articles" />
<!-- mathjax -->
<script src='https://mathjax.rstudio.com/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML'></script>
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.3/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container template-article-index">
<header>
<div class="navbar navbar-default navbar-fixed-top" role="navigation">
<div class="container">
<div class="navbar-header">
<button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar">
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<span class="navbar-brand">
<a class="navbar-link" href="../index.html">cornet</a>
<span class="label label-default" data-toggle="tooltip" data-placement="bottom" title="Released package">0.0.1</span>
</span>
</div>
<div id="navbar" class="navbar-collapse collapse">
<ul class="nav navbar-nav">
<li>
<a href="../index.html">
<span class="fa fa-home fa-lg"></span>
</a>
</li>
<li>
<a href="../reference/index.html">Reference</a>
</li>
<li class="dropdown">
<a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button" aria-expanded="false">
Articles
<span class="caret"></span>
</a>
<ul class="dropdown-menu" role="menu">
<li>
<a href="../articles/vignette.html">Lasso and ridge regression for dichotomised outcomes</a>
</li>
</ul>
</li>
<li>
<a href="../news/index.html">Changelog</a>
</li>
</ul>
<ul class="nav navbar-nav navbar-right">
<li>
<a href="https://github.com/rauschenberger/colasso">
<span class="fa fa-github fa-lg"></span>
</a>
</li>
</ul>
</div><!--/.nav-collapse -->
</div><!--/.container -->
</div><!--/.navbar -->
</header>
<div class="row">
<div class="col-md-9 contents">
<div class="page-header">
<h1>Articles</h1>
</div>
<div class="section ">
<h3>All vignettes</h3>
<p class="section-desc"></p>
<ul>
<li><a href="vignette.html">Lasso and ridge regression for dichotomised outcomes</a></li>
</ul>
</div>
</div>
</div>
<footer>
<div class="copyright">
<p>Developed by Armin Rauschenberger.</p>
</div>
<div class="pkgdown">
<p>Site built with <a href="http://pkgdown.r-lib.org/">pkgdown</a>.</p>
</div>
</footer>
</div>