
Commit 8f88fd0b authored by Armin Rauschenberger

automation

parent 5b89a40c
@@ -44,7 +44,7 @@
#'
#' @param alpha.meta
#' elastic net mixing parameter for meta learner\strong{:}
#' numeric between \eqn{0} (ridge) and \eqn{1} (lasso),
#' numeric between \eqn{0} (ridge) and \eqn{1} (lasso)
#'
#' @param ...
#' further arguments passed to \code{\link[glmnet]{glmnet}}
@@ -58,13 +58,19 @@
#' The \eqn{q} outcomes should be positively correlated.
#' Avoid negative correlations by changing the sign of the variable.
#'
#' elastic net mixing parameters:
#' \code{alpha.base} controls input-output effects,
#' \code{alpha.meta} controls output-output effects;
#' ridge (\eqn{0}) renders dense models,
#' lasso (\eqn{1}) renders sparse models
#'
#' @examples
#' n <- 30; q <- 2; p <- 20
#' Y <- matrix(rnorm(n*q),nrow=n,ncol=q)
#' X <- matrix(rnorm(n*p),nrow=n,ncol=p)
#' object <- mixnet(Y=Y,X=X)
#'
mixnet <- function(Y,X,family="gaussian",nfolds=10,foldid=NULL,type.measure="deviance",alpha.base=1,alpha.meta=0,...){
mixnet <- function(Y,X,family="gaussian",nfolds=10,foldid=NULL,type.measure="deviance",alpha.base=0,alpha.meta=0,...){
#--- temporary ---
# family <- "gaussian"; nfolds <- 10; foldid <- NULL; type.measure <- "deviance"
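Aside: a minimal sketch of how the two mixing parameters documented above might be set, assuming only the mixnet() signature from this commit (toy data; the sign flip for negatively correlated outcomes follows the advice in the details section):

# Minimal sketch, assuming the mixnet() signature from this commit (toy data).
set.seed(1)
n <- 30; q <- 2; p <- 20
Y <- matrix(rnorm(n*q),nrow=n,ncol=q)
X <- matrix(rnorm(n*p),nrow=n,ncol=p)
# Y[,2] <- -Y[,2]  # flip the sign if an outcome correlates negatively with the others
sparse <- mixnet(Y=Y,X=X,alpha.base=1,alpha.meta=0)  # lasso base learners: sparse input-output effects
dense <- mixnet(Y=Y,X=X,alpha.base=0,alpha.meta=0)   # ridge base learners: dense input-output effects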
@@ -256,7 +262,6 @@ predict.mixnet <- function(object,newx,type="response",...){
list <- list(base=base,meta=meta)
if(type=="response"){
for(i in seq_len(q)){
base[,i] <- .mean.function(x=base[,i],family=x$info$family[i])
......
@@ -181,7 +181,7 @@ numeric between \(0\) (ridge) and \(1\) (lasso)</p></td>
<tr>
<th>alpha.meta</th>
<td><p>elastic net mixing parameter for meta learner<strong>:</strong>
numeric between \(0\) (ridge) and \(1\) (lasso),</p></td>
numeric between \(0\) (ridge) and \(1\) (lasso)</p></td>
</tr>
<tr>
<th>mnorm</th>
......
@@ -118,7 +118,7 @@
</div>
<pre class="usage"><span class='fu'>mixnet</span>(<span class='no'>Y</span>, <span class='no'>X</span>, <span class='kw'>family</span> <span class='kw'>=</span> <span class='st'>"gaussian"</span>, <span class='kw'>nfolds</span> <span class='kw'>=</span> <span class='fl'>10</span>, <span class='kw'>foldid</span> <span class='kw'>=</span> <span class='kw'>NULL</span>,
<span class='kw'>type.measure</span> <span class='kw'>=</span> <span class='st'>"deviance"</span>, <span class='kw'>alpha.base</span> <span class='kw'>=</span> <span class='fl'>1</span>, <span class='kw'>alpha.meta</span> <span class='kw'>=</span> <span class='fl'>0</span>, <span class='no'>...</span>)</pre>
<span class='kw'>type.measure</span> <span class='kw'>=</span> <span class='st'>"deviance"</span>, <span class='kw'>alpha.base</span> <span class='kw'>=</span> <span class='fl'>0</span>, <span class='kw'>alpha.meta</span> <span class='kw'>=</span> <span class='fl'>0</span>, <span class='no'>...</span>)</pre>
<h2 class="hasAnchor" id="arguments"><a class="anchor" href="#arguments"></a>Arguments</h2>
<table class="ref-arguments">
@@ -167,7 +167,7 @@ numeric between \(0\) (ridge) and \(1\) (lasso)</p></td>
<tr>
<th>alpha.meta</th>
<td><p>elastic net mixing parameter for meta learner<strong>:</strong>
numeric between \(0\) (ridge) and \(1\) (lasso),</p></td>
numeric between \(0\) (ridge) and \(1\) (lasso)</p></td>
</tr>
<tr>
<th>...</th>
@@ -179,6 +179,11 @@ numeric between \(0\) (ridge) and \(1\) (lasso),</p></td>
<p>The \(q\) outcomes should be positively correlated.
Avoid negative correlations by changing the sign of the variable.</p>
<p>elastic net mixing parameters:
<code>alpha.base</code> controls input-output effects,
<code>alpha.meta</code> controls output-output effects;
ridge (\(0\)) renders dense models,
lasso (\(1\)) renders sparse models</p>
<h2 class="hasAnchor" id="references"><a class="anchor" href="#references"></a>References</h2>
......
@@ -141,9 +141,9 @@ i.e. the weights for the base learners.</p>
<span class='no'>Y</span> <span class='kw'>&lt;-</span> <span class='fu'>matrix</span>(<span class='fu'>rnorm</span>(<span class='no'>n</span>*<span class='no'>q</span>),<span class='kw'>nrow</span><span class='kw'>=</span><span class='no'>n</span>,<span class='kw'>ncol</span><span class='kw'>=</span><span class='no'>q</span>)
<span class='no'>X</span> <span class='kw'>&lt;-</span> <span class='fu'>matrix</span>(<span class='fu'>rnorm</span>(<span class='no'>n</span>*<span class='no'>p</span>),<span class='kw'>nrow</span><span class='kw'>=</span><span class='no'>n</span>,<span class='kw'>ncol</span><span class='kw'>=</span><span class='no'>p</span>)
<span class='no'>object</span> <span class='kw'>&lt;-</span> <span class='fu'><a href='mixnet.html'>mixnet</a></span>(<span class='kw'>Y</span><span class='kw'>=</span><span class='no'>Y</span>,<span class='kw'>X</span><span class='kw'>=</span><span class='no'>X</span>)</div><div class='output co'>#&gt; <span class='warning'>Warning: Negative correlation!</span></div><div class='input'><span class='fu'>weights</span>(<span class='no'>object</span>)</div><div class='output co'>#&gt; y1 y2
#&gt; (Intercept) 0.283699496 -8.542418e-02
#&gt; V1 0.001318683 3.818776e-38
#&gt; V2 0.015338791 0.000000e+00</div><div class='input'>
#&gt; (Intercept) 2.827364e-01 -0.08542418
#&gt; V1 3.158520e-37 0.00000000
#&gt; V2 3.171601e-36 0.00000000</div><div class='input'>
</div></pre>
</div>
<div class="col-md-3 hidden-xs hidden-sm" id="sidebar">
......
@@ -46,7 +46,7 @@ vector of length \eqn{1} or \eqn{q} with entries
numeric between \eqn{0} (ridge) and \eqn{1} (lasso)}
\item{alpha.meta}{elastic net mixing parameter for meta learner\strong{:}
numeric between \eqn{0} (ridge) and \eqn{1} (lasso),}
numeric between \eqn{0} (ridge) and \eqn{1} (lasso)}
\item{mnorm}{multivariate normal regression\strong{:}
logical}
......
@@ -5,7 +5,7 @@
\title{Multivariate Elastic Net Regression}
\usage{
mixnet(Y, X, family = "gaussian", nfolds = 10, foldid = NULL,
type.measure = "deviance", alpha.base = 1, alpha.meta = 0, ...)
type.measure = "deviance", alpha.base = 0, alpha.meta = 0, ...)
}
\arguments{
\item{Y}{outputs\strong{:}
@@ -36,7 +36,7 @@ vector of length \eqn{1} or \eqn{q} with entries
numeric between \eqn{0} (ridge) and \eqn{1} (lasso)}
\item{alpha.meta}{elastic net mixing parameter for meta learner\strong{:}
numeric between \eqn{0} (ridge) and \eqn{1} (lasso),}
numeric between \eqn{0} (ridge) and \eqn{1} (lasso)}
\item{...}{further arguments passed to \code{\link[glmnet]{glmnet}}}
}
@@ -46,6 +46,12 @@ Implements multivariate elastic net regression.
\details{
The \eqn{q} outcomes should be positively correlated.
Avoid negative correlations by changing the sign of the variable.
elastic net mixing parameters:
\code{alpha.base} controls input-output effects,
\code{alpha.meta} controls output-output effects;
ridge (\eqn{0}) renders dense models,
lasso (\eqn{1}) renders sparse models
}
\examples{
n <- 30; q <- 2; p <- 20
......
#--- Initialisation ---
set.seed(1)
n <- 30; q <- 3; p <- 20
Y <- matrix(c(rnorm(n),rbinom(n,size=1,prob=0.5),rpois(n,lambda=4)),
@@ -6,14 +8,19 @@ Y <- matrix(c(rnorm(n),rbinom(n,size=1,prob=0.5),rpois(n,lambda=4)),
X <- matrix(rnorm(n*p),nrow=n,ncol=p)
family <- c("gaussian","binomial","poisson")
foldid <- palasso:::.folds(y=Y[,2],nfolds=5)
object <- mixnet::mixnet(Y=Y,X=X,family=family,foldid=foldid)
glmnet <- list()
for(i in seq_len(q)){
glmnet[[i]] <- glmnet::cv.glmnet(x=X,y=Y[,i],family=family[i],foldid=foldid)
}
for(alpha in c(0.05,0.95)){
object <- mixnet::mixnet(Y=Y,X=X,family=family,alpha.base=alpha,foldid=foldid)
glmnet <- list()
for(i in seq_len(q)){
glmnet[[i]] <- glmnet::cv.glmnet(x=X,y=Y[,i],family=family[i],alpha=alpha,foldid=foldid)
}
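# Note: the alpha loop exercises a near-ridge (0.05) and a near-lasso (0.95) base learner, so every test below runs under both regimes.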
#--- Equality glmnet and mixnet ---
testthat::test_that("lambda: glmnet = stacknet",{
testthat::test_that("lambda: glmnet = mixnet",{
for(i in seq_len(q)){
a <- glmnet[[i]]$lambda
b <- object$base[[i]]$lambda
@@ -21,9 +28,18 @@ testthat::test_that("lambda: glmnet = stacknet",{
cond <- all(a[seq_len(max)]==b[seq_len(max)])
testthat::expect_true(cond)
}
})
})
testthat::test_that("cvm: glmnet = stacknet",{
testthat::test_that("lambda.min: glmnet = mixnet",{
for(i in seq_len(q)){
a <- glmnet[[i]]$lambda.min
b <- object$base[[i]]$lambda.min
cond <- (a==b)
testthat::expect_true(cond)
}
})
testthat::test_that("cvm: glmnet = mixnet",{
for(i in seq_len(q)){
a <- glmnet[[i]]$cvm
b <- object$base[[i]]$cvm
@@ -31,9 +47,9 @@ testthat::test_that("cvm: glmnet = stacknet",{
cond <- all(abs(a[seq_len(max)]-b[seq_len(max)])<1e-06)
testthat::expect_true(cond)
}
})
})
testthat::test_that("glmnet.fit: glmnet = stacknet",{
testthat::test_that("glmnet.fit: glmnet = mixnet",{
for(i in seq_len(q)){
a <- glmnet[[i]]$glmnet.fit
b <- object$base[[i]]$glmnet.fit
@@ -43,9 +59,22 @@ testthat::test_that("glmnet.fit: glmnet = stacknet",{
testthat::expect_true(cond)
}
}
})
})
#--- Coherence mixnet ---
testthat::test_that("stacking = pooling",{
testthat::test_that("predict: glmnet = mixnet",{
a <- mixnet:::predict.mixnet(object=object,newx=X)$base
for(i in seq_len(q)){
b <- stats::predict(object=glmnet[[i]],newx=X,type="response",s="lambda.min")
cond <- all(a[,i]==b)
testthat::expect_true(cond)
}
})
#--- Equivalence stacking and pooling ---
testthat::test_that("stacking = pooling",{
pred0 <- mixnet:::predict.mixnet(object,newx=X)$meta
coef <- mixnet:::coef.mixnet(object)
pred1 <- matrix(data=NA,nrow=n,ncol=q)
@@ -54,4 +83,6 @@ testthat::test_that("stacking = pooling",{
}
cond <- all(abs(pred0-pred1)<1e-06)
testthat::expect_true(cond)
})
})
}
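For orientation: the last test asserts that "stacking" (predicting with the fitted meta learner from the cross-validated base predictions) coincides with "pooling" (combining those predictions linearly via the meta learner's intercept and weights). A self-contained toy sketch of that identity, with lm() standing in for the package's glmnet-based meta learner:

# Toy sketch: stacking (fitted meta model) equals pooling (manual linear combination).
set.seed(1)
base <- matrix(rnorm(30*2),nrow=30,ncol=2)  # stand-in for cross-validated base predictions
y <- rnorm(30)                              # stand-in outcome
meta <- lm(y~base)                          # "stacking": fit a toy meta learner
stacked <- fitted(meta)                     # fitted values of the meta learner
pooled <- drop(coef(meta)[1]+base%*%coef(meta)[-1])  # "pooling": intercept + weighted sum
all(abs(stacked-pooled)<1e-06)              # TRUE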