
Commit 476709d4 authored by Armin Rauschenberger

competing models

parent 81c3c9f2
@@ -447,10 +447,11 @@ print.joinet <- function(x,...){
 #' between \eqn{1} and \code{nfolds.int};
 #' or \code{NULL}
 #'
-#' @param mnorm,spls,mrce,sier,mtps,rmtl,gpm
+#' @param compare
 #' experimental arguments\strong{:}
-#' logical
-#' (\code{TRUE} requires packages \code{spls}, \code{MRCE}, \code{SiER}, \code{MTPS}, \code{RMTL} or \code{GPM})
+#' character vector with entries "mnorm", "spls", "mrce",
+#' "sier", "mtps", "rmtl", "gpm" and others
+#' (requires packages \code{spls}, \code{MRCE}, \code{SiER}, \code{MTPS}, \code{RMTL} or \code{GPM})
 #'
 #' @param mice
 #' missing data imputation\strong{:}
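For orientation, a minimal sketch of the new `compare` interface documented above (not taken from the commit): the simulated `Y` and `X` are placeholders, and the chosen methods assume the packages glmnet and earth are installed.

```r
set.seed(1)
n <- 50; p <- 20; q <- 3
X <- matrix(rnorm(n*p), nrow=n, ncol=p)                 # toy features
Y <- X[, seq_len(q)] + matrix(rnorm(n*q), nrow=n, ncol=q)  # correlated toy outcomes
loss <- cv.joinet(Y=Y, X=X, compare=c("mnorm", "mars"))  # compare against mnorm and mars
loss
```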
@@ -514,10 +515,12 @@ print.joinet <- function(x,...){
 #' set.seed(1)
 #' cv.joinet(Y=Y,X=X,alpha.base=0) # ridge}
 #'
-cv.joinet <- function(Y,X,family="gaussian",nfolds.ext=5,nfolds.int=10,foldid.ext=NULL,foldid.int=NULL,type.measure="deviance",alpha.base=1,alpha.meta=1,mnorm=FALSE,spls=FALSE,mrce=FALSE,sier=FALSE,mtps=FALSE,rmtl=FALSE,gpm=FALSE,mice=FALSE,cvpred=FALSE,...){
-# nfolds.ext <- 5; nfolds.int <- 10; foldid.ext <- foldid.int <- NULL; type.measure <- "deviance"; alpha.base <- 1; alpha.meta <- 1; mnorm <- spls <- mrce <- sier <- mtps <- rmtl <- gpm <- mice <- cvpred <- TRUE
-# nfolds.ext <- 1; foldid.ext <- fold; nfolds.int <- 10; foldid.int <- NULL
+cv.joinet <- function(Y,X,family="gaussian",nfolds.ext=5,nfolds.int=10,foldid.ext=NULL,foldid.int=NULL,type.measure="deviance",alpha.base=1,alpha.meta=1,compare=NULL,mice=FALSE,cvpred=FALSE,...){
+if(FALSE){
+nfolds.ext <- 5; nfolds.int <- 10; foldid.ext <- foldid.int <- NULL; type.measure <- "deviance"; alpha.base <- 1; alpha.meta <- 1; compare <- c("mnorm","spls","mars","mrf"); mice <- cvpred <- TRUE
+nfolds.ext <- 1; foldid.ext <- fold; nfolds.int <- 10; foldid.int <- NULL
+}
 n <- nrow(Y)
 q <- ncol(Y)
@@ -538,27 +541,41 @@ cv.joinet <- function(Y,X,family="gaussian",nfolds.ext=5,nfolds.int=10,foldid.ex
 stop("Invalid argument family",call.=FALSE)
 }
-#--- checks ---
-if(any(mnorm,spls,mrce,sier,gpm) & any(family!="gaussian")){
-stop("\"mnorm\", \"spls\", \"mrce\" and \"sier\" require family \"gaussian\".",call.=FALSE)
-}
-if(any(mtps,rmtl) & any(!family %in% c("gaussian","binomial"))){
-stop("\"mtps\" and \"rmtl\" require family \"gaussian\" or \"binomial\".",call.=FALSE)
+# check packages
+pkgs <- .packages(all.available=TRUE)
+for(i in seq_along(compare)){
+pkg <- switch(compare[i],mnorm="glmnet",mars="earth",spls="spls",mtps="MTPS",
+rmtl="RMTL",mrf="MultivariateRandomForest",mcen="mcen",
+map="remMap",stop("Invalid method.",call.=FALSE))
+if(!pkg %in% pkgs){
+stop("Method \"",compare[i],"\" requires package \"",pkg,"\".",call.=FALSE)
+}
 }
+#--- checks ---
+#if(any( & any(family!="gaussian")){
+# stop("\"mnorm\", \"spls\", \"mrce\" and \"sier\" require family \"gaussian\".",call.=FALSE)
+#}
+#if(any(mtps,rmtl) & any(!family %in% c("gaussian","binomial"))){
+# stop("\"mtps\" and \"rmtl\" require family \"gaussian\" or \"binomial\".",call.=FALSE)
+#}
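For reference, a standalone sketch that mirrors the method-to-package mapping used in the check above, e.g. to see which packages a given `compare` vector would need; the example vector and the install step are illustrative only, not part of the commit.

```r
compare <- c("mnorm", "mars", "spls")
needed <- vapply(compare, function(m) switch(m,
  mnorm="glmnet", mars="earth", spls="spls", mtps="MTPS",
  rmtl="RMTL", mrf="MultivariateRandomForest", mcen="mcen", map="remMap"),
  FUN.VALUE=character(1))                      # method name -> package name
missing <- setdiff(needed, .packages(all.available=TRUE))
if(length(missing) > 0) install.packages(missing)
```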
 #--- cross-validated predictions ---
-models <- c("base","meta","mnorm"[mnorm],"spls"[spls],"mrce"[mrce],"sier"[sier],"mtps"[mtps],"rmtl"[rmtl],"gpm"[gpm],"none")
+models <- c("base","meta",compare,"none")
 pred <- lapply(X=models,function(x) matrix(data=NA,nrow=n,ncol=q))
 names(pred) <- models
 for(i in seq_len(nfolds.ext)){
 Y0 <- Y[foldid.ext!=i,,drop=FALSE]
+#Y1 <- Y[foldid.ext==i,,drop=FALSE]
 X0 <- X[foldid.ext!=i,,drop=FALSE]
 X1 <- X[foldid.ext==i,,drop=FALSE]
+# Remove constant features and impute missing values here?
+# remove constant features
+cond <- apply(X=X0,MARGIN=2,FUN=function(x) stats::sd(x)!=0)
+X0 <- X0[,cond]; X1 <- X1[,cond]
 if(is.null(foldid.int)){
 foldid <- palasso:::.folds(y=Y0[,1],nfolds=nfolds.int) # temporary Y0[,1]
 } else {
@@ -571,52 +588,47 @@ cv.joinet <- function(Y,X,family="gaussian",nfolds.ext=5,nfolds.int=10,foldid.ex
 pred$base[foldid.ext==i,] <- temp$base
 pred$meta[foldid.ext==i,] <- temp$meta
-# constant features, missing values
-cond <- apply(X0,2,function(x) stats::sd(x)!=0)
-x0 <- X0[,cond]
-x1 <- X1[,cond]
+# missing values
+# Consider moving this upwards!
 if(mice & any(is.na(Y0))){
 if(requireNamespace("mice",quietly=TRUE)){
 y0 <- as.matrix(mice::complete(data=mice::mice(Y0,m=1,method="pmm",seed=1,printFlag=FALSE),action="all")[[1]])
 } else {
-stop("mice=TRUE requires install.packages(\"mice\").",call.=FALSE)
+stop("Imputation by PMM requires install.packages(\"mice\").",call.=FALSE)
 }
 } else {
+#y0 <- apply(X=Y0,MARGIN=2,FUN=function(x) ifelse(is.na(x),sample(x[!is.na(x)],size=1),x))
 y0 <- apply(X=Y0,MARGIN=2,FUN=function(x) ifelse(is.na(x),stats::median(x[!is.na(x)]),x))
 }
 all(Y0==y0,na.rm=TRUE)
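A small illustration (not from the commit) of the fallback branch above, which replaces each missing outcome by its column median via the vectorised ifelse(); the toy vector is made up.

```r
y <- c(2, NA, 4, 10)
ifelse(is.na(y), stats::median(y[!is.na(y)]), y)  # median of 2, 4, 10 is 4
#> [1]  2  4  4 10
```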
 # other learners
-if(mnorm){
+if("mnorm" %in% compare){
 net <- glmnet::cv.glmnet(x=X0,y=y0,family="mgaussian",foldid=foldid,alpha=alpha.base) # add ellipsis (...)
 pred$mnorm[foldid.ext==i,] <- stats::predict(object=net,newx=X1,s="lambda.min",type="response")
 }
-if(spls){
-cv.spls <- spls::cv.spls(x=x0,y=y0,fold=nfolds.int,K=seq_len(10),
-eta=seq(from=0.1,to=0.9,by=0.1),plot.it=FALSE)
-spls <- spls::spls(x=x0,y=y0,K=cv.spls$K.opt,eta=cv.spls$eta.opt)
-pred$spls[foldid.ext==i,] <- spls::predict.spls(object=spls,newx=x1,type="fit")
-}
-if(mrce){
-stop("MRCE not yet implemented",call.=FALSE) # bug?
-lam1 <- rev(10^seq(from=-2,to=0,by=0.5))
-lam2 <- rev(10^seq(from=-2,to=0,by=0.5))
-#lam1 <- lam2 <- 10^seq(from=0,to=-0.7,length.out=5)
-#lam2 <- rev(10^seq(from=0,to=2,by=0.5))
-#lam2 <- c(2,1,0.5)
-#lam1 <- 10^seq(from=0,to=-5,length.out=11)
-#lam2 <- seq(from=1,to=0.1,by=-0.1)
-object <- MRCE::mrce(X=x0,Y=y0,lam1.vec=lam1,lam2.vec=lam2,method="cv",silent=FALSE,cov.tol=0.1,tol.out=1e-10)
-pred$mrce[foldid.ext==i,] <- matrix(object$muhat,nrow=nrow(x1),ncol=q,byrow=TRUE) + x1 %*% object$Bhat
-}
-if(sier){
-stop("SiER not yet implemented",call.=FALSE) # slow!
-object <- SiER::cv.SiER(X=X0,Y=y0,K.cv=5,thres=0.5)
-pred$sier[foldid.ext==i,] <- SiER::pred.SiER(cv.fit=object,X.new=X1)
-}
-if(mtps){
+if("mars" %in% compare){
+if(all(family=="gaussian")){
+object <- earth::earth(x=X0,y=y0)
+} else if(all(family=="binomial")){
+object <- earth::earth(x=X0,y=y0,glm=list(family=binomial))
+} else {
+stop("Invalid.",call.=FALSE)
+}
+pred$mars[foldid.ext==i,] <- earth:::predict.earth(object=object,newdata=X1,type="response")
+#object <- mda::mars(x=X0,y=Y0)
+#pred$mars[foldid.ext==i,] <- mda:::predict.mars(object=object,newdata=X1)
+}
+if("spls" %in% compare){
+cv.spls <- spls::cv.spls(x=X0,y=y0,fold=nfolds.int,K=seq_len(10),
+eta=seq(from=0.1,to=0.9,by=0.1),plot.it=FALSE)
+object <- spls::spls(x=X0,y=y0,K=cv.spls$K.opt,eta=cv.spls$eta.opt)
+pred$spls[foldid.ext==i,] <- spls::predict.spls(object=object,newx=X1,type="fit")
+}
+if("mtps" %in% compare){
 if(alpha.base==0){
 step1 <- MTPS::glmnet.ridge
 } else if(alpha.base==1){
@@ -624,58 +636,138 @@ cv.joinet <- function(Y,X,family="gaussian",nfolds.ext=5,nfolds.int=10,foldid.ex
 } else {
 stop("MTPS requires alpha.base 0 or 1.",call.=FALSE)
 }
-#body <- lapply(body(step1),function(x) x)
-#body(step1) <- substitute(body)
-#body(step1)[[6]][[3]][[2]][[3]] <- "lambda.min"
-#body <- gsub(pattern="lambda.1se",replacement="lambda.min",x=body(step1))
-#body(step1)[[1]]
-#body(step1) <- as.call(c(as.name("{"),body))
-#body(step1) <- eval(parse(text=body))
-#body(step1) <- substitute(eval(body))
-#body(step1) <- substitute(expression(body))
-#eval(parse(text = body))
+#-------------------------------
+#--- manual lambda.min start ---
+#body <- body(step1)
+#body[[6]][[3]][[2]][[3]] <- "lambda.min"
+#body[[7]][[3]][[4]] <- "lambda.min"
+#body[[8]][[3]][[3]][[2]][[4]] <- "lambda.min"
+#body(step1) <- body
+#if(alpha.base==0){
+# assignInNamespace(x="glmnet.ridge",value=step1,ns="MTPS")
+#} else if(alpha.base==1){
+# assignInNamespace(x="glmnet.lasso",value=step1,ns="MTPS")
+#}
+#--- manual lambda.min end ---
+#-----------------------------
 step2 <- MTPS::rpart1
 object <- MTPS::MTPS(xmat=X0,ymat=y0,family=family,nfold=nfolds.int,
 method.step1=step1,method.step2=step2)
-# nfold has no effect for cv=FALSE (default)
 pred$mtps[foldid.ext==i,] <- MTPS::predict.MTPS(object=object,newdata=X1,type="response")
+# nfold has no effect for cv=FALSE (default)
 }
-if(rmtl){
+if("rmtl" %in% compare){
 if(all(family=="gaussian")){
 type <- "Regression"
-Y0l <- lapply(seq_len(ncol(Y0)),function(i) Y0[,i,drop=FALSE])
+y0l <- lapply(seq_len(ncol(y0)),function(i) y0[,i,drop=FALSE])
 } else if(all(family=="binomial")){
 type <- "Classification"
-Y0l <- lapply(seq_len(ncol(Y0)),function(i) 2*Y0[,i,drop=FALSE]-1)
+y0l <- lapply(seq_len(ncol(y0)),function(i) 2*y0[,i,drop=FALSE]-1)
 } else {
 stop("RMTL requires either \"gaussian\" or \"binomial\".",call.=FALSE)
 }
-X0l <- lapply(seq_len(ncol(Y0)),function(i) X0)
-X1l <- lapply(seq_len(ncol(Y0)),function(i) X1)
-# Manually tune regularisation parameter lambda2!
-# cv lambda2 start
+X0l <- lapply(seq_len(ncol(y0)),function(i) X0)
+X1l <- lapply(seq_len(ncol(y0)),function(i) X1)
+#---------------------------
+#--- manual tuning start ---
 Lam2_seq <- 10^seq(from=2,to=-5,length.out=101)
 cvm <- numeric()
 for(j in seq_along(Lam2_seq)){
 set.seed(1)
-cvm[j] <- min(RMTL::cvMTL(X=X0l,Y=Y0l,type=type,Lam1=0,Lam2=Lam2_seq[j])$cvm)
+cvm[j] <- min(RMTL::cvMTL(X=X0l,Y=y0l,type=type,Lam1=0,Lam2=Lam2_seq[j])$cvm)
 }
-graphics::plot(Lam2_seq,cvm)
+#graphics::plot(x=Lam2_seq,y=cvm)
 Lam2 <- Lam2_seq[which.min(cvm)]
-# cv lambda2 end
-cvMTL <- RMTL::cvMTL(X=X0l,Y=Y0l,type=type,Lam2=Lam2)
-MTL <- RMTL::MTL(X=X0l,Y=Y0l,type=type,Lam1=cvMTL$Lam1.min)
+#--- manual tuning end ----
+#--------------------------
+cvMTL <- RMTL::cvMTL(X=X0l,Y=y0l,type=type,Lam2=Lam2)
+MTL <- RMTL::MTL(X=X0l,Y=y0l,type=type,Lam1=cvMTL$Lam1.min)
 temp <- RMTL:::predict.MTL(object=MTL,newX=X1l)
 pred$rmtl[foldid.ext==i,] <- do.call(what="cbind",args=temp)
 }
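A side remark with a toy check (not from the commit): in the "Classification" branch above, the 0/1 outcomes are recoded to -1/+1 via 2*y-1, which appears to be the label coding RMTL expects; the vector below is made up.

```r
y <- c(0, 1, 1, 0)
2*y - 1   # recode 0/1 labels as -1/+1
#> [1] -1  1  1 -1
```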
-if(gpm){
+if("mrf" %in% compare){
+pred$mrf[foldid.ext==i,] <- MultivariateRandomForest::build_forest_predict(trainX=X0,trainY=Y0,
+n_tree=10,m_feature=floor(ncol(X0)/3),min_leaf=5,testX=X1)
+# use n_tree = 500
+# alternative: IntegratedMRF
+# Check why this does not work!
+}
+if("mcen" %in% compare){
+if(all(family=="gaussian")){mfamily <- "mgaussian"}
+if(all(family=="binomial")){mfamily <- "mbinomial"}
+object <- mcen::cv.mcen(x=X0,y=y0,family=mfamily,folds=foldid,ky=1)
+temp <- mcen:::predict.cv.mcen(object=object,newx=X1)
+pred$mcen[foldid.ext==i,] <- as.matrix(temp)
+# default returns error (thus ky=1)
+}
+if("map" %in% compare){
+#if(alpha.base==0){
+# lamL1.v <- 0
+# lamL2.v <- fit$base$y1$lambda
+#} else if(alpha.base==1){
+# lamL1.v <- fit$base$y1$lambda
+# lamL2.v <- 0
+#} else {
+# stop("")
+#}
+mean <- colMeans(y0)
+y0s <- y0-matrix(data=mean,nrow=nrow(X0),ncol=ncol(y0),byrow=TRUE)
+temp <- glmnet::glmnet(x=X0,y=y0s,family="mgaussian",intercept=FALSE)
+lamL1.v <- exp(seq(from=log(min(temp$lambda)),to=log(max(temp$lambda)),length.out=30))
+lamL2.v <- seq(from=0,to=5,by=1) # TEMPORARY
+cv <- remMap::remMap.CV(X=X0,Y=y0s,lamL1.v=lamL1.v,lamL2.v=lamL2.v)
+#graphics::plot(x=lamL1.v,y=log(as.numeric(cv$ols.cv[,3])))
+index <- which(cv$ols.cv==min(cv$ols.cv),arr.ind=TRUE)
+object <- remMap::remMap(X.m=X0,Y.m=y0s,lamL1=lamL1.v[index[1]],lamL2=lamL2.v[index[2]])
+pred$map[foldid.ext==i,] <- matrix(data=mean,nrow=nrow(X1),ncol=ncol(y0),byrow=TRUE) + X1 %*% object$phi
+# alternative: groupRemMap
+}
+if(FALSE){ #"mrce" %in% compare
+# bug?
+lam1 <- rev(10^seq(from=-2,to=0,by=0.5))
+lam2 <- rev(10^seq(from=-2,to=0,by=0.5))
+#lam1 <- lam2 <- 10^seq(from=0,to=-0.7,length.out=5)
+#lam2 <- rev(10^seq(from=0,to=2,by=0.5))
+#lam2 <- c(2,1,0.5)
+#lam1 <- 10^seq(from=0,to=-5,length.out=11)
+#lam2 <- seq(from=1,to=0.1,by=-0.1)
+object <- MRCE::mrce(X=X0,Y=y0,lam1.vec=lam1,lam2.vec=lam2,method="cv",silent=FALSE,cov.tol=0.1,tol.out=1e-10)
+pred$mrce[foldid.ext==i,] <- matrix(object$muhat,nrow=nrow(X1),ncol=q,byrow=TRUE) + X1 %*% object$Bhat
+}
+if(FALSE){ # "sier" %in% compare
+stop("SiER not yet implemented",call.=FALSE) # slow!
+object <- SiER::cv.SiER(X=X0,Y=y0,K.cv=5,thres=0.5)
+pred$sier[foldid.ext==i,] <- SiER::pred.SiER(cv.fit=object,X.new=X1)
+}
+if(FALSE){ #"gpm" %in% compare
 if(any(family!="gaussian")){
 stop("GPM requires \"gaussian\" family.",call.=FALSE)
 }
 object <- GPM::Fit(X=X0,Y=Y0)
 pred$gpm[foldid.ext==i,] <- GPM::Predict(XF=X1,Model=object)$YF
+# Check why this does not work!
+}
+if(FALSE){ #"MSGLasso" %in% compare
+MSGLasso::MSGLasso.cv(X=X0,Y=Y0)
+# requires many user inputs
+}
+if(FALSE){
+# PMA: not for prediction
+}
+if(FALSE){ # MBSP (not for hd data)
+# intercept?
+object <- MBSP::mbsp.tpbn(X=X0,Y=Y0)
+X1 %*% object$B
 }
 if(FALSE){ # bgsmtr (for SNPs only? error!)
@@ -169,9 +169,69 @@
 ```r
 #install.packages("MTPS")
-data("HIV",package="MTPS",verbose=TRUE)
-loss1 <- cv.joinet(Y=YY,X=XX,mnorm=TRUE,mtps=TRUE)
+data("HIV",package="MTPS")
+loss1 <- cv.joinet(Y=YY,X=XX,mnorm=TRUE,spls=TRUE,mtps=TRUE)
+#install.packages("spls")
+data(yeast,package="spls")
+loss2 <- cv.joinet(Y=yeast$y,X=yeast$x,mnorm=TRUE,spls=TRUE,mtps=TRUE)
+data(mice,package="spls")
+loss3 <- cv.joinet(Y=mice$y,X=mice$x,mnorm=TRUE,spls=TRUE,mtps=TRUE)
+# install.packages("MRCE")
+data(stock04,package="MRCE",verbose=TRUE)
+# otherwise simulated
+#install.packages("SiER")
+# simulated!
+library(MASS)
+total.noise <- 0.1
+rho <- 0.3
+rho.e <- 0.2
+nvar=500
+nvarq <- 3
+sigma2 <- total.noise/nvarq
+sigmaX=0.1
+nvar.eff=150
+Sigma=matrix(0,nvar.eff,nvar.eff)
+for(i in 1:nvar.eff){
+for(j in 1:nvar.eff){
+Sigma[i,j]=rho^(abs(i-j))
+}
+}
+Sigma2.y <- matrix(sigma2*rho.e,nvarq, nvarq)
+diag(Sigma2.y) <- sigma2
+betas.true <- matrix(0, nvar, 3)
+betas.true[1:15,1]=rep(1,15)/sqrt(15)
+betas.true[16:45,2]=rep(0.5,30)/sqrt(30)
+betas.true[46:105,3]=rep(0.25,60)/sqrt(60)
+ntest <- 500
+ntrain <- 90
+ntot <- ntest+ntrain
+X <- matrix(0,ntot,nvar)
+X[,1:nvar.eff] <- mvrnorm(n=ntot, rep(0, nvar.eff), Sigma)
+X[,-(1:nvar.eff)] <- matrix(sigmaX*rnorm((nvar-nvar.eff)*dim(X)[1]),
+dim(X)[1],(nvar-nvar.eff))
+Y <- X%*%betas.true
+Y <- Y+mvrnorm(n=ntot, rep(0,nvarq), Sigma2.y)
+fold <- rep(c(0,1),times=c(ntrain,ntest))
+loss4 <- cv.joinet(Y=Y,X=X,foldid.ext=fold,mnorm=TRUE,spls=TRUE,mtps=TRUE)
+#install.packages("GPM")
+# simulated!
+#install.packages("RMTL")
+# simulated!
+data <- RMTL::Create_simulated_data(Regularization="L21") #type="Regression"
+#Y <- (do.call(what="cbind",args=data$Y)+1)/2
+#X <- data$X[[1]] # example
+#loss2 <- cv.joinet(Y=Y,X=X,mnorm=TRUE,spls=TRUE,mtps=TRUE)
+```
+-->
+<!--
+```r
 #install.packages("plsgenomics")
 data(Ecoli,package="plsgenomics")
 X <- Ecoli$CONNECdata
@@ -4,5 +4,5 @@ pkgdown_sha: ~
 articles:
 article: article.html
 joinet: joinet.html
-last_built: 2020-06-05T12:17Z
+last_built: 2020-06-08T15:55Z
@@ -136,14 +136,8 @@
 <span class='kw'>foldid.int</span> <span class='kw'>=</span> <span class='kw'>NULL</span>,
 <span class='kw'>type.measure</span> <span class='kw'>=</span> <span class='st'>"deviance"</span>,
 <span class='kw'>alpha.base</span> <span class='kw'>=</span> <span class='fl'>1</span>,
-<span class='kw'>alpha.meta</span> <span class='kw'>=</span> <span class='fl'>0</span>,
-<span class='kw'>mnorm</span> <span class='kw'>=</span> <span class='fl'>FALSE</span>,
-<span class='kw'>spls</span> <span class='kw'>=</span> <span class='fl'>FALSE</span>,
-<span class='kw'>mrce</span> <span class='kw'>=</span> <span class='fl'>FALSE</span>,
-<span class='kw'>sier</span> <span class='kw'>=</span> <span class='fl'>FALSE</span>,
-<span class='kw'>mtps</span> <span class='kw'>=</span> <span class='fl'>FALSE</span>,
-<span class='kw'>rmtl</span> <span class='kw'>=</span> <span class='fl'>FALSE</span>,
-<span class='kw'>gpm</span> <span class='kw'>=</span> <span class='fl'>FALSE</span>,
+<span class='kw'>alpha.meta</span> <span class='kw'>=</span> <span class='fl'>1</span>,
+<span class='kw'>compare</span> <span class='kw'>=</span> <span class='kw'>NULL</span>,
 <span class='kw'>mice</span> <span class='kw'>=</span> <span class='fl'>FALSE</span>,
 <span class='kw'>cvpred</span> <span class='kw'>=</span> <span class='fl'>FALSE</span>,
 <span class='no'>...</span>
@@ -211,10 +205,11 @@ numeric between \(0\) (ridge) and \(1\) (lasso)</p></td>
 numeric between \(0\) (ridge) and \(1\) (lasso)</p></td>
 </tr>
 <tr>
-<th>mnorm, spls, mrce, sier, mtps, rmtl, gpm</th>
+<th>compare</th>
 <td><p>experimental arguments<strong>:</strong>
-logical
-(<code>TRUE</code> requires packages <code>spls</code>, <code>MRCE</code>, <code>SiER</code>, <code>MTPS</code>, <code>RMTL</code> or <code>GPM</code>)</p></td>
+character vector with entries "mnorm", "spls", "mrce",
+"sier", "mtps", "rmtl", "gpm" and others
+(requires packages <code>spls</code>, <code>MRCE</code>, <code>SiER</code>, <code>MTPS</code>, <code>RMTL</code> or <code>GPM</code>)</p></td>
 </tr>
 <tr>
 <th>mice</th>
@@ -14,14 +14,8 @@ cv.joinet(
 foldid.int = NULL,
 type.measure = "deviance",
 alpha.base = 1,
-alpha.meta = 0,
-mnorm = FALSE,
-spls = FALSE,
-mrce = FALSE,
-sier = FALSE,
-mtps = FALSE,
-rmtl = FALSE,
-gpm = FALSE,
+alpha.meta = 1,
+compare = NULL,
 mice = FALSE,
 cvpred = FALSE,
 ...
@@ -66,9 +60,10 @@ numeric between \eqn{0} (ridge) and \eqn{1} (lasso)}
 \item{alpha.meta}{elastic net mixing parameter for meta learner\strong{:}
 numeric between \eqn{0} (ridge) and \eqn{1} (lasso)}
-\item{mnorm, spls, mrce, sier, mtps, rmtl, gpm}{experimental arguments\strong{:}
-logical
-(\code{TRUE} requires packages \code{spls}, \code{MRCE}, \code{SiER}, \code{MTPS}, \code{RMTL} or \code{GPM})}
+\item{compare}{experimental arguments\strong{:}
+character vector with entries "mnorm", "spls", "mrce",
+"sier", "mtps", "rmtl", "gpm" and others
+(requires packages \code{spls}, \code{MRCE}, \code{SiER}, \code{MTPS}, \code{RMTL} or \code{GPM})}
 \item{mice}{missing data imputation\strong{:}
 logical (\code{mice=TRUE} requires package \code{mice})}
@@ -125,12 +125,73 @@ cv.joinet(Y=Y,X=X,family=family)
 Armin Rauschenberger and Enrico Glaab (2020). "joinet: predicting correlated outcomes jointly to improve clinical prognosis". *Manuscript in preparation.*
 <!--
 ```{r,eval=FALSE}
 #install.packages("MTPS")
-data("HIV",package="MTPS",verbose=TRUE)
-loss1 <- cv.joinet(Y=YY,X=XX,mnorm=TRUE,mtps=TRUE)
+data("HIV",package="MTPS")
+loss1 <- cv.joinet(Y=YY,X=XX,mnorm=TRUE,spls=TRUE,mtps=TRUE)
+#install.packages("spls")
+data(yeast,package="spls")
+loss2 <- cv.joinet(Y=yeast$y,X=yeast$x,mnorm=TRUE,spls=TRUE,mtps=TRUE)
+data(mice,package="spls")
+loss3 <- cv.joinet(Y=mice$y,X=mice$x,mnorm=TRUE,spls=TRUE,mtps=TRUE)
+# install.packages("MRCE")
+data(stock04,package="MRCE",verbose=TRUE)
+# otherwise simulated
+#install.packages("SiER")
+# simulated!
+library(MASS)
+total.noise <- 0.1
+rho <- 0.3
+rho.e <- 0.2
+nvar=500
+nvarq <- 3
+sigma2 <- total.noise/nvarq