Commit db1b8bd2 authored by Armin Rauschenberger

readme

parent 89f6b3ea
^Readme\.Rmd$
^\.travis\.yml$
^_pkgdown\.yml$
^docs$
^cran-comments\.md$
^appveyor\.yml$
^\.Rhistory$
^CRAN-RELEASE$
getwd()
setwd("/Volumes/KEY/joinet/analysis")
getwd()
setwd("/Volumes/KEY/joinet")
dir()
#<<start>>
load("results/simulation.RData")
cond <- lapply(loss,length)==2
if(any(!cond)){
warning("At least one error.",call.=FALSE)
grid <- grid[cond,]; loss <- loss[cond]
}
#--- computation time ---
time <- sapply(loss,function(x) unlist(x$time))
#round(sort(colMeans(apply(time,1,function(x) x/time["meta",]))),digits=1)
sort(round(rowMeans(time),digits=1))
#--- average ---
loss <- lapply(loss,function(x) x$loss)
loss
dim(loss)
cond
sum(cond)
mean(cond)
#<<start>>
load("results/simulation.RData")
cond <- lapply(loss,length)==2
if(any(!cond)){
warning("At least one error.",call.=FALSE)
grid <- grid[cond,]; loss <- loss[cond]
}
loss
mean <- sapply(loss,function(x) rowMeans(x))
#--- average ---
loss <- lapply(loss,function(x) x$loss)
loss
length(loss)
sapply(loss,function(x) rowMeans(x))
mean <- sapply(loss,function(x) rowMeans(x))
mean
dim(mean)
mean
prop <- sapply(loss[cond],function(x) rowMeans(100*x/matrix(x["none",],nrow=nrow(x),ncol=ncol(x),byrow=TRUE))[-nrow(x)]) # old (first re-scale, then average)
dim(prop)
rownames(mean)
mean[rownames(mean)!="none",]
mean["none",]
mean
mean["none",]
dim(mean)
matrix(mean["none",],nrow=nrow(mean),ncol=ncol(mean))
matrix(mean["none",],nrow=nrow(mean),ncol=ncol(mean),byrow=TRUE)
test <- mean[rownames(mean)!="none",]/matrix(mean["none",],nrow=nrow(mean)-1,ncol=ncol(mean),byrow=TRUE)
#<<start>>
load("results/simulation.RData")
cond <- lapply(loss,length)==2
if(any(!cond)){
warning("At least one error.",call.=FALSE)
grid <- grid[cond,]; loss <- loss[cond]
}
#--- computation time ---
time <- sapply(loss,function(x) unlist(x$time))
#round(sort(colMeans(apply(time,1,function(x) x/time["meta",]))),digits=1)
sort(round(rowMeans(time),digits=1))
#--- average ---
loss <- lapply(loss,function(x) x$loss)
prop <- sapply(loss[cond],function(x) rowMeans(100*x/matrix(x["none",],nrow=nrow(x),ncol=ncol(x),byrow=TRUE))[-nrow(x)]) # old (first re-scale, then average)
mean <- sapply(loss,function(x) rowMeans(x)) # new (first average)
test <- mean[rownames(mean)!="none",]/matrix(mean["none",],nrow=nrow(mean)-1,ncol=ncol(mean),byrow=TRUE)
prop
test
mean <- sapply(loss,function(x) rowMeans(x)) # new (first average)
test <- 100*mean[rownames(mean)!="none",]/matrix(mean["none",],nrow=nrow(mean)-1,ncol=ncol(mean),byrow=TRUE)
test
prop
cor(test,prop)
prop
test
prop
test
prop
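# Minimal sketch (hypothetical 2x2 loss matrix, not taken from the simulation results)
# contrasting the two summaries compared above; rows are methods, columns are runs.
x <- matrix(c(1,2,4,1), nrow=2, dimnames=list(c("meta","none"), NULL))
# old: re-scale each run by the "none" loss, then average over runs
rowMeans(100*x/matrix(x["none",], nrow=nrow(x), ncol=ncol(x), byrow=TRUE))["meta"]  # (50+400)/2 = 225
# new: average over runs first, then re-scale by the average "none" loss
100*mean(x["meta",])/mean(x["none",])  # 100*2.5/1.5 = 166.7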
i <- 1; j <- 2
warning(paste("Columns",i,"and",j,"are negatively correlated. Consider using constraint=FALSE."),call.=FALSE)
warning(paste("Columns",i,"and",j,"are negatively correlated. Consider using constraint=FALSE."),call.=FALSE)
?glmnet::glmnet
weight <- c(0,1,0,0,1)
cornet:::.check(x=weight,min=0,max=1,null=TRUE)
cornet:::.check
cornet:::.check(x=weight,type="matrix",min=0,max=1,null=TRUE)
weight
weight <- matrix(c(0,1,0,0,1,1),nrow=2)
cornet:::.check(x=weight,type="matrix",min=0,max=1,null=TRUE)
weight = NULL
cornet:::.check(x=weight,type="matrix",min=0,max=1,null=TRUE)
1/0
pkg <- "/Volumes/KEY/joinet/package"
setwd(dir=pkg)
system("ssh -T git@github.com")
system("git remote origin rauschenberger/joinet.git")
system("git remote rauschenberger/joinet.git")
#system("ssh -T git@github.com")
system("git remote origin rauschenberger/joinet.git")
#system("ssh -T git@github.com")
system("git remote origin github.com/rauschenberger/joinet.git")
#system("ssh -T git@github.com")
system("git remote origin https://github.com/rauschenberger/joinet.git")
system("git remote set-url origin https://rauschenberger:Juncker1419@github.com/rauschenberger/joinet.git")
#system("ssh -T git@github.com")
system("git remote origin https://github.com/rauschenberger/joinet.git")
setwd(pkg)
# new approach (ssh)
system("ssh -T git@github.com")
system("git remote set-url origin git@github.com:rauschenberger/joinet.git")
rm(list=ls())
knitr::opts_chunk$set(echo=TRUE,eval=FALSE)
#setwd("/Volumes/KEY/joinet")
#setwd("C:/Users/armin.rauschenberger/Desktop/joinet")
#source("C:/Users/armin.rauschenberger/Desktop/joinet/joinet/R/functions.R")
#pkgs <- c("devtools","missRanger","mice","mvtnorm","glmnet","earth","spls","MTPS","RMTL","MultivariateRandomForest","mcen","MRCE","remMap","SiER","GPM","MLPUGS","benchmarkme")
#install.packages(pkgs)
#devtools::install_github("rauschenberger/joinet")
#devtools::install_github("cran/MRCE"); devtools::install_github("cran/remMap")
#install.packages("joinet_0.0.X.tar.gz",repos=NULL,type="source")
#library(joinet)
dir()
getwd()
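# Simulation grid (descriptive note on the settings constructed below):
# rho_x = pairwise correlation among the p inputs,
# rho_b = pairwise correlation among the q effect vectors,
# delta = approximate share of signal variance in the outputs,
# p = number of inputs, nzero = number of non-zero effects per output;
# each setting is replicated 10 times.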
grid <- list()
grid$rho_x <- c(0.00,0.10,0.30)
grid$rho_b <- c(0.00,0.50,0.90)
delta <- 0.8
grid <- expand.grid(grid)
grid <- rbind(grid,grid,grid)
grid$p <- rep(c(10,500,500),each=nrow(grid)/3)
grid$nzero <- rep(c(5,5,100),each=nrow(grid)/3)
grid <- grid[rep(1:nrow(grid),times=10),]
n0 <- 100; n1 <- 10000
n <- n0 + n1
q <- 3
foldid.ext <- rep(c(0,1),times=c(n0,n1))
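# foldid.ext assigns the n = n0 + n1 samples to two external folds for cv.joinet:
# the first n0 = 100 samples (fold 0) and the remaining n1 = 10000 samples (fold 1),
# presumably training versus hold-out data.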
loss <- list(); cor <- numeric()
for(i in seq_len(nrow(grid))){
p <- grid$p[i]
set.seed(i)
cat("i =",i,"\n")
#--- features ---
mean <- rep(0,times=p)
sigma <- matrix(grid$rho_x[i],nrow=p,ncol=p)
diag(sigma) <- 1
X <- mvtnorm::rmvnorm(n=n,mean=mean,sigma=sigma)
#--- effects --- (multivariate Gaussian)
mean <- rep(0,times=q)
sigma <- matrix(data=grid$rho_b[i],nrow=q,ncol=q)
diag(sigma) <- 1
beta <- mvtnorm::rmvnorm(n=p,mean=mean,sigma=sigma)
#beta <- 1*apply(beta,2,function(x) x>(sort(x,decreasing=TRUE)[grid$nzero[i]])) # old (either zero or one)
beta <- 1*apply(beta,2,function(x) ifelse(x>sort(x,decreasing=TRUE)[m],x,0)) # new (either zero or non-zero)
#-- effects --- (multivariate binomial)
#sigma <- matrix(grid$rho_b[i],nrow=q,ncol=q); diag(sigma) <- 1
#beta <- bindata::rmvbin(n=p,margprob=rep(grid$prop[i],times=q),bincorr=sigma)
#--- outcomes ---
signal <- scale(X%*%beta)
signal[is.na(signal)] <- 0
noise <- matrix(rnorm(n*q),nrow=n,ncol=q)
Y <- sqrt(delta)*signal + sqrt(1-delta)*noise
# binomial: Y <- round(exp(Y)/(1+exp(Y)))
cors <- stats::cor(Y)
diag(cors) <- NA
cor[i] <- mean(cors,na.rm=TRUE)
#--- holdout ---
alpha.base <- 1*(grid$nzero[i] <= 10) # sparse vs dense
compare <- TRUE
loss[[i]] <- tryCatch(expr=cv.joinet(Y=Y,X=X,family="gaussian",compare=compare,foldid.ext=foldid.ext,alpha.base=alpha.base,alpha.meta=1,times=TRUE),error=function(x) NA)
}
install.packages("mvtnorm")
loss <- list(); cor <- numeric()
for(i in seq_len(nrow(grid))){
p <- grid$p[i]
set.seed(i)
cat("i =",i,"\n")
#--- features ---
mean <- rep(0,times=p)
sigma <- matrix(grid$rho_x[i],nrow=p,ncol=p)
diag(sigma) <- 1
X <- mvtnorm::rmvnorm(n=n,mean=mean,sigma=sigma)
#--- effects --- (multivariate Gaussian)
mean <- rep(0,times=q)
sigma <- matrix(data=grid$rho_b[i],nrow=q,ncol=q)
diag(sigma) <- 1
beta <- mvtnorm::rmvnorm(n=p,mean=mean,sigma=sigma)
#beta <- 1*apply(beta,2,function(x) x>(sort(x,decreasing=TRUE)[grid$nzero[i]])) # old (either zero or one)
beta <- 1*apply(beta,2,function(x) ifelse(x>sort(x,decreasing=TRUE)[m],x,0)) # new (either zero or non-zero)
#-- effects --- (multivariate binomial)
#sigma <- matrix(grid$rho_b[i],nrow=q,ncol=q); diag(sigma) <- 1
#beta <- bindata::rmvbin(n=p,margprob=rep(grid$prop[i],times=q),bincorr=sigma)
#--- outcomes ---
signal <- scale(X%*%beta)
signal[is.na(signal)] <- 0
noise <- matrix(rnorm(n*q),nrow=n,ncol=q)
Y <- sqrt(delta)*signal + sqrt(1-delta)*noise
# binomial: Y <- round(exp(Y)/(1+exp(Y)))
cors <- stats::cor(Y)
diag(cors) <- NA
cor[i] <- mean(cors,na.rm=TRUE)
#--- holdout ---
alpha.base <- 1*(grid$nzero[i] <= 10) # sparse vs dense
compare <- TRUE
loss[[i]] <- tryCatch(expr=cv.joinet(Y=Y,X=X,family="gaussian",compare=compare,foldid.ext=foldid.ext,alpha.base=alpha.base,alpha.meta=1,times=TRUE),error=function(x) NA)
}
i
p <- grid$p[i]
set.seed(i)
cat("i =",i,"\n")
#--- features ---
mean <- rep(0,times=p)
sigma <- matrix(grid$rho_x[i],nrow=p,ncol=p)
diag(sigma) <- 1
X <- mvtnorm::rmvnorm(n=n,mean=mean,sigma=sigma)
#--- effects --- (multivariate Gaussian)
mean <- rep(0,times=q)
sigma <- matrix(data=grid$rho_b[i],nrow=q,ncol=q)
diag(sigma) <- 1
beta <- mvtnorm::rmvnorm(n=p,mean=mean,sigma=sigma)
#beta <- 1*apply(beta,2,function(x) x>(sort(x,decreasing=TRUE)[grid$nzero[i]])) # old (either zero or one)
beta <- 1*apply(beta,2,function(x) ifelse(x>sort(x,decreasing=TRUE)[m],x,0)) # new (either zero or non-zero)
#beta <- 1*apply(beta,2,function(x) x>(sort(x,decreasing=TRUE)[grid$nzero[i]])) # old (either zero or one)
beta <- 1*apply(beta,2,function(x) ifelse(x>sort(x,decreasing=TRUE)[grid$nzero[i]],x,0)) # new (either zero or non-zero)
#--- outcomes ---
signal <- scale(X%*%beta)
signal[is.na(signal)] <- 0
noise <- matrix(rnorm(n*q),nrow=n,ncol=q)
Y <- sqrt(delta)*signal + sqrt(1-delta)*noise
# binomial: Y <- round(exp(Y)/(1+exp(Y)))
cors <- stats::cor(Y)
diag(cors) <- NA
cor[i] <- mean(cors,na.rm=TRUE)
#--- holdout ---
alpha.base <- 1*(grid$nzero[i] <= 10) # sparse vs dense
compare <- TRUE
loss[[i]] <- tryCatch(expr=cv.joinet(Y=Y,X=X,family="gaussian",compare=compare,foldid.ext=foldid.ext,alpha.base=alpha.base,alpha.meta=1,times=TRUE),error=function(x) NA)
loss[[i]]
cv.joinet(Y=Y,X=X,family="gaussian",compare=compare,foldid.ext=foldid.ext,alpha.base=alpha.base,alpha.meta=1,times=TRUE)
library(joinet)
cv.joinet(Y=Y,X=X,family="gaussian",compare=compare,foldid.ext=foldid.ext,alpha.base=alpha.base,alpha.meta=1,times=TRUE)
pkgs <- c("devtools","missRanger","mice","mvtnorm","glmnet","earth","spls","MTPS","RMTL","MultivariateRandomForest","mcen","MRCE","remMap","SiER","GPM","MLPUGS","benchmarkme")
install.packages(pkgs)
install.packages(pkgs)
devtools::intall_github("cran/MRCE")
devtools::install_github("cran/MRCE")
devtools::intall_github("cran/remMap")
devtools::install_github("cran/remMap")
devtools::install_github("cran/MRCE")
?install_github
devtools::install_github("cran/QUIC")
devtools::install_github("cran/MRCE")
# R for travis: see documentation at https://docs.travis-ci.com/user/languages/r
language: R
os:
- linux
sudo: false
cache: packages
use_bioc: true
bioc_required: true
r:
- devel
r_packages:
- covr
after_success:
- Rscript -e 'library(covr); codecov()'
This package was submitted to CRAN on 2021-03-09.
Once it is accepted, delete this file and tag the release (commit 0b88980).
Package: joinet
Version: 0.0.8
Title: Multivariate Elastic Net Regression
Description: Implements high-dimensional multivariate regression by stacked generalisation (Wolpert 1992 <doi:10.1016/S0893-6080(05)80023-1>). For positively correlated outcomes, a single multivariate regression is typically more predictive than multiple univariate regressions. Includes functions for model fitting, extracting coefficients, outcome prediction, and performance measurement. If required, install MRCE or remMap from GitHub (<https://github.com/cran/MRCE>, <https://github.com/cran/remMap>).
Depends: R (>= 3.0.0)
Imports: glmnet, palasso, cornet
Suggests: knitr, rmarkdown, testthat, MASS
Enhances: mice, earth, spls, MRCE, remMap, MultivariateRandomForest, SiER, mcen, GPM, RMTL, MTPS
Authors@R: person("Armin","Rauschenberger",email="armin.rauschenberger@uni.lu",role=c("aut","cre"))
VignetteBuilder: knitr
License: GPL-3
LazyData: true
Language: en-GB
RoxygenNote: 7.1.1
URL: https://github.com/rauschenberger/joinet
BugReports: https://github.com/rauschenberger/joinet/issues
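A minimal usage sketch of the exported functions, on simulated toy data (the newx argument of predict() is an assumption based on the glmnet convention; the other calls follow the NAMESPACE and function signature shown in this commit):

set.seed(1)
library(joinet)
n <- 50; p <- 20; q <- 3
X <- matrix(rnorm(n*p), nrow=n, ncol=p)          # inputs
signal <- X %*% rnorm(p)                         # shared signal across outputs
Y <- matrix(signal, nrow=n, ncol=q) + matrix(rnorm(n*q), nrow=n, ncol=q)  # positively correlated outputs
fit <- joinet(Y=Y, X=X, family="gaussian")       # model fitting
coef(fit)                                        # extracting coefficients
predict(fit, newx=X)                             # outcome prediction
cv.joinet(Y=Y, X=X, family="gaussian")           # performance measurement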
# Generated by roxygen2: do not edit by hand
S3method(coef,joinet)
S3method(predict,joinet)
S3method(weights,joinet)
export(cv.joinet)
export(joinet)
importFrom(stats,weights)
## joinet 0.0.8 (2021-06-16)
* extension (with weights, without constraints)
## joinet 0.0.6 (2020-11-18)
* conditional execution
## joinet 0.0.5 (2020-10-02)
* updated documentation
## joinet 0.0.4 (2020-05-06)
* added competing models
## joinet 0.0.3 (2019-11-12)
* changed glmnet exports
## joinet 0.0.2 (2019-08-08)
* performance comparison
## joinet 0.0.1 (2019-08-03)
* first submission
\ No newline at end of file
@@ -17,12 +17,12 @@
#' @param Y
#' outputs\strong{:}
#' numeric matrix with \eqn{n} rows (samples)
#' and \eqn{q} columns (variables)
#' and \eqn{q} columns (outputs)
#'
#' @param X
#' inputs\strong{:}
#' numeric matrix with \eqn{n} rows (samples)
#' and \eqn{p} columns (variables)
#' and \eqn{p} columns (inputs)
#'
#' @param family
#' distribution\strong{:}
@@ -51,13 +51,17 @@
#' elastic net mixing parameter for meta learners\strong{:}
#' numeric between \eqn{0} (ridge) and \eqn{1} (lasso)
#'
#' @param constraint
#' non-negativity constraints\strong{:}
#' logical (see details)
#'
#' @param weight
#' inclusion/exclusion of variables\strong{:}
#' logical matrix with \eqn{q} rows and \eqn{p} columns,
#' input-output effects\strong{:}
#' matrix with \eqn{p} rows (inputs) and \eqn{q} columns (outputs)
#' with entries \eqn{0} (exclude) and \eqn{1} (include),
#' or \code{NULL} (see details)
#'
#' @param sign
#' output-output effects\strong{:}
#' matrix with \eqn{q} rows ("meta-inputs") and \eqn{q} columns (outputs),
#' with entries \eqn{-1} (negative), \eqn{0} (none),
#' \eqn{1} (positive) and \eqn{NA} (any),
#' or \code{NULL} (see details)
#'
#' @param ...
@@ -69,19 +73,46 @@
#' \emph{Manuscript in preparation}.
#'
#' @details
#' \strong{non-negativity constraints:}
#' TO BE DELETED:
#' constraint
#' non-negativity constraints\strong{:}
#' logical (see details)
#' non-negativity constraints\strong{:}
#' If it is reasonable to assume that the outcomes
#' are \emph{positively} correlated
#' (potentially after changing the sign of some outcomes)
#' we recommend setting \code{constraint=TRUE}.
#' Then non-negativity constraints are imposed on the meta learner.
#'
#' \strong{inclusion/exclusion of variables:}
#' The entry in the \eqn{j}th column and the \eqn{k}th row
#' indicates whether the \eqn{j}th feature may be used for
#' modelling the \eqn{k}th outcome
#' (where \eqn{0} means \code{FALSE} and
#' \eqn{1} means \code{TRUE}).
#' \strong{input-output relations:}
#' In this matrix with \eqn{p} rows and \eqn{q} columns,
#' the entry in the \eqn{j}th row and the \eqn{k}th column
#' indicates whether the \eqn{j}th input may be used for
#' modelling the \eqn{k}th output
#' (where \eqn{0} means no and
#' \eqn{1} means yes).
#' By default (\code{weight=NULL}), all entries are set to \eqn{1},
#' i.e. every input may be used for modelling every output.
#'
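#' For example (a hypothetical sketch, not taken from the package),
#' \code{weight <- matrix(1,nrow=3,ncol=2); weight[3,1] <- 0}
#' would exclude the third input from the model for the first output,
#' while keeping all other input-output effects available.
#'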
#' \strong{output-output relations:}
#' In this matrix with \eqn{q} rows and \eqn{q} columns,
#' the entry in the \eqn{k}th row and the \eqn{l}th column
#' indicates how the \eqn{k}th output may be used for
#' modelling the \eqn{l}th output
#' (where \eqn{-1} means negative effect,
#' \eqn{0} means no effect,
#' \eqn{1} means positive effect,
#' and \eqn{NA} means any effect).
#' All entries on the diagonal must equal \eqn{1}.
#' By default (\code{sign=NULL}),
#' Kendall correlation determines this matrix,
#' with \eqn{-1} for significant negative, \eqn{0} for insignificant,
#' \eqn{1} for significant positive correlations.
#' The short-cuts \code{sign=1} and \code{sign=NA} set all off-diagonal entries
#' equal to \eqn{1} (non-negativity constraints)
#' or \code{NA} (no constraints), respectively.
#' The former short-cut is useful if all pairs of outcomes
#' are assumed to be \emph{positively} correlated
#' (potentially after changing the sign of some outcomes).
#'
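#' For example (a hypothetical sketch for three outputs, not taken from the package),
#' \code{sign <- matrix(NA,nrow=3,ncol=3); diag(sign) <- 1; sign[1,2] <- 0}
#' would leave all effects unconstrained,
#' except that the first output would not be used for modelling the second output.
#'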
#' \strong{elastic net:}
#' \code{alpha.base} controls input-output effects,
@@ -117,54 +148,120 @@
#' \dontrun{
#' browseVignettes("joinet") # further examples}
#'
joinet <- function(Y,X,family="gaussian",nfolds=10,foldid=NULL,type.measure="deviance",alpha.base=1,alpha.meta=1,constraint=TRUE,weight=NULL,...){
# IMPLEMENT CODE FOR CONSTRAINT AND WEIGHT!
joinet <- function(Y,X,family="gaussian",nfolds=10,foldid=NULL,type.measure="deviance",alpha.base=1,alpha.meta=1,weight=NULL,sign=NULL,...){
# VERIFY CODE FOR WEIGHT AND SIGN!
#--- temporary ---
# family <- "gaussian"; nfolds <- 10; foldid <- NULL; type.measure <- "deviance"; alpha.base <- alpha.meta <- 1; constraint <- TRUE; weight <- NULL
# family <- "gaussian"; nfolds <- 10; foldid <- NULL; type.measure <- "deviance"; alpha.base <- alpha.meta <- 1; weight <- NULL; sign <- NULL
#--- checks ---
Y <- as.matrix(Y)
X <- as.matrix(X)
cornet:::.check(x=Y,type="matrix",miss=TRUE)
if(constraint){
for(i in 1:(ncol(Y)-1)){
for(j in i:ncol(Y)){
cor <- stats::cor.test(Y[,i],Y[,j],use="pairwise.complete.obs")
if(cor$statistic<0 & cor$p.value<0.05){
warning(paste("Columns",i,"and",j,"are negatively correlated. Consider using constraint=FALSE."),call.=FALSE)
}
}
}
}
#if(constraint){
# for(i in 1:(ncol(Y)-1)){
# for(j in i:ncol(Y)){
# cor <- stats::cor.test(Y[,i],Y[,j],use="pairwise.complete.obs")
# if(cor$estimate<0 & cor$p.value<0.05){
# warning(paste("Columns",i,"and",j,"are negatively correlated. Consider using constraint=FALSE."),call.=FALSE)
# }
# }
# }
#}
#if(any(stats::cor(Y,use="pairwise.complete.obs")<0,na.rm=TRUE)){warning("Negative correlation!",call.=FALSE)}
cornet:::.check(x=X,type="matrix")
#cornet:::.check(x=family,type="vector",values=c("gaussian","binomial","poisson"))
if(nrow(Y)!=nrow(X)){stop("Contradictory sample size.",call.=FALSE)}
n <- nrow(Y); q <- ncol(Y); p <- ncol(X)
cornet:::.check(x=nfolds,type="scalar",min=3)
cornet:::.check(x=foldid,type="vector",values=seq_len(nfolds),null=TRUE)
cornet:::.check(x=type.measure,type="string",values=c("deviance","class","mse","mae")) # not auc (min/max confusion)
cornet:::.check(x=alpha.base,type="scalar",min=0,max=1)
cornet:::.check(x=alpha.meta,type="scalar",min=0,max=1)
cornet:::.check(x=weight,type="matrix",min=0,max=1,null=TRUE)
cornet:::.check(x=weight,type="matrix",min=0,max=1,null=TRUE,dim=c(p,q))
if(!is.null(c(list(...)$lower.limits,list(...)$upper.limits))){
stop("Reserved arguments \"lower.limits\" and \"upper.limits\".",call.=FALSE)
}
#--- dimensionality ---
n <- nrow(Y)
q <- ncol(Y)
p <- ncol(X)
if(is.null(weight)){
pf <- matrix(1,nrow=q,ncol=p)
pf <- matrix(1,nrow=p,ncol=q)
} else {
pf <- 1/weight
}
#--- constraints ---
null <- is.null(sign)
if(null){sign <- 0}
if(!is.matrix(sign)){
sign <- matrix(sign,nrow=q,ncol=q)
diag(sign) <- 1
}
if(any(diag(sign)!=1)){
warning("Matrix \"sign\" requires ones on the diagonal.",call.=FALSE)
}
temp <- sign[lower.tri(sign)|upper.tri(sign)]
if(!null & all(!is.na(temp)&temp==0)){
warning("Matrix \"sign\" only has zeros off the diagonal.",call.=FALSE)
}