#' @title Preprocess an Affymetrix dataset with SCAN.
#'
#' @description This function preprocesses an Affymetrix dataset using SCAN and saves the
#' results in the given TSV file(s). In addition, it returns the ESET object(s).
#'
#' The function assumes that a folder containing the raw data exists (as CEL files).
#'
#' Note: the function does not check for the existence of folders or files.
#'
#' @param input_data_dir A string representing the folder that contains the input data.
#' @param output_data_files A vector of strings representing the files that should contain the
#' preprocessed data. At least one value, at most two if batch_correction is "BOTH".
#' @param compressed A boolean representing whether the CEL files are compressed. This
#' is FALSE by default.
#' @param batch_correction A string indicating whether batch correction should
#' be performed. Options are "TRUE", "FALSE" and "BOTH"; defaults to "FALSE".
#' @param batch_filename A string indicating where the batch information can be found,
#' defaults to "Batch.tsv".
#' @param clean_samples A boolean indicating whether the dataset should be cleaned by removing
#' the samples that do not have clinical data. Defaults to FALSE.
#' @param verbose A boolean representing whether the function should display log information. This
#' is TRUE by default.
#' @return The expression data as ESET objects. Potentially only one object (therefore unlisted).
preprocess_data_affymetrix_scan <- function(input_data_dir, output_data_files, compressed = FALSE,
                                            batch_correction = "FALSE", batch_filename = "Batch.tsv",
                                            clean_samples = FALSE, verbose = TRUE) {

  # We define the I/Os.
  raw_data_input_dir <- paste0(input_data_dir, "RAW/")

  # We run the SCAN pre-processing method on the data.
  # We do not run the fast analysis (by default).
  input_data_regexp <- paste0(raw_data_input_dir, "*")
  remove(raw_data_input_dir)
  eset <- SCAN.UPC::SCAN(input_data_regexp, outFilePath = output_data_files[1])

  # We remove the probes that have 0 variance across the samples.
  exp_data <- Biobase::exprs(eset)
  probe_vars <- apply(exp_data, 1, var)
  probe_var_0 <- names(probe_vars[probe_vars == 0])
  if (length(probe_var_0) > 0) {
    clean_probe_list <- setdiff(rownames(exp_data), probe_var_0)
    eset <- Biobase::ExpressionSet(exp_data[clean_probe_list, ])
    remove(clean_probe_list)
  }
  remove(exp_data, probe_vars, probe_var_0)

  # We correct for the batch effect if necessary.
  eset_bc <- NULL
  if (batch_correction != "FALSE") {
    eset_bc <- correct_batch_effect(eset = eset, input_data_dir = input_data_dir)
    if (batch_correction == "TRUE") {
      eset <- eset_bc
      remove(eset_bc)
    }
  } else {
    remove(eset_bc)
  }

  # If necessary, we remove the samples that do not have clinical data.
  if (clean_samples) {

    # We load the clinical data to get the samples to keep.
    samples <- rownames(Biobase::pData(ArrayUtils::load_clinical_data(input_data_dir,
                                                                      verbose = FALSE)))

    # We only keep the samples with clinical data.
    eset <- eset[, samples]
    if (batch_correction == "BOTH") {
      eset_bc <- eset_bc[, samples]
    }
  }

  # We save the eset_bc data as a TSV file. The eset data was already saved as part of SCAN.
  if (batch_correction == "BOTH") {
    utils::write.table(Biobase::exprs(eset_bc), file = output_data_files[2],
                       sep = "\t", quote = FALSE)
  }

  # We clean up and log information.
  rm(input_data_regexp)
  if (verbose) {
    message(paste0("[", Sys.time(), "] Expression data pre-processed with SCAN."))
  }

  # We return the created ESET(s).
  if (batch_correction == "BOTH") {
    return(list(eset, eset_bc))
  } else {
    return(eset)
  }
}
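
# Usage sketch (illustrative only; the paths below are hypothetical and assume the
# expected layout, i.e. a "RAW/" subfolder with the CEL files under input_data_dir
# and clinical data readable by ArrayUtils::load_clinical_data):
#
# esets <- preprocess_data_affymetrix_scan(
#   input_data_dir    = "data/GSE12345/",
#   output_data_files = c("output/GSE12345_scan.tsv", "output/GSE12345_scan_bc.tsv"),
#   batch_correction  = "BOTH",
#   clean_samples     = TRUE
# )
# # With batch_correction = "BOTH", a list of two ExpressionSet objects is returned
# # (uncorrected and batch-corrected); otherwise a single ExpressionSet is returned.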