# HG changeset patch
# User ethevenot
# Date 1470499277 14400
# Node ID b4f5b5bc01dd3dd23aa422f9eca1de0777ed5657
planemo upload for repository https://github.com/workflow4metabolomics/qualitymetrics.git commit 73366dd3473c509341ab9ba1df8ba748d08a50a1
diff -r 000000000000 -r b4f5b5bc01dd README.md
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/README.md Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,73 @@
+Metrics and graphics to assess the quality of the data
+======================================================
+
+A Galaxy module from the [Workflow4metabolomics](http://workflow4metabolomics.org) infrastructure
+
+Status: [![Build Status](https://travis-ci.org/workflow4metabolomics/qualitymetrics.svg?branch=master)](https://travis-ci.org/workflow4metabolomics/qualitymetrics).
+
+### Description
+
+**Version:** 2.2.4
+**Date:** 2016-08-04
+**Authors:** Marion Landi (INRA, PFEM), Mélanie Pétéra (INRA, PFEM), and Etienne A. Thévenot (CEA, LIST)
+**Email:** [melanie.petera(at)clermont.inra.fr](mailto:melanie.petera@clermont.inra.fr), [etienne.thevenot(at)cea.fr](mailto:etienne.thevenot@cea.fr)
+**Citation:**
+**Licence:** CeCILL
+**Reference history:** [W4M00001b_sacurine-complete](http://galaxy.workflow4metabolomics.org/history/list_published)
+**Funding:** Agence Nationale de la Recherche ([MetaboHUB](http://www.metabohub.fr/index.php?lang=en&Itemid=473) national infrastructure for metabolomics and fluxomics, ANR-11-INBS-0010 grant)
+
+### Installation
+
+* Configuration file:
+ + `qualitymetrics_config.xml`
+* Image files:
+ + `static/images/QualityControl.png`
+ + `static/images/qualitymetrics_workingExampleImage.png`
+* Wrapper file:
+ + `qualitymetrics_wrapper.R`
+* Script file:
+ + `qualitymetrics_script.R`
+* R packages
+ + **batch** from CRAN
+
+ ```r
+ install.packages("batch", dep=TRUE)
+ ```
+
+ + **ropls** from Bioconductor
+
+ ```r
+ source("http://www.bioconductor.org/biocLite.R")
+ biocLite("ropls")
+ ```
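+
+Note that the wrapper checks the installed **ropls** version at startup and requires 1.4.0 or above; a quick way to verify your installation (minimal check):
+
+```r
+## prints the installed 'ropls' version; the wrapper requires 1.4.0 or above
+packageVersion("ropls")
+```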
+
+### Tests
+
+The code in the wrapper can be tested by running the `runit/qualitymetrics_runtests.R` script.
+
+You will need to install the **RUnit** package in order to run it:
+```r
+install.packages('RUnit', dependencies = TRUE)
+```
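+
+The test suite can then be launched, for example, as follows (a minimal sketch, assuming the dependencies above are installed and the working directory is the repository root):
+
+```r
+## one possible way to run the RUnit test suite from within R
+system("Rscript runit/qualitymetrics_runtests.R")
+```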
+
+### News
+
+##### CHANGES IN VERSION 2.2.4
+
+INTERNAL MODIFICATION
+
+ * Additional running and installation tests added with planemo, conda, and travis
+
+##### CHANGES IN VERSION 2.2.3
+
+INTERNAL MODIFICATION
+
+ * Modifications of the 'qualitymetrics_script.R' file to handle the recent 'ropls' package versions (i.e. 1.3.15 and above) which use S4 classes
+
+ * Creating tests for the R code
+
+##### CHANGES IN VERSION 2.2.2
+
+INTERNAL MODIFICATION
+
+ * Minor internal modification
diff -r 000000000000 -r b4f5b5bc01dd build.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/build.xml Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,78 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff -r 000000000000 -r b4f5b5bc01dd easyrlibrary-lib/RcheckLibrary.R
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/easyrlibrary-lib/RcheckLibrary.R Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,124 @@
+######################################################
+# R check library
+# Coded by: M.Petera,
+# - -
+# R functions to use in R scripts
+# (management of various generic subroutines)
+# - -
+# V0: script structure + first functions
+# V1: More detailed error messages in match functions
+######################################################
+
+
+# Generic function to return an error if problems have been encountered - - - -
+
+check.err <- function(err.stock){
+
+ # err.stock = vector of results returned by check functions
+
+ if(length(err.stock)!=0){ stop("\n- - - - - - - - -\n",err.stock,"\n- - - - - - - - -\n") }
+
+}
+
+
+
+
+# Table match check functions - - - - - - - - - - - - - - - - - - - - - - - - -
+
+# To check if dataMatrix and (variable or sample)Metadata match regarding identifiers
+match2 <- function(dataMatrix, Metadata, Mtype){
+
+ # dataMatrix = data.frame containing dataMatrix
+ # Metadata = data.frame containing sampleMetadata or variableMetadata
+ # Mtype = "sample" or "variable" depending on Metadata content
+
+ err.stock <- NULL # error vector
+
+ id2 <- Metadata[,1]
+ if(Mtype=="sample"){ id1 <- colnames(dataMatrix)[-1] }
+ if(Mtype=="variable"){ id1 <- dataMatrix[,1] }
+
+ if( length(which(id1%in%id2))!=length(id1) || length(which(id2%in%id1))!=length(id2) ){
+ err.stock <- c("\nData matrix and ",Mtype," metadata do not match regarding ",Mtype," identifiers.")
+ if(length(which(id1%in%id2))!=length(id1)){
+ if(length(which(!(id1%in%id2)))<4){ err.stock <- c(err.stock,"\n The ")
+ }else{ err.stock <- c(err.stock,"\n For example, the ") }
+ err.stock <- c(err.stock,"following identifiers found in the data matrix\n",
+ " do not appear in the ",Mtype," metadata file:\n")
+ identif <- id1[which(!(id1%in%id2))][1:min(3,length(which(!(id1%in%id2))))]
+ err.stock <- c(err.stock," ",paste(identif,collapse="\n "),"\n")
+ }
+ if(length(which(id2%in%id1))!=length(id2)){
+ if(length(which(!(id2%in%id1)))<4){ err.stock <- c(err.stock,"\n The ")
+ }else{ err.stock <- c(err.stock,"\n For example, the ") }
+ err.stock <- c(err.stock,"following identifiers found in the ",Mtype," metadata file\n",
+ " do not appear in the data matrix:\n")
+ identif <- id2[which(!(id2%in%id1))][1:min(3,length(which(!(id2%in%id1))))]
+ err.stock <- c(err.stock," ",paste(identif,collapse="\n "),"\n")
+ }
+ err.stock <- c(err.stock,"\nPlease check your data.\n")
+ }
+
+ return(err.stock)
+
+}
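+
+#example (hypothetical data.frames 'myDM' and 'mysM'):
+#   check.err(match2(myDM, mysM, "sample"))   # stops with an explicit message if identifiers do not match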
+
+# To check if the 3 standard tables match regarding identifiers
+match3 <- function(dataMatrix, sampleMetadata, variableMetadata){
+
+ # dataMatrix = data.frame containing dataMatrix
+ # sampleMetadata = data.frame containing sampleMetadata
+ # variableMetadata = data.frame containing variableMetadata
+
+ err.stock <- NULL # error vector
+
+ id1 <- colnames(dataMatrix)[-1]
+ id2 <- sampleMetadata[,1]
+ id3 <- dataMatrix[,1]
+ id4 <- variableMetadata[,1]
+
+ if( length(which(id1%in%id2))!=length(id1) || length(which(id2%in%id1))!=length(id2) ){
+ err.stock <- c(err.stock,"\nData matrix and sample metadata do not match regarding sample identifiers.")
+ if(length(which(id1%in%id2))!=length(id1)){
+ if(length(which(!(id1%in%id2)))<4){ err.stock <- c(err.stock,"\n The ")
+ }else{ err.stock <- c(err.stock,"\n For example, the ") }
+ err.stock <- c(err.stock,"following identifiers found in the data matrix\n",
+ " do not appear in the sample metadata file:\n")
+ identif <- id1[which(!(id1%in%id2))][1:min(3,length(which(!(id1%in%id2))))]
+ err.stock <- c(err.stock," ",paste(identif,collapse="\n "),"\n")
+ }
+ if(length(which(id2%in%id1))!=length(id2)){
+ if(length(which(!(id2%in%id1)))<4){ err.stock <- c(err.stock,"\n The ")
+ }else{ err.stock <- c(err.stock,"\n For example, the ") }
+ err.stock <- c(err.stock,"following identifiers found in the sample metadata file\n",
+ " do not appear in the data matrix:\n")
+ identif <- id2[which(!(id2%in%id1))][1:min(3,length(which(!(id2%in%id1))))]
+ err.stock <- c(err.stock," ",paste(identif,collapse="\n "),"\n")
+ }
+ }
+
+ if( length(which(id3%in%id4))!=length(id3) || length(which(id4%in%id3))!=length(id4) ){
+ err.stock <- c(err.stock,"\nData matrix and variable metadata do not match regarding variable identifiers.")
+ if(length(which(id3%in%id4))!=length(id3)){
+ if(length(which(!(id3%in%id4)))<4){ err.stock <- c(err.stock,"\n The ")
+ }else{ err.stock <- c(err.stock,"\n For example, the ") }
+ err.stock <- c(err.stock,"following identifiers found in the data matrix\n",
+ " do not appear in the variable metadata file:\n")
+ identif <- id3[which(!(id3%in%id4))][1:min(3,length(which(!(id3%in%id4))))]
+ err.stock <- c(err.stock," ",paste(identif,collapse="\n "),"\n")
+ }
+ if(length(which(id4%in%id3))!=length(id4)){
+ if(length(which(!(id4%in%id3)))<4){ err.stock <- c(err.stock,"\n The ")
+ }else{ err.stock <- c(err.stock,"\n For example, the ") }
+ err.stock <- c(err.stock,"following identifiers found in the variable metadata file\n",
+ " do not appear in the data matrix:\n")
+ identif <- id4[which(!(id4%in%id3))][1:min(3,length(which(!(id4%in%id3))))]
+ err.stock <- c(err.stock," ",paste(identif,collapse="\n "),"\n")
+ }
+ }
+
+ if(length(err.stock)!=0){ err.stock <- c(err.stock,"\nPlease check your data.\n") }
+
+ return(err.stock)
+
+}
\ No newline at end of file
diff -r 000000000000 -r b4f5b5bc01dd easyrlibrary-lib/miniTools.R
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/easyrlibrary-lib/miniTools.R Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,133 @@
+#####################################################
+# Mini tools for Galaxy scripting
+# Coded by: M.Petera,
+# - -
+# R functions to use in R scripts and wrappers
+# to make things easier (lightening code, reducing verbose...)
+# - -
+# V0: script structure + first functions
+# V1: addition of functions to handle special characters in identifiers
+#####################################################
+
+
+# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+# Function to call packages without printing all the verbose
+# (only getting the essentials, like warning messages for example)
+
+shyLib <- function(...){
+ for(i in 1:length(list(...))){
+ suppressPackageStartupMessages(library(list(...)[[i]],character.only=TRUE))
+ }
+}
+
+#example: shyLib("xcms","pcaMethods")
+
+
+
+# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+# Function to source the required R scripts
+# /!\ WARNING: the function is currently not loaded when the script is launched,
+# so it has to be copied and pasted into the R wrapper in order to be used.
+
+if(FALSE){
+source_local <- function(...){
+ argv <- commandArgs(trailingOnly = FALSE)
+ base_dir <- dirname(substring(argv[grep("--file=", argv)], 8))
+ for(i in 1:length(list(...))){
+ source(paste(base_dir, list(...)[[i]], sep="/"))
+ }
+}
+}
+
+#example: source_local("filter_script.R","RcheckLibrary.R")
+
+
+
+# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+# Functions to store identifiers before applying make.names() and
+# to reinject them into the final matrices
+# Note: they restore the original order of the datasets' identifiers
+# - - -
+# stockID: stores the original identifiers and their original order
+#   -> requires data already checked for table matching
+# reproduceID: reinjects the original identifiers and original order into the final tables
+#   -> function to be used at the very end, when exporting tables
+
+stockID <- function(dataMatrix, Metadata, Mtype){
+ # dataMatrix = data.frame containing dataMatrix
+ # Metadata = data.frame containing sampleMetadata or variableMetadata
+ # Mtype = "sample" or "variable" depending on Metadata content
+ cname <- colnames(dataMatrix)[1]
+ # dataMatrix temporary-stock + transfo - - - -
+ if(Mtype=="sample"){
+ id.ori <- colnames(dataMatrix)[-1]
+ colnames(dataMatrix) <- make.names(colnames(dataMatrix))
+ }
+ if(Mtype=="variable"){
+ id.ori <- dataMatrix[,1]
+ dataMatrix[,1] <- make.names(dataMatrix[,1])
+ }
+ # global stock - - - - - - - - - - - - - - - -
+ id.new <- data.frame(order.ori=c(1:length(Metadata[,1])),Metadata[,1],
+ id.new=make.names(Metadata[,1]),id.ori,
+ id.new.DM=make.names(id.ori),stringsAsFactors=FALSE)
+ colnames(id.new)[c(2,4)] <- c(colnames(Metadata)[1],cname)
+ # Metadata transfo + returning data - - - - -
+ Metadata[,1] <- make.names(Metadata[,1])
+ return(list(id.match=id.new, dataMatrix=dataMatrix, Metadata=Metadata))
+}
+#example: A<-stockID(myDM,mysM,"sample") ; myDM<-A$dataMatrix ; mysM<-A$Metadata ; A<-A$id.match
+
+reproduceID <- function(dataMatrix, Metadata, Mtype, id.match){
+ # dataMatrix = data.frame containing dataMatrix
+ # Metadata = data.frame containing sampleMetadata or variableMetadata
+ # Mtype = "sample" or "variable" depending on Metadata content
+ # id.match = 'id.match' element produced by stockID
+  #Metadata - - - - - - - - - - - - - -
+ temp.table <- id.match[,c(1,2,3)]
+ ## Removing deleted rows
+ for(i in 1:(dim(id.match)[1])){
+ if(!(temp.table[i,3]%in%Metadata[,1])){temp.table[i,1] <- 0}
+ }
+ if(length(which(temp.table[,1]==0))!=0){
+ temp.table <- temp.table[-c(which(temp.table[,1]==0)),]
+ }
+ ## Restoring original identifiers and order
+ temp.table <- merge(x=temp.table,y=Metadata,by.x=3,by.y=1)
+ temp.table <- temp.table[order(temp.table$order.ori),]
+ Metadata <- temp.table[,-c(1,2)]
+ rownames(Metadata) <- NULL
+ #dataMatrix - - - - - - - - - - - - -
+ rownames(dataMatrix)<-dataMatrix[,1]
+ if(Mtype=="sample"){
+ dataMatrix <- t(dataMatrix[,-1])
+ }
+ temp.table <- id.match[,c(1,4,5)]
+ ## Removing deleted rows
+ for(i in 1:(dim(id.match)[1])){
+ if(!(temp.table[i,3]%in%rownames(dataMatrix))){temp.table[i,1] <- 0}
+ }
+ if(length(which(temp.table[,1]==0))!=0){
+ temp.table <- temp.table[-c(which(temp.table[,1]==0)),]
+ }
+ ## Restoring original identifiers and order
+ temp.table <- merge(x=temp.table,y=dataMatrix,by.x=3,by.y=0)
+ temp.table <- temp.table[order(temp.table$order.ori),]
+ if(Mtype=="variable"){
+ dataMatrix <- temp.table[,-c(1,2,4)]
+ colnames(dataMatrix)[1] <- colnames(id.match)[4]
+ } else {
+ rownames(temp.table) <- temp.table[,3]
+ temp.table <- t(temp.table[,-c(1,2,3)])
+ dataMatrix <- data.frame(rownames(temp.table),temp.table)
+ colnames(dataMatrix)[1] <- colnames(id.match)[4]
+ }
+ rownames(dataMatrix) <- NULL
+ # return datasets - - - - - - - - - - -
+ return(list(dataMatrix=dataMatrix, Metadata=Metadata))
+}
+#example: B<-reproduceID(myDM,mysM,"sample",A) ; myDM<-B$dataMatrix ; mysM<-B$Metadata
+
+
+
+# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
diff -r 000000000000 -r b4f5b5bc01dd qualitymetrics_config.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/qualitymetrics_config.xml Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,354 @@
+
+ Metrics and graphics to check the quality of the data
+
+
+ R
+ r-batch
+ bioconductor-ropls
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. class:: infomark
+
+**Authors** Marion Landi, Melanie Petera and Etienne Thevenot (W4M Core Development Team)
+
+---------------------------------------------------
+
+.. class:: infomark
+
+**Tool updates**
+
+See the **NEWS** section at the bottom of this page
+
+---------------------------------------------------
+
+.. class:: infomark
+
+**References**
+
+| Thevenot EA., Roux A., Xu Y., Ezan E., and Junot C. (2015). Analysis of the human adult urinary metabolome variations with age, body mass index and gender by implementing a comprehensive workflow for univariate and OPLS statistical analyses. *Journal of Proteome Research*, **14**:3322-3335 (http://dx.doi.org/10.1021/acs.jproteome.5b00354)
+| Mason R., Tracy N. and Young J. (1997). A practical approach for interpreting multivariate T2 control chart signals. *Journal of Quality Technology*, **29**:396-406.
+| Alonso A., Julia A., Beltran A., Vinaixa M., Diaz M., Ibanez L., Correig X. and Marsal S. (2011). AStream: an R package for annotating LC/MS metabolomic data. *Bioinformatics*, **27**:1339-1340. (http://dx.doi.org/10.1093/bioinformatics/btr138)
+
+---------------------------------------------------
+
+========================
+Quality Metrics
+========================
+
+-----------
+Description
+-----------
+
+ | The **Quality Metrics** tool provides quality metrics of the samples and variables, and visualization of the data matrix
+ | The optional *Coefficient of Variation* argument allows you to flag the variables with a pool CV (or a pool CV over sample CV ratio) above a specific threshold
+ | The advanced *PoolAsPool1* argument is used when correlations with pool dilutions are computed: When set to TRUE [default], samples indicated as "pool" will be considered as "pool1" for the correlation together with the other pool dilutions (e.g. "pool2", "pool4", etc.); otherwise, "pool" samples will not be considered to compute the correlation (this enables the experimenter to have distinct "pool" samples for the computation of CV and "pool1" samples for the computation of dilution)
+ | The **sampleMetadata** is returned as output with 3 additional columns containing the p-values for the Hotelling's T2 test, the Z-score of the intensity deciles, and the Z-score of the proportion of missing values
+ | The **variableMetadata** is returned as output; in case a **sampleType** column is included in the input sampleMetadata file, additional columns will be added to indicate the variable quality metrics (eg mean, sd, CV on 'pool', 'sample' or 'blank', or correlation with pool dilutions, depending on the known types present in the 'sampleType' column)
+ | A **figure** is generated (pdf file) which illustrates the main computed sample and variable metric values
+
+
+
+-----------------
+Workflow position
+-----------------
+
+.. image:: QualityControl.png
+ :width: 800
+
+
+
+-----------
+Input files
+-----------
+
++----------------------------+---------+
+| Parameter : num + label | Format |
++============================+=========+
+| 1 : Data matrix file | tabular |
++----------------------------+---------+
+| 2 : Sample metadata file | tabular |
++----------------------------+---------+
+| 3 : Variable metadata file | tabular |
++----------------------------+---------+
+
+----------
+Parameters
+----------
+
+Data matrix
+ | contains the intensity values of the variables.
+ |
+
+Sample metadata file
+ | contains the metadata of the samples; in particular
+ | when the 'sampleType' column is available, with known types such as 'blank', 'sample', 'pool', 'poolN' (where N is a dilution factor of the pool), metrics will be computed (eg mean, sd, CV, correlation with the dilution factor, etc) for each variable (see the 'PoolAsPool1' argument below)
+ | 'pool' (and 'sample') should be present in the 'sampleType' column when setting the 'coefficient of variation' to TRUE
+ |
+
+Variable metadata file
+ | contains variable information.
+ |
+
+Note:
+ | Required formats for the dataMatrix, sampleMetadata, and variableMetadata files are described in the **HowTo** entitled 'Format Data For Postprocessing' available on the main page of Workflow4Metabolomics.org; the formats of the 3 tables can be further checked with the **Check Format** module
+ |
+
+Coefficient of Variation
+ | If 'yes' (not default): variables are classed according to the Coefficient of Variation (CV)
+ | i.e.: CV of pools (and CV of samples if needed) are calculated and compared to a defined threshold;
+ | then variables are classed with a 0/1 coding.
+ |
+
+Which type of CV calculation should be done (only if CV=yes)
+ | Type of CV comparison that will be used.
+ | 'ratio between pool and sample CVs' **OR** 'only pool CV'
+ |
+
+Threshold (only if CV=yes)
+ | If comparing pool and sample CVs, corresponds to the maximum ratio tolerated (typically between 1.0 and 1.25).
+ | Otherwise corresponds to the maximum pool CV tolerated (typically 0.3).
+ |
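+ | For example, in the 'only pool CV' mode with a threshold of 0.3, a variable whose pool intensities have a mean of 100 and a standard deviation of 35 has a pool CV of 0.35 and is therefore flagged 0; a variable with a pool CV below or equal to the threshold is flagged 1.
+ |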
+
+PoolAsPool1 (Advanced parameter)
+ | If 'poolN' (where N is a dilution factor) sample types are present in the 'sampleType' column of the sample metadata file, the Pearson correlation of the intensity with the dilution factor is computed for each variable; the 'PoolAsPool1' parameter indicates whether samples of 'pool' types should be considered as 'pool1' (and hence included in the computation of dilution correlations); default is TRUE
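+ | For example, with 'pool', 'pool2', and 'pool4' samples and PoolAsPool1 set to TRUE, the correlation is computed against the dilution values 1, 1/2, and 1/4; when set to FALSE, the 'pool' samples are left out and only the 1/2 and 1/4 dilutions are used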
+
+
+------------
+Output files
+------------
+
+
+sampleMetadata.tabular
+ | tsv output
+ | 3 additional columns have been added to the input sampleMetadata file and contain the **p-values** of
+ | 1) the **Hotelling's T2** test in the first plane of PC components (Mason et al, 1997)
+ | 2) the **Z-score** of **intensity deciles** (Alonso et al, 2011)
+ | 3) the **Z-score** of the proportion of **missing values** (Alonso et al, 2011)
+ | for each test, low p-values indicate samples with extreme behaviour
+ |
+
+variableMetadata.tabular
+ | tsv output
+ | When the type of samples is available (ie the **sampleType** column is included in the input sampleMetadata file), variable metrics are computed: **sample**, **pool**, and **blank** **mean**, **sd** and **CV** (if the corresponding types are present in the 'sampleType' column), as well as **'blank' mean / 'sample' mean**, and **'pool' CV / 'sample' CV ratio**
+ | If pool dilutions have been used and are indicated in the 'sampleType' column as **poolN** where N is an integer indicating the dilution factor (eg **pool2** for a two-fold dilution of the pool; note that the non-diluted pool remains indicated as 'pool') the Pearson **correlation** (and corresponding p-value) between the intensity and the dilution factor is computed for each variable.
+ | When the **Coefficient of variation** argument is set to 'TRUE', the variableMetadata begins with 2 (or 3) columns indicating the pool CV (and the sample CV) and if the pool CV (or the ratio between pool CV and sample CV) is above the selected threshold
+ |
+
+figure.pdf
+ | Figure summarizing the various values of the computed metrics and tests; includes several visualizations of the samples (eg, PCA scores) and intensities (eg, image of the data matrix)
+ |
+
+information.txt
+ | Text file with information regarding the computed metrics, eg those depending on the availability of the 'sampleType' column and of specific types such as 'sample', 'pool', pool dilutions ('poolN'), or 'blank'
+ |
+
+
+---------------------------------------------------
+
+---------------
+Working example
+---------------
+
+|
+
+.. class:: infomark
+
+See the **W4M00001b_sacurine-complete** shared history in the **Shared Data/Published Histories** menu (https://galaxy.workflow4metabolomics.org/history/list_published)
+
+---------------------------------------------------
+
+----
+NEWS
+----
+
+CHANGES IN VERSION 2.2.4
+========================
+
+Additional running and installation tests added with planemo, conda, and travis
+
+CHANGES IN VERSION 2.2.3
+========================
+
+INTERNAL MODIFICATIONS
+
+Modifications of the **qualitymetrics_script.R** file to handle the recent **ropls** package versions (i.e. 1.3.15 and above) which use S4 classes
+
+Creating tests for the R code
+
+CHANGES IN VERSION 2.2.2
+========================
+
+Minor internal changes
+
+
+
+
+
+ 10.1021/acs.jproteome.5b00354
+ 10.1093/bioinformatics/btr138
+ @Article{Mason1997,
+ Title = {A practical approach for interpreting multivariate T2 control chart signals},
+ Author = {Mason, RL. and Tracy, ND. and Young, JC.},
+ Journal = {Journal of Quality Technology},
+ Year = {1997},
+ Number = {4},
+ Pages = {396-406},
+ Volume = {29},
+ }
+ 10.1093/bioinformatics/btu813
+
+
+
+
+
diff -r 000000000000 -r b4f5b5bc01dd qualitymetrics_script.R
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/qualitymetrics_script.R Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,896 @@
+################################################################################################
+# ANALYSES FOR QUALITY CONTROL #
+# #
+# Author: Melanie PETERA #
+# User: Galaxy #
+# Starting date: 04-09-2014 #
+# V-1.0: Restriction of old filter script to CV filter #
+# V-1.1: Addition of data check #
+# V-1.2: Substitution of deletion by addition of indicator variable #
+# V-1.3: Handling special characters #
+# #
+# #
+# Input files: dataMatrix ; sampleMetadata ; variableMetadata #
+# Output files: dataMatrix ; sampleMetadata ; variableMetadata #
+# #
+################################################################################################
+
+# Parameters (for dev)
+if(FALSE){
+
+ ion.file.in <- "test/ressources/inputs/ex_data_IONS.txt" #tab file
+ meta.samp.file.in <- "test/ressources/inputs/ex_data_PROTOCOLE1.txt" #tab file
+ meta.ion.file.in <- "test/ressources/inputs/ex_data_METAION.txt" #tab file
+
+ ## ion.file.out <- "test/ressources/outputs/QCtest_ex_data_IONS.txt" #tab file
+ meta.samp.file.out <- "test/ressources/outputs/QCtest_ex_data_PROTOCOLE1.txt" #tab file
+ meta.ion.file.out <- "test/ressources/outputs/QCtest_ex_data_METAION.txt" #tab file
+
+ CV <- TRUE ; if(CV){Compa<-TRUE;seuil<-1.25}else{Compa<-NULL;seuil<-NULL}
+
+ poolAsPool1L <- FALSE
+
+ if(FALSE) { ## Sacuri dataset
+
+ ## 'example' input dir
+ exaDirInpC <- "example/input"
+
+ ion.file.in <- file.path(exaDirInpC, "dataMatrix.tsv")
+ meta.samp.file.in <- file.path(exaDirInpC, "sampleMetadata.tsv")
+ meta.ion.file.in <- file.path(exaDirInpC, "variableMetadata.tsv")
+
+ poolAsPool1L <- FALSE
+
+ ## 'example' output dir
+ exaDirOutC <- gsub("input", "output", exaDirInpC)
+
+    meta.samp.file.out <- file.path(exaDirOutC, "sampleMetadata.tsv")
+    meta.ion.file.out <- file.path(exaDirOutC, "variableMetadata.tsv")
+ fig.out <- file.path(exaDirOutC, "figure.pdf")
+ log.out <- file.path(exaDirOutC, "information.txt")
+
+ stopifnot(file.exists(exaDirOutC))
+
+ }
+
+}
+
+QualityControl <- function(ion.file.in, meta.samp.file.in, meta.ion.file.in,
+ CV, Compa, seuil, poolAsPool1L,
+ ion.file.out, meta.samp.file.out, meta.ion.file.out, fig.out, log.out){
+  # This function analyses the data in order to check their quality
+ # It needs 3 datasets: the data matrix, the variables' metadata, the samples' metadata.
+ # It generates 3 new datasets corresponding to the 3 inputs with additional columns.
+ #
+ # Parameters:
+ # - xxx.in: input files' names
+ # - xxx.out: output files' names
+ # - CV: CV calculation yes/no
+ # | > Compa: comparing pool and sample CVs (TRUE) or simple pool CV calculation (FALSE)
+ # | > seuil: maximum ratio tolerated between pool and sample CVs or maximum pool CV
+
+
+# Input -----------------------------------------------------------------------------------
+
+ion.data <- read.table(ion.file.in,sep="\t",header=TRUE,check.names=FALSE, stringsAsFactors = FALSE)
+meta.samp.data <- read.table(meta.samp.file.in,sep="\t",header=TRUE,check.names=FALSE, stringsAsFactors = FALSE)
+meta.ion.data <- read.table(meta.ion.file.in,sep="\t",header=TRUE,check.names=FALSE, stringsAsFactors = FALSE)
+
+# Error vector
+err.stock <- "\n"
+
+# Table match check
+table.check <- match3(ion.data,meta.samp.data,meta.ion.data)
+check.err(table.check)
+
+# StockID
+samp.id <- stockID(ion.data,meta.samp.data,"sample")
+ion.data <- samp.id$dataMatrix
+meta.samp.data <- samp.id$Metadata
+samp.id <- samp.id$id.match
+
+
+# Function 1: CV calculation --------------------------------------------------------------
+# Classifies ions according to the Coefficient of Variation (CV):
+# Compa=TRUE:
+#   the CV of pools and the CV of samples are compared (ratio between the pools' CV and the samples' CV)
+#   and this ratio is compared to a given threshold.
+# Compa=FALSE:
+#   only the CV of pools is considered and compared to a given threshold
+
+if(CV){
+
+ # Checking the sampleType variable
+ if(is.null(meta.samp.data$sampleType)){
+ err.stock <- c(err.stock,"\n-------",
+ "\nWarning : no 'sampleType' variable detected in sample meta-data !",
+ "\nCV can not be calculated.\n-------\n")
+ }else{
+ if(!("pool"%in%levels(factor(meta.samp.data$sampleType)))){
+ err.stock <- c(err.stock,"\n-------",
+ "\nWarning : no 'pool' detected in 'sampleType' variable (sample meta-data) !",
+ "\nCV can not be calculated.\n-------\n")
+ }else{
+ if((!("sample"%in%levels(factor(meta.samp.data$sampleType))))&(Compa)){
+ err.stock <- c(err.stock,"\n-------",
+ "\nWarning : no 'sample' detected in 'sampleType' variable (sample meta-data) !",
+ "\nCV can not be calculated.\n-------\n")
+ }else{
+
+ # Statement
+ tmp.ion <- data.frame(CV.ind=rep(NA,nrow(ion.data)),CV.samp=rep(NA,nrow(ion.data)),
+ CV.pool=rep(NA,nrow(ion.data)),ion.data,stringsAsFactors=FALSE)
+ # CV samples
+ tmp.samp <- which(colnames(tmp.ion)%in%meta.samp.data[which(meta.samp.data$sampleType=="sample"),1])
+ tmp.ion$CV.samp <- apply(tmp.ion[,tmp.samp],1,function(x)sd(x, na.rm = TRUE)) / rowMeans(tmp.ion[,tmp.samp], na.rm = TRUE)
+ tmp.ion$CV.samp[which(apply(tmp.ion[,tmp.samp],1,function(x)sd(x, na.rm = TRUE))==0)] <- 0
+ # CV pools
+ tmp.samp <- which(colnames(tmp.ion)%in%meta.samp.data[which(meta.samp.data$sampleType=="pool"),1])
+ tmp.ion$CV.pool <- apply(tmp.ion[,tmp.samp],1,function(x)sd(x, na.rm = TRUE)) / rowMeans(tmp.ion[,tmp.samp], na.rm = TRUE)
+ tmp.ion$CV.pool[which(apply(tmp.ion[,tmp.samp],1,function(x)sd(x, na.rm = TRUE))==0)] <- 0
+ # CV indicator
+ if(Compa){tmp.ion$CV.ind <- ifelse((tmp.ion$CV.pool)/(tmp.ion$CV.samp)>seuil,0,1)
+ }else{tmp.ion$CV.ind <- ifelse((tmp.ion$CV.pool)>seuil,0,1)}
+ # Addition of new columns in meta.ion.data
+ if(Compa){tmp.ion<-tmp.ion[,c(4,2,3,1,1)]}else{tmp.ion<-tmp.ion[,c(4,3,1,1)]}
+ tmp.ion[,ncol(tmp.ion)] <- 1:nrow(tmp.ion)
+ meta.ion.data <- merge(x=meta.ion.data,y=tmp.ion,by.x=1,by.y=1)
+ meta.ion.data <- meta.ion.data[order(meta.ion.data[,ncol(meta.ion.data)]),][,-ncol(meta.ion.data)]
+ rownames(meta.ion.data) <- NULL
+
+ rm(tmp.ion,tmp.samp)
+
+ }}}
+
+} # end if(CV)
+
+## complementary metrics (ET)
+
+datMN <- t(as.matrix(ion.data[, -1]))
+colnames(datMN) <- ion.data[, 1]
+datMN <- datMN[, meta.ion.data[, 1]] ## in case meta.ion.data has been re-ordered during the CV = TRUE computations
+quaLs <- qualityMetricsF(datMN,
+ meta.samp.data,
+ meta.ion.data,
+ poolAsPool1L,
+ fig.out,
+ log.out)
+meta.samp.data <- quaLs[["samDF"]]
+meta.ion.data <- quaLs[["varDF"]]
+
+
+# Output ----------------------------------------------------------------------------------
+
+# Getting back original identifiers
+id.ori <- reproduceID(ion.data,meta.samp.data,"sample",samp.id)
+ion.data <- id.ori$dataMatrix
+meta.samp.data <- id.ori$Metadata
+
+
+# Error checking
+if(length(err.stock)>1){
+ stop(err.stock)
+}else{
+
+## write.table(ion.data, ion.file.out, sep="\t", row.names=FALSE, quote=FALSE)
+write.table(meta.samp.data, meta.samp.file.out, sep="\t", row.names=FALSE, quote=FALSE)
+write.table(meta.ion.data, meta.ion.file.out, sep="\t", row.names=FALSE, quote=FALSE)
+
+}
+
+
+} # end of QualityControl function
+
+
+# Typical function call
+# QualityControl(ion.file.in, meta.samp.file.in, meta.ion.file.in,
+#                CV, Compa, seuil, poolAsPool1L,
+#                ion.file.out, meta.samp.file.out, meta.ion.file.out, fig.out, log.out)
+
+
+qualityMetricsF <- function(datMN,
+ samDF,
+ varDF,
+ pooAsPo1L = TRUE,
+ fig.pdfC = NULL,
+ log.txtC = NULL) {
+
+ optWrnN <- options()$warn
+ options(warn = -1)
+
+
+ ##------------------------------
+ ## Functions
+ ##------------------------------
+
+
+ allDigF <- function (string) { ## from the Hmisc package (all.digits)
+ k <- length(string)
+ result <- logical(k)
+ for (i in 1:k) {
+ st <- string[i]
+ ls <- nchar(st)
+ ex <- substring(st, 1:ls, 1:ls)
+ result[i] <- all(match(ex, c("0", "1", "2", "3", "4",
+ "5", "6", "7", "8", "9"), nomatch = 0) > 0)
+ }
+ result
+ }
+
+  datPloF <- function() { ## plotting the data matrix
+
+ thrVn <- c(pvalue=0.001,
+ poolCv=0.3)
+
+ ## Constants
+
+ marLs <- list(dri = c(2.1, 2.6, 1.1, 1.1),
+ ima = c(1.1, 2.6, 4.1, 1.1),
+ msd = c(2.1, 2.6, 1.1, 0.6),
+ sam = c(3.1, 3.6, 1.1, 0.6),
+ pca = c(2.6, 3.6, 1.1, 0.6),
+ sca = c(1.1, 4.1, 4.1, 0.6),
+ tit = c(0.1, 0.6, 1.1, 0.6))
+ palHeaVc <- rev(rainbow(ceiling(256 * 1.5))[1:256])
+
+ ## Functions
+
+ axiPreF <- function(valVn,
+ lenN) {
+
+ if(NA %in% valVn) {
+ warning("NA in valVn")
+ valVn <- as.vector(na.omit(valVn))
+ }
+
+ if(lenN < length(valVn))
+      stop("The length of the input vector must not exceed the length parameter 'lenN'.")
+
+ if(length(valVn) < lenN)
+ valVn <- seq(from = min(valVn), to = max(valVn), length.out = lenN)
+
+ preValVn <- pretty(valVn)
+
+ preLabVn <- preAtVn <- c()
+
+ for(n in 1:length(preValVn))
+ if(min(valVn) < preValVn[n] && preValVn[n] < max(valVn)) {
+ preLabVn <- c(preLabVn, preValVn[n])
+ preAtVn <- c(preAtVn, which(abs(valVn - preValVn[n]) == min(abs(valVn - preValVn[n])))[1])
+ }
+
+ return(list(atVn = preAtVn,
+ labVn = preLabVn))
+
+ }
+
+ colF <- function(vecVn)
+ sapply(vecVn,
+ function(outN) {
+ if(outN < ploRgeVn[1])
+ return(palHeaVc[1])
+ else if(outN > ploRgeVn[2])
+ return(palHeaVc[256])
+ else return(palHeaVc[round((outN - ploRgeVn[1]) / diff(ploRgeVn) * 256 + 1)])})
+
+ obsColF <- function(typVc) {
+
+ ## available color palette
+ palVc <- palette()
+
+ ## colors for common types are set aside
+ palVc <- palVc[!(palVc %in% c("black", "red", "green3"))]
+
+ ## filling in the types with dedicated colors
+ samTypVc <- sort(unique(samDF[, "sampleType"]))
+ samColVc <- character(length(samTypVc))
+ if("blank" %in% samTypVc)
+ samColVc[grepl("blank", samTypVc)] <- "black"
+ if("pool" %in% samTypVc)
+ samColVc[grepl("pool", samTypVc)] <- "red"
+ if("sample" %in% samTypVc)
+ samColVc[grepl("sample", samTypVc)] <- "green4"
+
+ ## filling in the other types
+ palColI <- 1
+ palColMaxI <- length(palVc)
+
+ while(any(samColVc == "")) {
+ typToColI <- which(samColVc == "")[1]
+ if(palColI <= palColMaxI)
+ samColVc[typToColI] <- palVc[palColI]
+ else
+ samColVc[typToColI] <- "gray"
+ palColI <- palColI + 1
+ }
+
+ names(samColVc) <- samTypVc
+
+ samColVc[typVc]
+
+ }
+
+ par(font = 2,
+ font.axis = 2,
+ font.lab = 2,
+ pch=18)
+
+ layout(matrix(c(1, 3, 4, 5, 5,
+ 1, 7, 7, 7, 6,
+ 2, 7, 7, 7, 6),
+ byrow = TRUE,
+ nrow = 3),
+ heights = c(1.8, 1.2, 2.5),
+ widths = c(3.5, 1.8, 2.8, 1, 0.8))
+
+ ## Colors
+ ##-------
+
+ if("sampleType" %in% colnames(samDF)) {
+ obsColVc <- obsColF(samDF[, "sampleType"])
+ } else
+ obsColVc <- rep("black", nrow(samDF))
+
+ ## PCA and Hotelling ellipse
+ ##--------------------------
+
+ vVn <- getPcaVarVn(ropLs)
+ vRelVn <- vVn / ncol(datMN)
+
+ par(mar = marLs[["pca"]])
+
+ plot(ropScoreMN,
+ type = "n",
+ xlab = "",
+ ylab = "",
+ xlim = range(ropScoreMN[, 1]) * 1.1)
+ mtext(paste("t1 (", round(vRelVn[1] * 100), "%)", sep = ""),
+ cex = 0.7,
+ line = 2,
+ side = 1)
+ mtext(paste("t2 (", round(vRelVn[2] * 100), "%)", sep = ""),
+ cex = 0.7,
+ las = 0,
+ line = 2,
+ side = 2)
+ abline(h = 0, lty = "dashed")
+ abline(v = 0, lty = "dashed")
+ radVn <- seq(0, 2 * pi, length.out = 100)
+
+ hotFisN <- hotN * qf(1 - thrVn["pvalue"], 2, n - 2)
+ lines(sqrt(var(ropScoreMN[, 1]) * hotFisN) * cos(radVn),
+ sqrt(var(ropScoreMN[, 2]) * hotFisN) * sin(radVn))
+
+ text(ropScoreMN[, 1],
+ ropScoreMN[, 2],
+ cex = 0.7,
+ col = obsColVc,
+ labels = rownames(datMN))
+
+ if("sampleType" %in% colnames(samDF)) {
+ obsColVuc <- obsColVc[sort(unique(names(obsColVc)))]
+ legOrdVc <- c("blank", paste0("pool", 8:1), "pool", "other", "sample")
+ obsColVuc <- obsColVuc[legOrdVc[legOrdVc %in% names(obsColVuc)]]
+
+ text(rep(par("usr")[1], times = length(obsColVuc)),
+ par("usr")[3] + (0.97 - length(obsColVuc) * 0.03 + 1:length(obsColVuc) * 0.03) * diff(par("usr")[3:4]),
+ col = obsColVuc,
+ font = 2,
+ labels = names(obsColVuc),
+ pos = 4)
+ }
+
+ ## Missing/low intensities and decile values
+ ##------------------------------------------
+
+ par(mar = marLs[["sam"]])
+
+ plot(missZscoVn,
+ deciZscoMaxVn,
+ type = "n",
+ xlab = "",
+ ylab = "",
+ xlim = c(min(missZscoVn),
+ max(missZscoVn) + 0.5))
+ mtext("amount of missing values (z-score)",
+ cex = 0.7,
+ line = 2,
+ side = 1)
+ mtext("deciles (zscore)",
+ cex = 0.7,
+ las = 0,
+ line = 2,
+ side = 2)
+ abline(h = qnorm(1 - thrVn["pvalue"] / 2) * c(-1, 1), lty = "dashed")
+ abline(v = qnorm(1 - thrVn["pvalue"] / 2) * c(-1, 1), lty = "dashed")
+ text(missZscoVn,
+ deciZscoMaxVn,
+ cex = 0.7,
+ col = obsColVc,
+ labels = rownames(datMN))
+
+ ## tit: Title
+ ##-----------
+
+ par(mar = marLs[["tit"]])
+ plot(0:1, bty = "n", type = "n", xaxt = "n", yaxt = "n", xlab = "", ylab = "")
+ text(1.5, 1, cex = 1.3, labels = "Quality Metrics")
+ text(1, 0.85, adj=0, cex = 1.1, labels = paste0("NAs: ",
+ round(length(which(is.na(c(datMN)))) / cumprod(dim(datMN))[2] * 100), "%"))
+ text(1, 0.75, adj=0, cex = 1.1, labels = paste0("0 values: ",
+ round(sum(abs(datMN) < epsN, na.rm=TRUE) / cumprod(dim(datMN))[2] * 100, 2), "%"))
+ text(1, 0.65, adj=0, cex = 1.1, labels = paste0("min: ", signif(min(datMN, na.rm=TRUE), 2)))
+ text(1, 0.55, adj=0, cex = 1.1, labels = paste0("median: ", signif(median(datMN, na.rm=TRUE), 2)))
+ text(1, 0.45, adj=0, cex = 1.1, labels = paste0("mean: ", signif(mean(datMN, na.rm=TRUE), 2)))
+ text(1, 0.35, adj=0, cex = 1.1, labels = paste0("max: ", signif(max(datMN, na.rm=TRUE), 2)))
+ if("sampleType" %in% colnames(samDF) &&
+ "pool" %in% samDF[, "sampleType"])
+ text(1,
+ 0.25,
+ adj=0, cex = 1.1,
+ labels = paste0("pool CV < ",
+ round(thrVn["poolCv"] * 100), "%: ",
+ round(sum(varDF[, "pool_CV"] < thrVn["poolCv"]) / nrow(varDF) * 100),
+ "%"))
+
+ text(1, 0.1, adj=0, labels = paste0("Thresholds used in plots:"))
+ text(1, 0, adj=0, labels = paste0(" p-value = ", thrVn["pvalue"]))
+
+ ## dri: Analytical drift
+ ##----------------------
+
+ par(mar = marLs[["dri"]])
+
+ ## ordering
+
+ driDatMN <- datMN
+ driSamDF <- samDF
+
+ driSamDF[, "ordIniVi"] <- 1:nrow(driDatMN)
+
+ if("injectionOrder" %in% colnames(driSamDF)) {
+ if("batch" %in% colnames(driSamDF))
+ ordVi <- order(driSamDF[, "batch"],
+ driSamDF[, "injectionOrder"])
+ else
+ ordVi <- order(driSamDF[, "injectionOrder"])
+ } else
+ ordVi <- 1:nrow(driDatMN)
+
+ driDatMN <- driDatMN[ordVi, ]
+ driSamDF <- driSamDF[ordVi, ]
+
+ driColVc <- rep("black", nrow(driDatMN))
+ if("sampleType" %in% colnames(driSamDF))
+ driColVc <- obsColF(driSamDF[, "sampleType"])
+
+ plot(rowSums(driDatMN, na.rm=TRUE),
+ col = driColVc,
+ pch = 18,
+ xlab = "",
+ ylab = "")
+
+ mtext("injection order",
+ cex = 0.7,
+ line = 2,
+ side = 1)
+
+ mtext("Sum of intens. for all variables",
+ cex = 0.7,
+ line = 2,
+ side = 2)
+
+ ## msd: Sd vs Mean plot
+ ##---------------------
+
+ par(mar = marLs[["msd"]])
+ plot(apply(datMN, 2, function(y) mean(y, na.rm = TRUE)),
+ apply(datMN, 2, function(y) sd(y, na.rm = TRUE)),
+ col=obsColVc,
+ pch = 18,
+ xlab = "",
+ ylab = "")
+ mtext("mean",
+ cex = 0.7,
+ line = 2,
+ side = 1)
+ mtext("sd",
+ cex = 0.7,
+ line = 2,
+ side = 2)
+
+ ## sca-6: Color scale
+ ##-------------------
+
+ par(mar = marLs[["sca"]])
+
+ ylimVn <- c(0, 256)
+ ybottomVn <- 0:255
+ ytopVn <- 1:256
+
+ plot(x = 0,
+ y = 0,
+ font.axis = 2,
+ font.lab = 2,
+ type = "n",
+ xlim = c(0, 1),
+ ylim = ylimVn,
+ xlab = "",
+ ylab = "",
+ xaxs = "i",
+ yaxs = "i",
+ xaxt = "n",
+ yaxt = "n")
+
+ rect(xleft = 0,
+ ybottom = ybottomVn,
+ xright = 1,
+ ytop = ytopVn,
+ col = palHeaVc,
+ border = NA)
+
+ eval(parse(text = paste("axis(at = axiPreF(c(ifelse(min(datMN, na.rm = TRUE) == -Inf, yes = 0, no = min(datMN, na.rm = TRUE)) , max(datMN, na.rm = TRUE)), 256)$atVn,
+ font = 2,
+ font.axis = 2,
+ labels = axiPreF(c(ifelse(min(datMN, na.rm = TRUE) == -Inf, yes = 0, no = min(datMN, na.rm = TRUE)), max(datMN, na.rm = TRUE)), 256)$labVn,
+ las = 1,
+ lwd = 2,
+ lwd.ticks = 2,
+ side = 2,
+ xpd = TRUE)", sep = "")))
+
+ arrows(par("usr")[1],
+ par("usr")[4],
+ par("usr")[1],
+ par("usr")[3],
+ code = 0,
+ lwd = 2,
+ xpd = TRUE)
+
+ ## ima: Image
+ ##-----------
+
+ par(mar = marLs[["ima"]])
+
+ ploRgeVn <- range(datMN, na.rm = TRUE)
+
+ imaMN <- t(datMN)[, rev(1:nrow(datMN)), drop = FALSE]
+
+ image(x = 1:nrow(imaMN),
+ y = 1:ncol(imaMN),
+ z = imaMN,
+ col = palHeaVc,
+ font.axis = 2,
+ font.lab = 2,
+ xaxt = "n",
+ yaxt = "n",
+ xlab = "",
+ ylab = "")
+
+ if(length(rownames(datMN)) == 0) {
+ rowNamVc <- rep("", times = nrow(datMN))
+ } else
+ rowNamVc <- rownames(datMN)
+
+ if(length(colnames(datMN)) == 0) {
+ colNamVc <- rep("", times = ncol(datMN))
+ } else
+ colNamVc <- colnames(datMN)
+
+ xlaVc <- paste(paste(rep("[", 2),
+ c(1, nrow(imaMN)),
+ rep("] ", 2),
+ sep = ""),
+ rep("\n", times = 2),
+ c(colNamVc[1], tail(colNamVc, 1)),
+ sep = "")
+
+ for(k in 1:2)
+ axis(side = 3,
+ hadj = c(0, 1)[k],
+ at = c(1, nrow(imaMN))[k],
+ cex = 0.8,
+ font = 2,
+ labels = xlaVc[k],
+ line = -0.5,
+ tick = FALSE)
+
+
+ ylaVc <- paste(paste(rep("[", times = 2),
+ c(ncol(imaMN), 1),
+ rep("]", times = 2),
+ sep = ""),
+ rep("\n", times = 2),
+ c(tail(rowNamVc, 1), rowNamVc[1]),
+ sep = "")
+
+ for(k in 1:2)
+ axis(side = 2,
+ at = c(1, ncol(imaMN))[k],
+ cex = 0.8,
+ font = 2,
+ hadj = c(0, 1)[k],
+ labels = ylaVc[k],
+ las = 0,
+ line = -0.5,
+ lty = "blank",
+ tick = FALSE)
+
+ box(lwd = 2)
+
+
+ }
+
+
+ zScoreF <- function(x) {
+ sdxN <- sd(x, na.rm = TRUE)
+ if(sdxN < epsN)
+ return(rep(0, length(x)))
+ else
+ return((x - mean(x, na.rm = TRUE)) / sdxN)
+ }
+
+
+ ## Option
+ ##-------
+
+ strAsFacL <- options()$stringsAsFactors
+  options(stringsAsFactors = FALSE)
+
+ ## Constants
+ ##----------
+
+ epsN <- .Machine[["double.eps"]] ## [1] 2.22e-16
+
+
+ ##------------------------------
+ ## Start
+ ##------------------------------
+
+ if(!is.null(log.txtC))
+ sink(log.txtC)
+
+ ## Description
+ ##------------
+
+ cat("\n\nData description:\n\n", sep = "")
+ cat("observations:", nrow(datMN), "\n")
+ cat("variables:", ncol(datMN), "\n")
+ cat("missing:", sum(is.na(datMN)), "\n")
+ cat("0 values (%):",
+ sum(abs(datMN) < epsN, na.rm=TRUE) / cumprod(dim(datMN))[2] * 100, "\n")
+ cat("min:", min(datMN, na.rm=TRUE), "\n")
+ cat("mean:", signif(mean(datMN, na.rm=TRUE), 2), "\n")
+ cat("median:", signif(median(datMN, na.rm=TRUE), 2), "\n")
+ cat("max:", signif(max(datMN, na.rm=TRUE), 2), "\n")
+
+ if("sampleType" %in% colnames(samDF)) {
+ cat("\nSample types:\n", sep = "")
+ print(table(samDF[, "sampleType"]))
+ cat("\n", sep="")
+ }
+
+
+ ##------------------------------
+ ## Variable metrics
+ ##------------------------------
+
+
+ ## 'blank' observations
+
+ if("sampleType" %in% colnames(samDF) && "blank" %in% samDF[, "sampleType"]) {
+
+ cat("\nVariables: Blank mean, sd, and CV\n", sep="")
+
+ blkVl <- samDF[, "sampleType"] == "blank"
+
+ if(sum(blkVl) == 1)
+ varDF[, "blank_mean"] <- datMN[blkVl, ]
+ else
+ varDF[, "blank_mean"] <- apply(datMN[blkVl, , drop=FALSE], 2, function(varVn) mean(varVn, na.rm=TRUE))
+
+ if(sum(blkVl) == 1)
+ varDF[, "blank_sd"] <- rep(0, nrow(varDF))
+ else
+ varDF[, "blank_sd"] <- apply(datMN[blkVl, , drop=FALSE], 2, function(varVn) sd(varVn, na.rm=TRUE))
+
+ varDF[, "blank_CV"] <- varDF[, "blank_sd"] / varDF[, "blank_mean"]
+
+ }
+
+
+ ## 'sample' observations
+
+ if("sampleType" %in% colnames(samDF) && "sample" %in% samDF[, "sampleType"]) {
+
+ cat("\nVariables: Sample mean, sd, and CV\n", sep="")
+
+ samVl <- samDF[, "sampleType"] == "sample"
+
+ if(sum(samVl) == 1)
+ varDF[, "sample_mean"] <- datMN[samVl, ]
+ else
+ varDF[, "sample_mean"] <- apply(datMN[samVl, , drop=FALSE], 2, function(varVn) mean(varVn, na.rm=TRUE))
+
+ if(sum(samVl) == 1)
+ varDF[, "sample_sd"] <- rep(0, nrow(varDF))
+ else
+ varDF[, "sample_sd"] <- apply(datMN[samVl, , drop=FALSE], 2, function(varVn) sd(varVn, na.rm=TRUE))
+
+ varDF[, "sample_CV"] <- varDF[, "sample_sd"] / varDF[, "sample_mean"]
+
+ }
+
+ ## 'blank' mean / 'sample' mean ratio
+
+ if(all(c("blank_mean", "sample_mean") %in% colnames(varDF))) {
+
+ cat("\nVariables: Blank mean over sample mean\n", sep="")
+
+ varDF[, "blankMean_over_sampleMean"] <- varDF[, "blank_mean"] / varDF[, "sample_mean"]
+
+ }
+
+ ## 'pool' observations
+
+ if("sampleType" %in% colnames(samDF) && "pool" %in% samDF[, "sampleType"]) {
+
+ cat("\nVariables: Pool mean, sd, and CV\n", sep="")
+
+ pooVl <- samDF[, "sampleType"] == "pool"
+
+ if(sum(pooVl) == 1)
+ varDF[, "pool_mean"] <- datMN[pooVl, ]
+ else
+ varDF[, "pool_mean"] <- apply(datMN[pooVl, , drop=FALSE], 2, function(varVn) mean(varVn, na.rm=TRUE))
+
+ if(sum(pooVl) == 1)
+ varDF[, "pool_sd"] <- rep(0, nrow(varDF))
+ else
+ varDF[, "pool_sd"] <- apply(datMN[pooVl, , drop=FALSE], 2, function(varVn) sd(varVn, na.rm=TRUE))
+
+ varDF[, "pool_CV"] <- varDF[, "pool_sd"] / varDF[, "pool_mean"]
+
+ }
+
+ ## 'pool' CV / 'sample' CV ratio
+
+ if(all(c("pool_CV", "sample_CV") %in% colnames(varDF))) {
+
+ cat("\nVariables: Pool CV over sample CV\n", sep="")
+
+ varDF[, "poolCV_over_sampleCV"] <- varDF[, "pool_CV"] / varDF[, "sample_CV"]
+
+ }
+
+
+ ## 'pool' dilutions
+
+ if("sampleType" %in% colnames(samDF) && any(grepl("pool.+", samDF[, "sampleType"]))) {
+
+ pooVi <- grep("pool.*", samDF[, "sampleType"]) ## pool, pool2, pool4, poolInter, ...
+
+ pooNamVc <- samDF[pooVi, "sampleType"]
+
+ if(pooAsPo1L) {
+
+ pooNamVc[pooNamVc == "pool"] <- "pool1" ## 'pool' -> 'pool1'
+
+ } else {
+
+ pooVl <- pooNamVc == "pool"
+ pooVi <- pooVi[!pooVl]
+ pooNamVc <- pooNamVc[!pooVl]
+
+ }
+
+ pooDilVc <- gsub("pool", "", pooNamVc)
+
+ pooDilVl <- sapply(pooDilVc, allDigF)
+
+ if(sum(pooDilVl)) {
+
+ cat("\nVariables: Pool dilutions\n", sep="")
+
+ pooNamVc <- pooNamVc[pooDilVl] ## for the plot
+
+ pooVi <- pooVi[pooDilVl]
+
+ dilVn <- 1 / as.numeric(pooDilVc[pooDilVl])
+
+ varDF[, "poolDil_correl"] <- apply(datMN[pooVi, , drop=FALSE], 2,
+ function(varVn) cor(dilVn, varVn))
+
+ varDF[, "poolDil_pval"] <- apply(datMN[pooVi, , drop=FALSE], 2,
+ function(varVn) cor.test(dilVn, varVn)[["p.value"]])
+
+ }
+
+ }
+
+
+ ##------------------------------
+ ## Sample metrics
+ ##------------------------------
+
+
+ ## Hotelling: p-value associated to the distance from the center in the first PCA score plane
+
+ cat("\nObservations: Hotelling ellipse\n", sep="")
+
+ ropLs <- opls(datMN, predI = 2, plotL = FALSE, printL = FALSE)
+
+ ropScoreMN <- getScoreMN(ropLs)
+
+ invCovScoMN <- solve(cov(ropScoreMN))
+
+ n <- nrow(datMN)
+ hotN <- 2 * (n - 1) * (n^2 - 1) / (n^2 * (n - 2))
+
+ hotPvaVn <- apply(ropScoreMN,
+ 1,
+ function(x)
+ 1 - pf(1 / hotN * t(as.matrix(x)) %*% invCovScoMN %*% as.matrix(x), 2, n - 2))
+
+ samDF[, "hotelling_pval"] <- hotPvaVn
+
+ ## p-value associated to number of missing values
+
+ cat("\nObservations: Missing values\n", sep="")
+
+ missZscoVn <- zScoreF(apply(datMN,
+ 1,
+ function(rowVn) {
+ sum(is.na(rowVn))
+ }))
+
+ samDF[, "missing_pval"] <- sapply(missZscoVn, function(zscoN) 2 * (1 - pnorm(abs(zscoN))))
+
+ ## p-value associated to the deciles of the profiles
+
+ cat("\nObservations: Profile deciles\n", sep="")
+
+ deciMN <- t(as.matrix(apply(datMN,
+ 1,
+ function(x) quantile(x, 0.1 * 1:9, na.rm = TRUE))))
+
+ deciZscoMN <- apply(deciMN, 2, zScoreF)
+
+ deciZscoMaxVn <- apply(deciZscoMN, 1, function(rowVn) rowVn[which.max(abs(rowVn))])
+
+ samDF[, "decile_pval"] <- sapply(deciZscoMaxVn, function(zscoN) 2 * (1 - pnorm(abs(zscoN))))
+
+
+ ##------------------------------
+ ## Figure
+ ##------------------------------
+
+ cat("\nPlotting\n")
+
+ if(!is.null(fig.pdfC)) {
+ pdf(fig.pdfC, width=11, height=7)
+ } else
+ dev.new(width=11, height=7)
+
+ datPloF()
+
+ if(!is.null(fig.pdfC))
+ dev.off()
+
+
+ ##------------------------------
+ ## End
+ ##------------------------------
+
+
+ if(!is.null(log.txtC))
+ sink()
+
+  options(stringsAsFactors = strAsFacL)
+ options(warn = optWrnN)
+
+ return(list(samDF=samDF,
+ varDF=varDF))
+
+
+} ## qualityMetricsF
diff -r 000000000000 -r b4f5b5bc01dd qualitymetrics_wrapper.R
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/qualitymetrics_wrapper.R Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,51 @@
+#!/usr/bin/Rscript --vanilla --slave --no-site-file
+
+################################################################################################
+# WRAPPER FOR QC_script.R (ANALYSES FOR QUALITY CONTROL) #
+# #
+# Author: Melanie PETERA based on Marion LANDI's filters' wrapper #
+# User: Galaxy #
+# Original data: used with QC_script.R #
+# Starting date: 04-09-2014 #
+# V-1: Restriction of old filter wrapper to quality control (CV) #
+# #
+# #
+# Input files: dataMatrix.txt ; sampleMetadata.txt ; variableMetadata.txt #
+# Output files: dataMatrix.txt ; sampleMetadata.txt ; variableMetadata.txt #
+# #
+################################################################################################
+
+
+library(batch) #necessary for parseCommandArgs function
+args = parseCommandArgs(evaluate=FALSE) #interpretation of arguments given in command line as an R list of objects
+
+source_local <- function(...){
+ argv <- commandArgs(trailingOnly = FALSE)
+ base_dir <- dirname(substring(argv[grep("--file=", argv)], 8))
+ for(i in 1:length(list(...))){source(paste(base_dir, list(...)[[i]], sep="/"))}
+}
+#Import the different functions
+source_local("qualitymetrics_script.R", "easyrlibrary-lib/RcheckLibrary.R", "easyrlibrary-lib/miniTools.R")
+
+
+suppressMessages(library(ropls)) ## to be used in qualityMetricsF
+
+if(packageVersion("ropls") < "1.4.0")
+  stop("Please use 'ropls' version 1.4.0 or above")
+
+if(length(args) < 9){ stop("NOT enough arguments !!!") }
+
+args$Compa <- as.logical(args$Compa)
+args$poolAsPool1L <- as.logical(args$poolAsPool1L)
+
+QualityControl(args$dataMatrix_in, args$sampleMetadata_in, args$variableMetadata_in,
+ args$CV, args$Compa, args$seuil, args$poolAsPool1L,
+ args$dataMatrix_out, args$sampleMetadata_out, args$variableMetadata_out, args$figure, args$information)
+
+#QualityControl(ion.file.in, meta.samp.file.in, meta.ion.file.in,
+#               CV, Compa, seuil, poolAsPool1L,
+#               ion.file.out, meta.samp.file.out, meta.ion.file.out, fig.out, log.out)
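+
+## A possible command-line invocation (hypothetical file names; 'batch' reads the
+## trailing arguments as space-separated 'name value' pairs), e.g.:
+# Rscript qualitymetrics_wrapper.R dataMatrix_in dataMatrix.tsv sampleMetadata_in sampleMetadata.tsv \
+#   variableMetadata_in variableMetadata.tsv CV FALSE Compa TRUE seuil 1 poolAsPool1L TRUE \
+#   sampleMetadata_out sampleMetadata_out.tsv variableMetadata_out variableMetadata_out.tsv \
+#   figure figure.pdf information information.txt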
+
+# delete the parameters to avoid passing them on to the next tool via the .RData image
+rm(args)
+
diff -r 000000000000 -r b4f5b5bc01dd runit/input/dataMatrix.tsv
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/runit/input/dataMatrix.tsv Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,11 @@
+dataMatrix QC_4 sam_44 sam_18 sam_23 blk_3 sam_9 sam_22 QC_6 blk_4
+met_031 5601185.9 4446133.4 4144765.4 3085899.9 NA 6748534.9 5819543.8 3256720.3 NA
+met_032 4.07 4.08 4.11 4.1 NA 4.04 4.13 4.11 NA
+met_033 1448205184 1456986135 993364802.3 1162711600 5569143.2 1043559922 1465003454 1052094028 5247494.3
+met_034 4.11 4.21 4.18 4.1 4.09 4.1 4.14 4.11 4.08
+met_035 3777580.7 2296751 1890711.7 1767424.6 6567.5 1906253.5 3043253.9 2856958.5 7940.8
+met_036 4.12 4.21 4.26 4.1 4.11 4.22 4.27 4.12 4.2
+met_037 4982658.7 3751181.8 4219033.2 2425759.9 NA 11978184.4 4306459.5 3352187 NA
+met_038 4.45 4.38 4.4 4.4 NA 4.44 4.46 4.32 NA
+met_039 6658087.7 3231434.7 2932986.5 4098788.3 NA 3691132.6 6108614.4 4541941.9 NA
+met_040 4.49 4.56 4.48 4.5 NA 4.45 4.54 4.46 NA
diff -r 000000000000 -r b4f5b5bc01dd runit/input/sampleMetadata.tsv
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/runit/input/sampleMetadata.tsv Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,10 @@
+sampleMetadata injectionOrder batch sampleType
+QC_4 19 batch1 pool
+sam_44 20 batch1 sample
+sam_18 23 batch1 sample
+sam_23 27 batch1 sample
+blk_3 31 batch1 blank
+sam_9 34 batch1 sample
+sam_22 38 batch1 sample
+QC_6 42 batch1 pool
+blk_4 43 batch1 blank
diff -r 000000000000 -r b4f5b5bc01dd runit/input/variableMetadata.tsv
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/runit/input/variableMetadata.tsv Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,11 @@
+variableMetadata number
+met_031 31
+met_032 32
+met_033 33
+met_034 34
+met_035 35
+met_036 36
+met_037 37
+met_038 38
+met_039 39
+met_040 40
diff -r 000000000000 -r b4f5b5bc01dd runit/output/figure.pdf
Binary file runit/output/figure.pdf has changed
diff -r 000000000000 -r b4f5b5bc01dd runit/output/information.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/runit/output/information.txt Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,36 @@
+
+
+Data description:
+
+observations: 9
+variables: 10
+missing: 12
+0 values (%): 0
+min: 4.04
+mean: 1.1e+08
+median: 3300
+max: 1.5e+09
+
+Sample types:
+
+ blank pool sample
+ 2 2 5
+
+
+Variables: Blank mean, sd, and CV
+
+Variables: Sample mean, sd, and CV
+
+Variables: Blank mean over sample mean
+
+Variables: Pool mean, sd, and CV
+
+Variables: Pool CV over sample CV
+
+Observations: Hotelling ellipse
+
+Observations: Missing values
+
+Observations: Profile deciles
+
+Plotting
diff -r 000000000000 -r b4f5b5bc01dd runit/output/sampleMetadata.tsv
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/runit/output/sampleMetadata.tsv Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,10 @@
+sampleMetadata injectionOrder batch sampleType hotelling_pval missing_pval decile_pval
+QC_4 19 batch1 pool 0.656135121662705 0.614294664663482 0.152779336765535
+sam_44 20 batch1 sample 0.465535744005279 0.614294664663482 0.0519209513268233
+sam_18 23 batch1 sample 0.851371602675381 0.614294664663482 0.183084409434003
+sam_23 27 batch1 sample 0.672662842796115 0.614294664663482 0.274841934352591
+blk_3 31 batch1 blank 0.55999189346843 0.0777598964393293 0.0482773048447409
+sam_9 34 batch1 sample 0.181937097726123 0.614294664663482 0.0612632186177473
+sam_22 38 batch1 sample 0.32958046545209 0.614294664663482 0.183084409434003
+QC_6 42 batch1 pool 0.568077738575854 0.614294664663482 0.430072456425572
+blk_4 43 batch1 blank 0.648119964672735 0.0777598964393293 0.0491375964282466
diff -r 000000000000 -r b4f5b5bc01dd runit/output/variableMetadata.tsv
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/runit/output/variableMetadata.tsv Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,11 @@
+variableMetadata number blank_mean blank_sd blank_CV sample_mean sample_sd sample_CV blankMean_over_sampleMean pool_mean pool_sd pool_CV poolCV_over_sampleCV
+met_031 31 NA NA NA 4848975.48 1441956.82432615 0.297373502974725 NA 4428953.1 1657787.52401859 0.37430685911273 1.25870951974004
+met_032 32 NA NA NA 4.092 0.0342052627529741 0.008359057368762 NA 4.09 0.0282842712474619 0.00691546974265573 0.827302581807729
+met_033 33 5408318.75 227440.118351194 0.0420537562345404 1224325182.66 224650928.867811 0.183489592511467 0.00441738749361485 1250149606 280092884.511242 0.22404749252966 1.22103651473126
+met_034 34 4.085 0.00707106781186532 0.00173098355247621 4.146 0.048785243670602 0.0117668219176561 0.985287023637241 4.11 0 0 0
+met_035 35 7254.15 971.069742603486 0.133864028535871 2180878.94 521458.904577554 0.239104929216087 0.00332625065378457 3317269.6 650978.200530878 0.196239160221068 0.820724026327874
+met_036 36 4.155 0.0636396103067892 0.0153163923722718 4.212 0.0676017751246223 0.0160498041606416 0.986467236467237 4.12 0 0 0
+met_037 37 NA NA NA 5336123.76 3788381.19013789 0.70995002374868 NA 4167422.85 1152917.59560276 0.276650015393268 0.389675337895617
+met_038 38 NA NA NA 4.416 0.03286335345031 0.00744188257479845 NA 4.385 0.0919238815542511 0.0209632569108896 2.81692927833619
+met_039 39 NA NA NA 4012591.3 1252979.510233 0.312261931643275 NA 5600014.8 1496341.04515943 0.26720305188469 0.855701655589386
+met_040 40 NA NA NA 4.506 0.0444971909225737 0.0098750978523244 NA 4.475 0.0212132034355966 0.00474038065599924 0.480033790741977
diff -r 000000000000 -r b4f5b5bc01dd runit/qualitymetrics_runtests.R
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/runit/qualitymetrics_runtests.R Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,106 @@
+#!/usr/bin/env Rscript
+
+## Package
+##--------
+
+library(RUnit)
+
+## Constants
+##----------
+
+testOutDirC <- "output"
+argVc <- commandArgs(trailingOnly = FALSE)
+scriptPathC <- sub("--file=", "", argVc[grep("--file=", argVc)])
+
+
+## Functions
+##-----------
+
+## Reading tables (matrix or data frame)
+readTableF <- function(fileC, typeC = c("matrix", "dataframe")[1]) {
+
+ file.exists(fileC) || stop(paste0("No output file \"", fileC ,"\"."))
+
+ switch(typeC,
+ matrix = return(t(as.matrix(read.table(file = fileC,
+ header = TRUE,
+ row.names = 1,
+ sep = "\t",
+ stringsAsFactors = FALSE)))),
+ dataframe = return(read.table(file = fileC,
+ header = TRUE,
+ row.names = 1,
+ sep = "\t",
+ stringsAsFactors = FALSE)))
+
+}
+
+## Call wrapper
+wrapperCallF <- function(paramLs) {
+
+ ## Set program path
+ wrapperPathC <- file.path(dirname(scriptPathC), "..", "qualitymetrics_wrapper.R")
+
+ ## Set arguments
+ argLs <- NULL
+ for (parC in names(paramLs))
+ argLs <- c(argLs, parC, paramLs[[parC]])
+
+ ## Call
+ wrapperCallC <- paste(c(wrapperPathC, argLs), collapse = " ")
+
+ if(.Platform$OS.type == "windows")
+ wrapperCallC <- paste("Rscript", wrapperCallC)
+
+ wrapperCodeN <- system(wrapperCallC)
+
+ if (wrapperCodeN != 0)
+ stop("Error when running qualitymetrics_wrapper.R.")
+
+ ## Get output
+ outLs <- list()
+ if ("dataMatrix_out" %in% names(paramLs))
+ outLs[["datMN"]] <- readTableF(paramLs[["dataMatrix_out"]], "matrix")
+ if ("sampleMetadata_out" %in% names(paramLs))
+ outLs[["samDF"]] <- readTableF(paramLs[["sampleMetadata_out"]], "dataframe")
+ if ("variableMetadata_out" %in% names(paramLs))
+ outLs[["varDF"]] <- readTableF(paramLs[["variableMetadata_out"]], "dataframe")
+ if("information" %in% names(paramLs))
+ outLs[["infVc"]] <- readLines(paramLs[["information"]])
+
+ return(outLs)
+}
+
+## Setting default parameters
+defaultArgF <- function(testInDirC) {
+
+ defaultArgLs <- list()
+ if(file.exists(file.path(dirname(scriptPathC), testInDirC, "dataMatrix.tsv")))
+ defaultArgLs[["dataMatrix_in"]] <- file.path(dirname(scriptPathC), testInDirC, "dataMatrix.tsv")
+ if(file.exists(file.path(dirname(scriptPathC), testInDirC, "sampleMetadata.tsv")))
+ defaultArgLs[["sampleMetadata_in"]] <- file.path(dirname(scriptPathC), testInDirC, "sampleMetadata.tsv")
+ if(file.exists(file.path(dirname(scriptPathC), testInDirC, "variableMetadata.tsv")))
+ defaultArgLs[["variableMetadata_in"]] <- file.path(dirname(scriptPathC), testInDirC, "variableMetadata.tsv")
+
+ defaultArgLs[["sampleMetadata_out"]] <- file.path(dirname(scriptPathC), testOutDirC, "sampleMetadata.tsv")
+ defaultArgLs[["variableMetadata_out"]] <- file.path(dirname(scriptPathC), testOutDirC, "variableMetadata.tsv")
+ defaultArgLs[["figure"]] <- file.path(dirname(scriptPathC), testOutDirC, "figure.pdf")
+ defaultArgLs[["information"]] <- file.path(dirname(scriptPathC), testOutDirC, "information.txt")
+
+ defaultArgLs
+
+}
+
+## Main
+##-----
+
+## Create output folder
+file.exists(testOutDirC) || dir.create(testOutDirC)
+
+## Run tests
+test.suite <- defineTestSuite('tests', dirname(scriptPathC), testFileRegexp = paste0('^.*_tests\\.R$'), testFuncRegexp = '^.*$')
+isValidTestSuite(test.suite)
+test.results <- runTestSuite(test.suite)
+print(test.results)
+
+
diff -r 000000000000 -r b4f5b5bc01dd runit/qualitymetrics_tests.R
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/runit/qualitymetrics_tests.R Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,14 @@
+test_input_default <- function() {
+
+ testDirC <- "input"
+ argLs <- list(CV = "FALSE",
+ Compa = "TRUE",
+ seuil = 1,
+ poolAsPool1L = "TRUE")
+
+ argLs <- c(defaultArgF(testDirC), argLs)
+ outLs <- wrapperCallF(argLs)
+
+ checkEqualsNumeric(outLs[["varDF"]]["met_033", "blankMean_over_sampleMean"], 0.004417387, tolerance = 1e-6)
+
+}
diff -r 000000000000 -r b4f5b5bc01dd static/images/QualityControl.png
Binary file static/images/QualityControl.png has changed
diff -r 000000000000 -r b4f5b5bc01dd static/images/qualitymetrics_workingExampleImage.png
Binary file static/images/qualitymetrics_workingExampleImage.png has changed
diff -r 000000000000 -r b4f5b5bc01dd test-data/input-dataMatrix.tsv
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/input-dataMatrix.tsv Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,11 @@
+dataMatrix QC_4 sam_44 sam_18 sam_23 blk_3 sam_9 sam_22 QC_6 blk_4
+met_031 5601185.9 4446133.4 4144765.4 3085899.9 NA 6748534.9 5819543.8 3256720.3 NA
+met_032 4.07 4.08 4.11 4.1 NA 4.04 4.13 4.11 NA
+met_033 1448205184 1456986135 993364802.3 1162711600 5569143.2 1043559922 1465003454 1052094028 5247494.3
+met_034 4.11 4.21 4.18 4.1 4.09 4.1 4.14 4.11 4.08
+met_035 3777580.7 2296751 1890711.7 1767424.6 6567.5 1906253.5 3043253.9 2856958.5 7940.8
+met_036 4.12 4.21 4.26 4.1 4.11 4.22 4.27 4.12 4.2
+met_037 4982658.7 3751181.8 4219033.2 2425759.9 NA 11978184.4 4306459.5 3352187 NA
+met_038 4.45 4.38 4.4 4.4 NA 4.44 4.46 4.32 NA
+met_039 6658087.7 3231434.7 2932986.5 4098788.3 NA 3691132.6 6108614.4 4541941.9 NA
+met_040 4.49 4.56 4.48 4.5 NA 4.45 4.54 4.46 NA
diff -r 000000000000 -r b4f5b5bc01dd test-data/input-sampleMetadata.tsv
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/input-sampleMetadata.tsv Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,10 @@
+sampleMetadata injectionOrder batch sampleType
+QC_4 19 batch1 pool
+sam_44 20 batch1 sample
+sam_18 23 batch1 sample
+sam_23 27 batch1 sample
+blk_3 31 batch1 blank
+sam_9 34 batch1 sample
+sam_22 38 batch1 sample
+QC_6 42 batch1 pool
+blk_4 43 batch1 blank
diff -r 000000000000 -r b4f5b5bc01dd test-data/input-variableMetadata.tsv
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/input-variableMetadata.tsv Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,11 @@
+variableMetadata number
+met_031 31
+met_032 32
+met_033 33
+met_034 34
+met_035 35
+met_036 36
+met_037 37
+met_038 38
+met_039 39
+met_040 40
diff -r 000000000000 -r b4f5b5bc01dd test-data/output-sampleMetadata.tsv
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/output-sampleMetadata.tsv Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,10 @@
+sampleMetadata injectionOrder batch sampleType hotelling_pval missing_pval decile_pval
+QC_4 19 batch1 pool 0.656135121662705 0.614294664663482 0.152779336765535
+sam_44 20 batch1 sample 0.465535744005279 0.614294664663482 0.0519209513268233
+sam_18 23 batch1 sample 0.851371602675381 0.614294664663482 0.183084409434003
+sam_23 27 batch1 sample 0.672662842796115 0.614294664663482 0.274841934352591
+blk_3 31 batch1 blank 0.55999189346843 0.0777598964393293 0.0482773048447409
+sam_9 34 batch1 sample 0.181937097726123 0.614294664663482 0.0612632186177473
+sam_22 38 batch1 sample 0.32958046545209 0.614294664663482 0.183084409434003
+QC_6 42 batch1 pool 0.568077738575854 0.614294664663482 0.430072456425572
+blk_4 43 batch1 blank 0.648119964672735 0.0777598964393293 0.0491375964282466
diff -r 000000000000 -r b4f5b5bc01dd test-data/output-variableMetadata.tsv
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/output-variableMetadata.tsv Sat Aug 06 12:01:17 2016 -0400
@@ -0,0 +1,11 @@
+variableMetadata number blank_mean blank_sd blank_CV sample_mean sample_sd sample_CV blankMean_over_sampleMean pool_mean pool_sd pool_CV poolCV_over_sampleCV
+met_031 31 NA NA NA 4848975.48 1441956.82432615 0.297373502974725 NA 4428953.1 1657787.52401859 0.37430685911273 1.25870951974004
+met_032 32 NA NA NA 4.092 0.0342052627529741 0.008359057368762 NA 4.09 0.0282842712474619 0.00691546974265573 0.827302581807729
+met_033 33 5408318.75 227440.118351194 0.0420537562345404 1224325182.66 224650928.867811 0.183489592511467 0.00441738749361485 1250149606 280092884.511242 0.22404749252966 1.22103651473126
+met_034 34 4.085 0.00707106781186532 0.00173098355247621 4.146 0.048785243670602 0.0117668219176561 0.985287023637241 4.11 0 0 0
+met_035 35 7254.15 971.069742603486 0.133864028535871 2180878.94 521458.904577554 0.239104929216087 0.00332625065378457 3317269.6 650978.200530878 0.196239160221068 0.820724026327874
+met_036 36 4.155 0.0636396103067892 0.0153163923722718 4.212 0.0676017751246223 0.0160498041606416 0.986467236467237 4.12 0 0 0
+met_037 37 NA NA NA 5336123.76 3788381.19013789 0.70995002374868 NA 4167422.85 1152917.59560276 0.276650015393268 0.389675337895617
+met_038 38 NA NA NA 4.416 0.03286335345031 0.00744188257479845 NA 4.385 0.0919238815542511 0.0209632569108896 2.81692927833619
+met_039 39 NA NA NA 4012591.3 1252979.510233 0.312261931643275 NA 5600014.8 1496341.04515943 0.26720305188469 0.855701655589386
+met_040 40 NA NA NA 4.506 0.0444971909225737 0.0098750978523244 NA 4.475 0.0212132034355966 0.00474038065599924 0.480033790741977