Repository 'msnbase_readmsdata'
hg clone https://toolshed.g2.bx.psu.edu/repos/lecorguille/msnbase_readmsdata

Changeset 0:728ebc7ae7dd (2018-09-18)
Next changeset 1:98aff9eca8b7 (2018-09-19)
Commit message:
planemo upload for repository https://github.com/workflow4metabolomics/xcms commit 9f72e947d9c241d11221cad561f3525d27231857
added:
lib-xcms3.x.x.r
lib.r
macros.xml
macros_msnbase.xml
msnbase_readmsdata.r
msnbase_readmsdata.xml
repository_dependencies.xml
static/images/msnbase_readmsdata_workflow.png
test-data/MM14.mzML
test-data/faahKO_reduce.zip
test-data/ko15.CDF
test-data/sampleMetadata.tsv
diff -r 000000000000 -r 728ebc7ae7dd lib-xcms3.x.x.r
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib-xcms3.x.x.r Tue Sep 18 16:04:51 2018 -0400
@@ -0,0 +1,152 @@
+
+
+#@TODO: remove this function as soon as we can use xcms 3.x.x from Bioconductor 3.7
+# https://github.com/sneumann/xcms/issues/250
+groupnamesW4M <- function(xdata, mzdec = 0, rtdec = 0) {
+    mzfmt <- paste("%.", mzdec, "f", sep = "")
+    rtfmt <- paste("%.", rtdec, "f", sep = "")
+
+    gnames <- paste("M", sprintf(mzfmt, featureDefinitions(xdata)[,"mzmed"]), "T",
+                    sprintf(rtfmt, featureDefinitions(xdata)[,"rtmed"]), sep = "")
+
+    if (any(dup <- duplicated(gnames)))
+        for (dupname in unique(gnames[dup])) {
+            dupidx <- which(gnames == dupname)
+            gnames[dupidx] <- paste(gnames[dupidx], seq(along = dupidx), sep = "_")
+        }
+
+    return (gnames)
+}
+
+#@TODO: remove this function as soon as we can use xcms 3.x.x from Bioconductor 3.7
+# https://github.com/sneumann/xcms/issues/247
+.concatenate_XCMSnExp <- function(...) {
+    x <- list(...)
+    if (length(x) == 0)
+        return(NULL)
+    if (length(x) == 1)
+        return(x[[1]])
+    ## Check that all are XCMSnExp objects.
+    if (!all(unlist(lapply(x, function(z) is(z, "XCMSnExp")))))
+        stop("All passed objects should be 'XCMSnExp' objects")
+    new_x <- as(.concatenate_OnDiskMSnExp(...), "XCMSnExp")
+    ## If any of the XCMSnExp has alignment results or detected features drop
+    ## them!
+    x <- lapply(x, function(z) {
+        if (hasAdjustedRtime(z)) {
+            z <- dropAdjustedRtime(z)
+            warning("Adjusted retention times found, had to drop them.")
+        }
+        if (hasFeatures(z)) {
+            z <- dropFeatureDefinitions(z)
+            warning("Feature definitions found, had to drop them.")
+        }
+        z
+    })
+    ## Combine peaks
+    fls <- lapply(x, fileNames)
+    startidx <- cumsum(lengths(fls))
+    pks <- lapply(x, chromPeaks)
+    procH <- lapply(x, processHistory)
+    for (i in 2:length(fls)) {
+        pks[[i]][, "sample"] <- pks[[i]][, "sample"] + startidx[i - 1]
+        procH[[i]] <- lapply(procH[[i]], function(z) {
+            z@fileIndex <- as.integer(z@fileIndex + startidx[i - 1])
+            z
+            })
+    }
+    pks <- do.call(rbind, pks)
+    new_x@.processHistory <- unlist(procH)
+    chromPeaks(new_x) <- pks
+    if (validObject(new_x))
+        new_x
+}
+
+#@TODO: remove this function as soon as we can use xcms 3.x.x from Bioconductor 3.7
+# https://github.com/sneumann/xcms/issues/247
+.concatenate_OnDiskMSnExp <- function(...) {
+    x <- list(...)
+    if (length(x) == 0)
+        return(NULL)
+    if (length(x) == 1)
+        return(x[[1]])
+    ## Check that all are OnDiskMSnExp objects.
+    if (!all(unlist(lapply(x, function(z) is(z, "OnDiskMSnExp")))))
+        stop("All passed objects should be 'OnDiskMSnExp' objects")
+    ## Check processingQueue
+    procQ <- lapply(x, function(z) z@spectraProcessingQueue)
+    new_procQ <- procQ[[1]]
+    is_ok <- unlist(lapply(procQ, function(z)
+        !is.character(all.equal(new_procQ, z))
+        ))
+    if (any(!is_ok)) {
+        warning("Processing queues from the submitted objects differ! ",
+                "Dropping the processing queue.")
+        new_procQ <- list()
+    }
+    ## processingData
+    fls <- lapply(x, function(z) z@processingData@files)
+    startidx <- cumsum(lengths(fls))
+    ## featureData
+    featd <- lapply(x, fData)
+    ## Have to update the file index and the spectrum names.
+    for (i in 2:length(featd)) {
+        featd[[i]]$fileIdx <- featd[[i]]$fileIdx + startidx[i - 1]
+        rownames(featd[[i]]) <- MSnbase:::formatFileSpectrumNames(
+                                              fileIds = featd[[i]]$fileIdx,
+                                              spectrumIds = featd[[i]]$spIdx,
+                                              nSpectra = nrow(featd[[i]]),
+                                              nFiles = length(unlist(fls))
+                                          )
+    }
+    featd <- do.call(rbind, featd)
+    featd$spectrum <- 1:nrow(featd)
+    ## experimentData
+    expdata <- lapply(x, function(z) {
+        ed <- z@experimentData
+        data.frame(instrumentManufacturer = ed@instrumentManufacturer,
+                   instrumentModel = ed@instrumentModel,
+                   ionSource = ed@ionSource,
+                   analyser = ed@analyser,
+                   detectorType = ed@detectorType,
+                   stringsAsFactors = FALSE)
+    })
+    expdata <- do.call(rbind, expdata)
+    expdata <- new("MIAPE",
+                   instrumentManufacturer = expdata$instrumentManufacturer,
+                   instrumentModel = expdata$instrumentModel,
+                   ionSource = expdata$ionSource,
+                   analyser = expdata$analyser,
+                   detectorType = expdata$detectorType)
+
+    ## protocolData
+    protodata <- lapply(x, function(z) z@protocolData)
+    if (any(unlist(lapply(protodata, nrow)) > 0))
+        warning("Found non-empty protocol data, but merging protocol data is",
+                " currently not supported. Skipped.")
+    ## phenoData
+    pdata <- do.call(rbind, lapply(x, pData))
+    res <- new(
+        "OnDiskMSnExp",
+        phenoData = new("NAnnotatedDataFrame", data = pdata),
+        featureData = new("AnnotatedDataFrame", featd),
+        processingData = new("MSnProcess",
+                             processing = paste0("Concatenated [", date(), "]"),
+                             files = unlist(fls), smoothed = NA),
+        experimentData = expdata,
+        spectraProcessingQueue = new_procQ)
+    if (validObject(res))
+        res
+}
+
+#@TODO: remove this function as soon as we can use xcms 3.x.x from Bioconductor 3.7
+# https://github.com/sneumann/xcms/issues/247
+c.XCMSnExp <- function(...) {
+    .concatenate_XCMSnExp(...)
+}
+
+#@TODO: remove this function as soon as we can use xcms 3.x.x from Bioconductor 3.7
+# https://github.com/sneumann/xcms/issues/247
+c.MSnbase <- function(...) {
+    .concatenate_OnDiskMSnExp(...)
+}
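These backported helpers register c() methods so that XCMSnExp and OnDiskMSnExp objects coming from separate per-file runs can be merged before group-wise processing. A minimal usage sketch, assuming run1 and run2 are OnDiskMSnExp objects returned by two independent readMSData() calls (object names are illustrative, not part of the repository):

    # merge two raw, per-file acquisitions into a single experiment
    merged_raw <- .concatenate_OnDiskMSnExp(run1, run2)
    # for peak-picked data, c() is meant to dispatch to .concatenate_XCMSnExp()
    # merged_xdata <- c(xdata1, xdata2)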
diff -r 000000000000 -r 728ebc7ae7dd lib.r
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib.r Tue Sep 18 16:04:51 2018 -0400
@@ -0,0 +1,510 @@
+#@authors ABiMS TEAM, Y. Guitton
+# lib.r for Galaxy Workflow4Metabolomics xcms tools
+
+#@author G. Le Corguille
+# solve an issue with batch if arguments are logical TRUE/FALSE
+parseCommandArgs <- function(...) {
+    args <- batch::parseCommandArgs(...)
+    for (key in names(args)) {
+        if (args[key] %in% c("TRUE","FALSE"))
+            args[key] = as.logical(args[key])
+    }
+    return(args)
+}
+
+#@author G. Le Corguille
+# This function will
+# - load the packages
+# - display the sessionInfo
+loadAndDisplayPackages <- function(pkgs) {
+    for(pkg in pkgs) suppressPackageStartupMessages( stopifnot( library(pkg, quietly=TRUE, logical.return=TRUE, character.only=TRUE)))
+
+    sessioninfo = sessionInfo()
+    cat(sessioninfo$R.version$version.string,"\n")
+    cat("Main packages:\n")
+    for (pkg in names(sessioninfo$otherPkgs)) { cat(paste(pkg,packageVersion(pkg)),"\t") }; cat("\n")
+    cat("Other loaded packages:\n")
+    for (pkg in names(sessioninfo$loadedOnly)) { cat(paste(pkg,packageVersion(pkg)),"\t") }; cat("\n")
+}
+
+#@author G. Le Corguille
+# This function merges several chromBPI or chromTIC objects into one.
+mergeChrom <- function(chrom_merged, chrom) {
+    if (is.null(chrom_merged)) return(NULL)
+    chrom_merged@.Data <- cbind(chrom_merged@.Data, chrom@.Data)
+    return(chrom_merged)
+}
+
+#@author G. Le Corguille
+# This function merges several xdata objects into one.
+mergeXData <- function(args) {
+    chromTIC <- NULL
+    chromBPI <- NULL
+    chromTIC_adjusted <- NULL
+    chromBPI_adjusted <- NULL
+    for(image in args$images) {
+
+        load(image)
+        # Handle infiles
+        if (!exists("singlefile")) singlefile <- NULL
+        if (!exists("zipfile")) zipfile <- NULL
+        rawFilePath <- getRawfilePathFromArguments(singlefile, zipfile, args)
+        zipfile <- rawFilePath$zipfile
+        singlefile <- rawFilePath$singlefile
+        retrieveRawfileInTheWorkingDirectory(singlefile, zipfile)
+
+        if (exists("raw_data")) xdata <- raw_data
+        if (!exists("xdata")) stop("\n\nERROR: The RData doesn't contain any object called 'xdata'. This RData should have been created by an old version of XCMS 2.*")
+
+        cat(sampleNamesList$sampleNamesOrigin,"\n")
+
+        if (!exists("xdata_merged")) {
+            xdata_merged <- xdata
+            singlefile_merged <- singlefile
+            md5sumList_merged <- md5sumList
+            sampleNamesList_merged <- sampleNamesList
+            chromTIC_merged <- chromTIC
+            chromBPI_merged <- chromBPI
+            chromTIC_adjusted_merged <- chromTIC_adjusted
+            chromBPI_adjusted_merged <- chromBPI_adjusted
+        } else {
+            if (is(xdata, "XCMSnExp")) xdata_merged <- c(xdata_merged,xdata)
+            else if (is(xdata, "OnDiskMSnExp")) xdata_merged <- .concatenate_OnDiskMSnExp(xdata_merged,xdata)
+            else stop("\n\nERROR: The RData must contain either an OnDiskMSnExp object called raw_data or an XCMSnExp object called xdata")
+
+            singlefile_merged <- c(singlefile_merged,singlefile)
+            md5sumList_merged$origin <- rbind(md5sumList_merged$origin,md5sumList$origin)
+            sampleNamesList_merged$sampleNamesOrigin <- c(sampleNamesList_merged$sampleNamesOrigin,sampleNamesList$sampleNamesOrigin)
+            sampleNamesList_merged$sampleNamesMakeNames <- c(sampleNamesList_merged$sampleNamesMakeNames,sampleNamesList$sampleNamesMakeNames)
+            chromTIC_merged <- mergeChrom(chromTIC_merged, chromTIC)
+            chromBPI_merged <- mergeChrom(chromBPI_merged, chromBPI)
+            chromTIC_adjusted_merged <- mergeChrom(chromTIC_adjusted_merged, chromTIC_adjusted)
+            chromBPI_adjusted_merged <- mergeChrom(chromBPI_adjusted_merged, chromBPI_adjusted)
+        }
+    }
+    rm(image)
+    xdata <- xdata_merged; rm(xdata_merged)
+    singlefile <- singlefile_merged; rm(singlefile_merged)
+    md5sumList <- md5sumList_merged; rm(md5sumList_merged)
+    sampleNa[...]
[...]
+[...](files)))
+}
+
+
+# This function gets the raw file path from the arguments
+#@author Gildas Le Corguille lecorguille@sb-roscoff.fr
+getRawfilePathFromArguments <- function(singlefile, zipfile, args, prefix="") {
+  if (!(prefix %in% c("","Positive","Negative","MS1","MS2"))) stop("prefix must be either '', 'Positive', 'Negative', 'MS1' or 'MS2'")
+
+  if (!is.null(args[[paste0("zipfile",prefix)]])) zipfile <- args[[paste0("zipfile",prefix)]]
+
+  if (!is.null(args[[paste0("singlefile_galaxyPath",prefix)]])) {
+    singlefile_galaxyPaths <- args[[paste0("singlefile_galaxyPath",prefix)]]
+    singlefile_sampleNames <- args[[paste0("singlefile_sampleName",prefix)]]
+  }
+  if (exists("singlefile_galaxyPaths")){
+    singlefile_galaxyPaths <- unlist(strsplit(singlefile_galaxyPaths,"\\|"))
+    singlefile_sampleNames <- unlist(strsplit(singlefile_sampleNames,"\\|"))
+
+    singlefile <- NULL
+    for (singlefile_galaxyPath_i in seq(1:length(singlefile_galaxyPaths))) {
+      singlefile_galaxyPath <- singlefile_galaxyPaths[singlefile_galaxyPath_i]
+      singlefile_sampleName <- singlefile_sampleNames[singlefile_galaxyPath_i]
+      # In case an URL is used to import data within Galaxy
+      singlefile_sampleName <- tail(unlist(strsplit(singlefile_sampleName,"/")), n=1)
+      singlefile[[singlefile_sampleName]] <- singlefile_galaxyPath
+    }
+  }
+  return(list(zipfile=zipfile, singlefile=singlefile))
+}
+
+# This function retrieves the raw files in the working directory
+#   - if zipfile: unzip the file with its directory tree
+#   - if singlefiles: set a symlink with the right filename
+#@author Gildas Le Corguille lecorguille@sb-roscoff.fr
+retrieveRawfileInTheWorkingDirectory <- function(singlefile, zipfile) {
+    if(!is.null(singlefile) && (length(singlefile)>0)) {
+        for (singlefile_sampleName in names(singlefile)) {
+            singlefile_galaxyPath <- singlefile[[singlefile_sampleName]]
+            if(!file.exists(singlefile_galaxyPath)){
+                error_message <- paste("Cannot access the sample:",singlefile_sampleName,"located:",singlefile_galaxyPath,". Please, contact your administrator ... if you have one!")
+                print(error_message); stop(error_message)
+            }
+
+            if (!suppressWarnings( try (file.link(singlefile_galaxyPath, singlefile_sampleName), silent=T)))
+                file.copy(singlefile_galaxyPath, singlefile_sampleName)
+
+        }
+        directory <- "."
+
+    }
+    if(!is.null(zipfile) && (zipfile != "")) {
+        if(!file.exists(zipfile)){
+            error_message <- paste("Cannot access the Zip file:",zipfile,". Please, contact your administrator ... if you have one!")
+            print(error_message)
+            stop(error_message)
+        }
+
+        # list all files in the zip file
+        #zip_files <- unzip(zipfile,list=T)[,"Name"]
+
+        # unzip
+        suppressWarnings(unzip(zipfile, unzip="unzip"))
+
+        # get the directory name
+        suppressWarnings(filesInZip <- unzip(zipfile, list=T))
+        directories <- unique(unlist(lapply(strsplit(filesInZip$Name,"/"), function(x) x[1])))
+        directories <- directories[!(directories %in% c("__MACOSX")) & file.info(directories)$isdir]
+        directory <- "."
+        if (length(directories) == 1) directory <- directories
+
+        cat("files_root_directory\t",directory,"\n")
+
+    }
+    return (directory)
+}
+
+
+# This function retrieves an xcmsSet-like object
+#@author Gildas Le Corguille lecorguille@sb-roscoff.fr
+getxcmsSetObject <- function(xobject) {
+    # XCMS 1.x
+    if (class(xobject) == "xcmsSet")
+        return (xobject)
+    # XCMS 3.x
+    if (class(xobject) == "XCMSnExp") {
+        # Get the legacy xcmsSet object
+        suppressWarnings(xset <- as(xobject, 'xcmsSet'))
+        if (!is.null(xset@phenoData$sample_group))
+            sampclass(xset) <- xset@phenoData$sample_group
+        else
+            sampclass(xset) <- "."
+        return (xset)
+    }
+}
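The last helper above, getxcmsSetObject, bridges xcms 3.x results back to the legacy xcmsSet class expected by older W4M tools. A minimal sketch, assuming xdata is an XCMSnExp whose phenoData carries a sample_group column, as created by these wrappers:

    # convert an XCMSnExp back to a legacy xcmsSet
    xset <- getxcmsSetObject(xdata)
    sampclass(xset)   # classes taken from sample_group, or "." when absent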
diff -r 000000000000 -r 728ebc7ae7dd macros.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/macros.xml Tue Sep 18 16:04:51 2018 -0400
@@ -0,0 +1,52 @@
+<?xml version="1.0"?>
+<macros>
+    <xml name="stdio">
+        <stdio>
+            <exit_code range="1" level="fatal" />
+        </stdio>
+    </xml>
+
+    <!-- COMMAND -->
+    <token name="@COMMAND_RSCRIPT@">LC_ALL=C Rscript $__tool_directory__/</token>
+
+    <token name="@COMMAND_LOG_EXIT@">
+        ;
+        return=\$?;
+        cat 'log.txt';
+        sh -c "exit \$return"
+    </token>
+
+    <!-- INPUT_VALIDATORS -->
+    <xml name="input_validator_range_integer">
+        <validator type="regex" message="The format is 'min,max'" >[0-9]+ *, *[0-9]+</validator>
+    </xml>
+
+    <xml name="input_validator_range_float">
+        <validator type="regex" message="The format is 'min,max'" >[0-9]+\.?[0-9]* *, *[0-9]+\.?[0-9]*</validator>
+    </xml>
+
+    <xml name="input_validator_list_integer">
+        <validator type="regex" message="The format is '1,2,4,6'" >[0-9, ]+</validator>
+    </xml>
+
+
+    <token name="@INPUT_IMAGE_LABEL@">RData file</token>
+    <token name="@INPUT_IMAGE_HELP@">It contains a xcms3::XCMSnExp object (named xdata)</token>
+
+
+    <!-- MISC -->
+    <token name="@HELP_AUTHORS_WRAPPERS@">
+
+.. class:: infomark
+
+**Galaxy integration** ABiMS TEAM - SU/CNRS - Station biologique de Roscoff and Yann Guitton - LABERCA
+Part of Workflow4Metabolomics.org [W4M]
+
+ | Contact support@workflow4metabolomics.org for any questions or concerns about the Galaxy implementation of this tool.
+
+    </token>
+
+    <xml name="citation_w4m">
+            <citation type="doi">10.1093/bioinformatics/btu813</citation>
+    </xml>
+</macros>
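The range validators above only guarantee that a parameter arrives as a 'min,max' string; the R wrappers still have to split it into two numbers. A minimal sketch of that split, using a hypothetical helper name that is not part of this repository:

    # split a validated "min,max" value into a numeric vector
    parseRange <- function(x) as.numeric(trimws(unlist(strsplit(x, ","))))
    parseRange("100, 500")   # c(100, 500)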
diff -r 000000000000 -r 728ebc7ae7dd macros_msnbase.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/macros_msnbase.xml Tue Sep 18 16:04:51 2018 -0400
@@ -0,0 +1,31 @@
+<?xml version="1.0"?>
+<macros>
+    <token name="@WRAPPER_VERSION@">2.4.0</token>
+    <xml name="requirements">
+        <requirements>
+            <requirement type="package" version="@WRAPPER_VERSION@">bioconductor-msnbase</requirement>
+            <requirement type="package" version="1.1_4">r-batch</requirement>
+            <requirement type="package" version="6.0">unzip</requirement>
+            <yield />
+        </requirements>
+    </xml>
+
+    <!-- MISC -->
+    <token name="@HELP_AUTHORS@">
+.. class:: infomark
+
+**Authors**  Laurent Gatto, Johannes Rainer and Sebastian Gibb with contributions from Guangchuang Yu, Samuel Wieczorek, Vasile-Cosmin Lazar, Vladislav Petyuk, Thomas Naake, Richie Cotton and Martina Fisher.
+
+@HELP_AUTHORS_WRAPPERS@
+
+---------------------------------------------------
+
+    </token>
+
+    <xml name="citation">
+        <citations>
+            <citation type="doi">10.1093/bioinformatics/btr645</citation>
+            <expand macro="citation_w4m"/>
+        </citations>
+    </xml>
+</macros>
diff -r 000000000000 -r 728ebc7ae7dd msnbase_readmsdata.r
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/msnbase_readmsdata.r Tue Sep 18 16:04:51 2018 -0400
@@ -0,0 +1,99 @@
+#!/usr/bin/env Rscript
+
+# ----- LOG FILE -----
+log_file <- file("log.txt", open="wt")
+sink(log_file)
+sink(log_file, type = "output")
+
+
+# ----- PACKAGE -----
+cat("\tSESSION INFO\n")
+
+#Import the different functions
+source_local <- function(fname) {
+    argv <- commandArgs(trailingOnly = FALSE)
+    base_dir <- dirname(substring(argv[grep("--file=", argv)], 8))
+    source(paste(base_dir, fname, sep = "/"))
+}
+source_local("lib.r")
+source_local("lib-xcms3.x.x.r")
+
+pkgs <- c("MSnbase","batch")
+loadAndDisplayPackages(pkgs)
+cat("\n\n");
+
+
+# ----- ARGUMENTS -----
+cat("\tARGUMENTS INFO\n")
+args <- parseCommandArgs(evaluate = FALSE) #interpretation of arguments given in command line as an R list of objects
+write.table(as.matrix(args), col.names=F, quote=F, sep='\t')
+
+cat("\n\n")
+
+
+# ----- PROCESSING INFILE -----
+cat("\tARGUMENTS PROCESSING INFO\n")
+
+
+cat("\n\n")
+
+# ----- INFILE PROCESSING -----
+cat("\tINFILE PROCESSING INFO\n")
+
+# Handle infiles
+if (!exists("singlefile")) singlefile <- NULL
+if (!exists("zipfile")) zipfile <- NULL
+rawFilePath <- getRawfilePathFromArguments(singlefile, zipfile, args)
+zipfile <- rawFilePath$zipfile
+singlefile <- rawFilePath$singlefile
+directory <- retrieveRawfileInTheWorkingDirectory(singlefile, zipfile)
+
+# Check some character issues
+md5sumList <- list("origin" = getMd5sum(directory))
+checkXmlStructure(directory)
+checkFilesCompatibilityWithXcms(directory)
+
+
+cat("\n\n")
+
+
+# ----- MAIN PROCESSING INFO -----
+cat("\tMAIN PROCESSING INFO\n")
+
+
+cat("\t\tCOMPUTE\n")
+
+## Get the full path to the files
+files <- getMSFiles(directory)
+
+cat("\t\t\tCreate a phenodata data.frame\n")
+s_groups <- sapply(files, function(x) tail(unlist(strsplit(dirname(x),"/")), n=1))
+s_name <- tools::file_path_sans_ext(basename(files))
+pd <- data.frame(sample_name=s_name, sample_group=s_groups, stringsAsFactors=FALSE)
+print(pd)
+
+cat("\t\t\tLoad Raw Data\n")
+raw_data <- readMSData(files=files, pdata = new("NAnnotatedDataFrame", pd), mode="onDisk")
+
+# Transform the absolute file paths into relative paths
+raw_data@processingData@files <- sub(paste(getwd(), "/", sep="") , "", raw_data@processingData@files)
+
+# Create a sampleMetadata file
+sampleNamesList <- getSampleMetadata(xdata=raw_data, sampleMetadataOutput="sampleMetadata.tsv")
+
+cat("\t\t\tCompute and Store TIC and BPI\n")
+chromTIC <- chromatogram(raw_data, aggregationFun = "sum")
+chromBPI <- chromatogram(raw_data, aggregationFun = "max")
+
+cat("\n\n")
+
+# ----- EXPORT -----
+
+cat("\tMSnExp OBJECT INFO\n")
+print(raw_data)
+cat("\t\tphenoData\n")
+print(raw_data@phenoData@data)
+cat("\n\n")
+
+# Save the R objects produced by this tool in an RData file
+objects2save <- c("raw_data", "zipfile", "singlefile", "md5sumList", "sampleNamesList", "chromTIC", "chromBPI")
+save(list=objects2save[objects2save %in% ls()], file="readmsdata.RData")
+
+
+cat("\tDONE\n")
diff -r 000000000000 -r 728ebc7ae7dd msnbase_readmsdata.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/msnbase_readmsdata.xml Tue Sep 18 16:04:51 2018 -0400
@@ -0,0 +1,241 @@
+<tool id="msnbase_readmsdata" name="MSnbase readMSData" version="@WRAPPER_VERSION@.0">
+    <description>Imports mass-spectrometry raw data files</description>
+
+    <macros>
+        <import>macros.xml</import>
+        <import>macros_msnbase.xml</import>
+    </macros>
+
+    <expand macro="requirements"/>
+    <expand macro="stdio"/>
+
+    <command><![CDATA[
+        @COMMAND_RSCRIPT@/msnbase_readmsdata.r
+
+        #if $input.is_of_type("mzxml") or $input.is_of_type("mzml") or $input.is_of_type("mzdata") or $input.is_of_type("netcdf"):
+            singlefile_galaxyPath '$input' singlefile_sampleName '$input.name'
+        #else
+            zipfile '$input'
+        #end if
+
+        @COMMAND_LOG_EXIT@
+    ]]></command>
+
+    <inputs>
+
+        <param name="input" type="data" format="mzxml,mzml,mzdata,netcdf,no_unzip.zip,zip" label="File(s) from your history containing your chromatograms" help="Single file mode for the following formats: mzxml, mzml, mzdata and netcdf. Zip file mode for the following formats: no_unzip.zip, zip. See the help section below." />
+
+    </inputs>
+
+    <outputs>
+        <data name="xsetRData" format="rdata.msnbase.raw" label="${input.name.rsplit('.',1)[0]}.raw.RData" from_work_dir="readmsdata.RData" />
+        <data name="sampleMetadata" format="tabular" label="${input.name.rsplit('.',1)[0]}.sampleMetadata.tsv" from_work_dir="sampleMetadata.tsv" >
+            <filter>input.extension not in ["mzxml","mzml","mzdata","netcdf"]</filter>
+        </data>
+    </outputs>
+
+    <tests>
+
+        <test>
+            <param name="input" value="faahKO_reduce.zip"  ftype="zip" />
+            <assert_stdout>
+                <has_text text="rowNames: faahKO_reduce/KO/ko15.CDF faahKO_reduce/KO/ko16.CDF" />
+                <has_text text="faahKO_reduce/WT/wt15.CDF faahKO_reduce/WT/wt16.CDF" />
+                <has_text text="featureNames: F1.S0001 F1.S0002 ... F4.S1278 (5112 total)" />
+                <has_text text="fvarLabels: fileIdx spIdx ... spectrum (27 total)" />
+                <has_text text="faahKO_reduce/KO/ko15.CDF        ko15           KO" />
+                <has_text text="faahKO_reduce/KO/ko16.CDF        ko16           KO" />
+                <has_text text="faahKO_reduce/WT/wt15.CDF        wt15           WT" />
+                <has_text text="faahKO_reduce/WT/wt16.CDF        wt16           WT" />
+            </assert_stdout>
+            <output name="sampleMetadata" value="sampleMetadata.tsv" />
+        </test>
+        <test>
+            <param name="input" value="ko15.CDF"  ftype="netcdf" />
+            <assert_stdout>
+                <has_text text="rowNames: ./ko15.CDF" />
+                <has_text text="ko15.CDF" />
+                <has_text text="featureNames: F1.S0001 F1.S0002 ... F1.S1278 (1278 total)" />
+                <has_text text="fvarLabels: fileIdx spIdx ... spectrum (27 total)" />
+                <has_text text="./ko15.CDF        ko15            ." />
+            </assert_stdout>
+        </test>
+        <!-- DISABLE FOR TRAVIS
+        Useful to generate test-data for the further steps -->
+        <test>
+            <param name="input" value="ko16.CDF"  ftype="netcdf" />
+            <assert_stdout>
+                <has_text text="rowNames: ./ko16.CDF" />
+                <has_text text="ko16.CDF" />
+                <has_text text="./ko16.CDF        ko16            ." />
+            </assert_stdout>
+        </test>
+        <test>
+            <param name="input" value="wt15.CDF"  ftype="netcdf" />
+            <assert_stdout>
+                <has_text text="rowNames: ./wt15.CDF" />
+                <has_text text="wt15.CDF" />
+                <has_text text="./wt15.CDF        wt15            ." />
+            </assert_stdout>
+        </test>
+        <test>
+            <param name="input" value="wt16.CDF"  ftype="netcdf" />
+            <assert_stdout>
+                <has_text text="rowNames: ./wt16.CDF" />
[...]
+[...]te a Dataset Collection of the type List
+
+**Downstream tools**
+
+=========================== ==================== ====================
+Name                        Output file          Format
+=========================== ==================== ====================
+xcms.findChromPeaks         ``*``.raw.RData      rdata.msnbase.raw
+=========================== ==================== ====================
+
+
+**Example of a metabolomic workflow**
+
+.. image:: msnbase_readmsdata_workflow.png
+
+---------------------------------------------------
+
+
+-----------
+Input files
+-----------
+
+=========================== ==================================
+Parameter : num + label     Format
+=========================== ==================================
+OR : Zip file               zip
+--------------------------- ----------------------------------
+OR : Single file            mzXML, mzML, mzData, netCDF
+=========================== ==================================
+
+**Choose your inputs**
+
+You have two methods for your inputs:
+
+    | Single file (recommended): you can put a single file as input. That way, you will be able to launch several readMSData and findChromPeaks jobs in parallel and use "findChromPeaks Merger" before groupChromPeaks.
+    | Zip file: you can put a zip file containing your inputs: myinputs.zip (containing all your conditions as sub-directories).
+
+Zip file: steps for creating the zip file
+-----------------------------------------
+
+**Step 1: Create your directory and organise the sub-directories**
+
+VERY IMPORTANT: if you zip your files under Windows, you must use the 7Zip_ software; otherwise your zip will not be unzipped correctly on the W4M platform (corrupted zip bug).
+
+.. _7Zip: http://www.7-zip.org/
+
+Your zip should contain all your conditions as sub-directories. For example, two conditions (mutant and wild):
+arabidopsis/wild/01.raw
+arabidopsis/mutant/01.raw
+
+**Step 2: Create the zip file**
+
+Create your zip file (*e.g.* arabidopsis.zip).
+
+**Step 3: Upload it to our Galaxy server**
+
+If your zip file is smaller than 2Gb, you can use the Get Data tool to upload it.
+
+Otherwise, if your zip file is larger than 2Gb, please refer to the HOWTO_ on workflow4metabolomics.org.
+
+.. _HOWTO: http://application.sb-roscoff.fr/download/w4m/howto/galaxy_upload_up_2Go.pdf
+
+For more information, do not hesitate to send us an email at supportATworkflow4metabolomics.org.
+
+Advice for converting your files into mzXML format (XCMS input)
+----------------------------------------------------------------
+
+We recommend converting your raw files into **mzXML** in centroid mode (smaller files); this way the files will be compatible with the xcms centWave algorithm.
+
+**We recommend the following parameters:**
+
+Use Filtering: **True**
+
+Use Peak Picking: **True**
+
+Peak Picking - Apply to MS Levels: **All Levels (1-)** : Centroid Mode
+
+Use zlib: **64**
+
+Binary Encoding: **64**
+
+m/z Encoding: **64**
+
+Intensity Encoding: **64**
+
+
+------------
+Output files
+------------
+
+xset.RData: rdata.msnbase.raw format
+
+    | RData file that is necessary in the second step of the workflow, "xcms.findChromPeaks".
+
+sampleMetadata.tsv (only when a zip is used)
+
+    | Tabular file that contains, for each sample, its associated class and polarity (positive, negative or mixed).
+    | This file is necessary in further steps of the workflow, such as the ANOVA and PCA steps.
+    | You get a sampleMetadata.tsv only if you use a zip. Otherwise, you have to provide one for the findChromPeaks Merger step.
+
+---------------------------------------------------
+
+Changelog/News
+--------------
+
+**Version 2.4.0.0 - 29/03/2018**
+
+- NEW: a new dedicated tool to read the raw data. This function was previously included in xcms.findChromPeaks. This way, you will now be able to display TICs and BPCs before xcms.findChromPeaks.
+
+    ]]></help>
+
+    <expand macro="citation" />
+</tool>
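As the help section notes, single-file mode produces no sampleMetadata.tsv, so one has to be supplied later for the findChromPeaks Merger step. A minimal sketch of writing such a table by hand, with sample names and classes borrowed from the test data below:

    sampleMetadata <- data.frame(sampleMetadata = c("ko15", "ko16", "wt15", "wt16"),
                                 class = c("KO", "KO", "WT", "WT"))
    write.table(sampleMetadata, "sampleMetadata.tsv", sep = "\t",
                quote = FALSE, row.names = FALSE)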
diff -r 000000000000 -r 728ebc7ae7dd repository_dependencies.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/repository_dependencies.xml Tue Sep 18 16:04:51 2018 -0400
@@ -0,0 +1,5 @@
+<?xml version="1.0"?>
+<repositories>
+    <repository changeset_revision="7800ba9a4c1e" name="no_unzip_datatype" owner="lecorguille" toolshed="https://toolshed.g2.bx.psu.edu" />
+    <repository changeset_revision="d64562a4ebb3" name="rdata_xcms_datatypes" owner="lecorguille" toolshed="https://toolshed.g2.bx.psu.edu" />
+</repositories>
diff -r 000000000000 -r 728ebc7ae7dd static/images/msnbase_readmsdata_workflow.png
Binary file static/images/msnbase_readmsdata_workflow.png has changed
diff -r 000000000000 -r 728ebc7ae7dd test-data/MM14.mzML
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/MM14.mzML Tue Sep 18 16:04:51 2018 -0400
@@ -0,0 +1,3667 @@
[New mzML 1.1 test file (PSI-MS and Unit Ontology controlled vocabularies; Bruker CompassXport/FileConverter export of sample MM14_20uM): instrument metadata and base64-encoded m/z and intensity arrays are not reproduced here.]
diff -r 000000000000 -r 728ebc7ae7dd test-data/faahKO_reduce.zip
Binary file test-data/faahKO_reduce.zip has changed
diff -r 000000000000 -r 728ebc7ae7dd test-data/ko15.CDF
Binary file test-data/ko15.CDF has changed
diff -r 000000000000 -r 728ebc7ae7dd test-data/sampleMetadata.tsv
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/sampleMetadata.tsv Tue Sep 18 16:04:51 2018 -0400
@@ -0,0 +1,5 @@
+sampleMetadata class
+ko15 KO
+ko16 KO
+wt15 WT
+wt16 WT