<tool id="sklearn_model_validation" name="Model Validation" version="@VERSION@">
    <description>evaluates estimator performance by cross-validation</description>
    <macros>
        <import>main_macros.xml</import>
    </macros>
    <expand macro="python_requirements"/>
    <expand macro="macro_stdio"/>
    <version_command>echo "@VERSION@"</version_command>
    <command>
        <![CDATA[
        python "$sklearn_model_validation_script" '$inputs'
        ]]>
    </command>
    <configfiles>
        <inputs name="inputs" />
        <configfile name="sklearn_model_validation_script">
            <![CDATA[
import sys
import json
import pandas
import ast
import pickle
import numpy as np
import sklearn.model_selection
from scipy.io import mmread
from sklearn import svm, linear_model, ensemble
from sklearn.pipeline import Pipeline

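## Macros from main_macros.xml expand below into the read_columns() and
## feature_selector() helper functions used later in this script.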
@COLUMNS_FUNCTION@

@FEATURE_SELECTOR_FUNCTION@

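## Load the tool parameters that Galaxy serialized to the JSON configfile.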
input_json_path = sys.argv[1]
params = json.load(open(input_json_path, "r"))

input_type = params["input_options"]["selected_input"]
if input_type == "tabular":
    header = 'infer' if params["input_options"]["header1"] else None
    column_option = params["input_options"]["column_selector_options_1"]["selected_column_selector_option"]
    if column_option == "by_index_number":
        c = params["input_options"]["column_selector_options_1"]["col1"]
    else:
        c = None
    X = read_columns(
            "$input_options.infile1",
            c = c,
            c_option = column_option,
            sep='\t',
            header=header,
            parse_dates=True
    )
else:
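    ## Sparse input: read the matrix from a Matrix Market (.mtx) file.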
    X = mmread(open("$input_options.infile1", 'r'))

header = 'infer' if params["input_options"]["header2"] else None
column_option = params["input_options"]["column_selector_options_2"]["selected_column_selector_option2"]
if column_option == "by_index_number":
    c = params["input_options"]["column_selector_options_2"]["col2"]
else:
    c = None
y = read_columns(
        "$input_options.infile2",
        c = c,
        c_option = column_option,
        sep='\t',
        header=header,
        parse_dates=True
)
y = y.ravel()

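## Treat empty option strings as None so that scikit-learn defaults apply.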
options = params["model_validation_functions"]["options"]
if 'scoring' in options and options['scoring'] == '':
    options['scoring'] = None
if 'pre_dispatch' in options and options['pre_dispatch'] == '':
    options['pre_dispatch'] = None

pipeline_steps = []

## Set up feature selector and add to pipeline steps.
if params['feature_selection']['do_feature_selection'] == 'Yes':
    selector = feature_selector(params['feature_selection']['feature_selection_algorithms'])
    pipeline_steps.append(('feature_selector', selector))

## Set up estimator and add to pipeline.
estimator = params["model_validation_functions"]["estimator"]
if params["model_validation_functions"]["extra_estimator"]["has_estimator"] == 'no':
    estimator = params["model_validation_functions"]["extra_estimator"]["new_estimator"]
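## Restore the quote characters that Galaxy escaped (__dq__/__sq__) before evaluating the estimator expression.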
estimator = eval(estimator.replace('__dq__', '"').replace("__sq__","'"))

pipeline_steps.append( ('estimator', estimator) )

pipeline = Pipeline(pipeline_steps)

## Set up validator, run pipeline through validator and return results.

validator = params["model_validation_functions"]["selected_function"]
validator = getattr(sklearn.model_selection, validator)

selected_function = params["model_validation_functions"]["selected_function"]
rval_type = params["model_validation_functions"].get("return_type", None)

if selected_function == 'cross_validate':
    res = validator(pipeline, X, y, **options)
    rval = res[rval_type]
elif selected_function == 'learning_curve':
    options['train_sizes'] = eval(options['train_sizes'])
    train_sizes_abs, train_scores, test_scores = validator(pipeline, X, y, **options)
    rval = eval(rval_type)
elif selected_function == 'permutation_test_score':
    score, permutation_scores, pvalue = validator(pipeline, X, y, **options)
    rval = eval(rval_type)
    if rval_type in ["score", "pvalue"]:
        rval = [rval]
elif selected_function == 'validation_curve':
    options['param_name'] = 'estimator__' + options['param_name']
    options['param_range'] = eval(options['param_range'])
    train_scores, test_scores = validator(pipeline, X, y, **options)
    rval = eval(rval_type)
elif selected_function == 'GridSearchCV':
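    ## Restore the characters Galaxy escaped in the param_grid text, then parse it with ast.literal_eval.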
    param_grid = params["model_validation_functions"]["param_grid"].replace("__sq__","'")\
        .replace('__dq__','"').replace("__oc__", "{").replace("__cc__", "}")\
        .replace("__ob__", "[").replace("__cb__", "]")
    param_grid = ast.literal_eval(param_grid)
    grid = validator(pipeline, param_grid, **options)
    grid.fit(X, y)
    rval = getattr(grid, rval_type)
    if rval_type in ["best_estimator_", "best_score_", "best_index_"]:
        rval = [rval]     
else:
    rval = validator(pipeline, X, y, **options)

rval = pandas.DataFrame(rval)
if rval_type and rval_type == "cv_results_":
    rval.to_csv(path_or_buf="$outfile", sep='\t', header=True, index=False)
else:
    rval.to_csv(path_or_buf="$outfile", sep='\t', header=False, index=False)

            ]]>
        </configfile>
    </configfiles>
    <inputs>
        <conditional name="feature_selection">
            <param name="do_feature_selection" type="select" label="Do feature selection?">
                <option value="No" selected="true"/>
                <option value="Yes"/>
            </param>
            <when value="No"/>
            <when value="Yes">
                <expand macro="feature_selection_all"/>
            </when>
        </conditional>
        <conditional name="model_validation_functions">
            <param name="selected_function" type="select" label="Select a model validation function">
                <option value="GridSearchCV">GridSearchCV - Exhaustive search over specified parameter values for an estimator </option>
                <option value="cross_validate">cross_validate - Evaluate metric(s) by cross-validation and also record fit/score times</option>
                <option value="cross_val_predict">cross_val_predict - Generate cross-validated estimates for each input data point</option>
                <option value="cross_val_score">cross_val_score - Evaluate a score by cross-validation</option>
                <option value="learning_curve">learning_curve - Learning curve</option>
                <option value="permutation_test_score">permutation_test_score - Evaluate the significance of a cross-validated score with permutations</option>
                <option value="validation_curve">validation_curve - Validation curve</option>
            </param>
            <when value="GridSearchCV">
                <expand macro="estimator_input_no_fit" />
                <param argument="param_grid" type="text" value="[{'feature_selector__k': [3, 5, 7, 9], 'estimator__C': [1, 10, 100, 1000]}]" label="param_grid" help="Dictionary with parameters names (string) as keys and lists of parameter settings to try as values, or a list of such dictionaries, in which case the grids spanned by each dictionary in the list are explored"/>
                <section name="options" title="Other Options" expanded="false">
                    <expand macro="scoring"/>
                    <expand macro="model_validation_common_options"/>
                    <expand macro="pre_dispatch" value="2*n_jobs" help="Controls the number of jobs that get dispatched during parallel execution"/>
                    <param argument="iid" type="boolean" truevalue="booltrue" falsevalue="boolfalse" checked="true" label="iid" help="If True, assume the data is identically distributed across the folds, and minimize the total loss per sample rather than the mean loss across the folds"/>
                    <param argument="refit" type="boolean" truevalue="booltrue" falsevalue="boolfalse" checked="true" label="refit" help="Refit an estimator using the best found parameters on the whole dataset."/>
                    <!--error_score-->
                    <param argument="return_train_score" type="boolean" truevalue="booltrue" falsevalue="boolfalse" checked="false" label="return_train_score" help="Whether to include training scores in the cv_results_ attribute"/>
                </section>
                <param name="return_type" type="select" label="Select a return type">
                    <option value="cv_results_" selected="true">cv_results_</option>
                    <option value="best_estimator_">best_estimator_</option>
                    <option value="best_score_">best_score_</option>
                    <option value="best_params_">best_params_</option>
                    <option value="best_index_">best_index_</option>
                </param>
            </when>
            <when value="cross_validate">
                <expand macro="estimator_input_no_fit" />
                <section name="options" title="Other Options" expanded="false">
                    <!--groups-->
                    <expand macro="model_validation_common_options"/>
                    <expand macro="scoring"/>
                    <!--fit_params-->
                    <expand macro="pre_dispatch"/>
                </section>
                <param name="return_type" type="select" label="Select a return type">
                    <option value="test_score" selected="true">test_score</option>
                    <option value="train_score">train_score</option>
                    <option value="fit_time">fit_time</option>
                    <option value="score_time">score_time</option>
                </param>
            </when>
            <when value="cross_val_predict">
                <expand macro="estimator_input_no_fit" />
                <section name="options" title="Other Options" expanded="false">
                    <!--groups-->
                    <expand macro="model_validation_common_options" />
                    <!--fit_params-->
                    <expand macro="pre_dispatch" value="2*n_jobs" help="Controls the number of jobs that get dispatched during parallel execution"/>
                    <param argument="method" type="select" label="Invokes the passed method name of the passed estimator">
                        <option value="predict" selected="true">predict</option>
                        <option value="predict_proba">predict_proba</option>
                    </param>
                </section>
            </when>
            <when value="cross_val_score">
                <expand macro="estimator_input_no_fit" />
                <section name="options" title="Other Options" expanded="false">
                    <!--groups-->
                    <expand macro="model_validation_common_options"/>
                    <expand macro="scoring"/>
                    <!--fit_params-->
                    <expand macro="pre_dispatch"/>
                </section>
            </when>
            <when value="learning_curve">
                <expand macro="estimator_input_no_fit" />
                <section name="options" title="Other Options" expanded="false">
                    <!--groups-->
                    <expand macro="model_validation_common_options"/>
                    <param argument="train_sizes" type="text" value="np.linspace(0.1, 1.0, 5)" label="train_sizes" help="Relative or absolute numbers of training examples that will be used to generate the learning curve"/>
                    <expand macro="scoring"/>
                    <param argument="exploit_incremental_learning" type="boolean" optional="true" truevalue="booltrue" falsevalue="boolfalse" checked="false" label="exploit_incremental_learning" help="Whether to apply incremental learning to speed up fitting of the estimator if supported"/>
                    <expand macro="pre_dispatch"/>
                    <expand macro="shuffle" checked="false" label="shuffle" help="Whether to shuffle training data before taking prefixes"/>
                    <expand macro="random_state"/>
                </section>
                <param name="return_type" type="select" label="Select a return type">
                    <option value="train_sizes_abs" selected="true">train_sizes_abs</option>
                    <option value="train_scores">train_scores</option>
                    <option value="test_scores">test_scores</option>
                </param>
            </when>
            <when value="permutation_test_score">
                <expand macro="estimator_input_no_fit" />
                <section name="options" title="Other Options" expanded="false">
                    <!--groups-->
                    <expand macro="model_validation_common_options"/>
                    <expand macro="scoring"/>
                    <param name="n_permutations" type="integer" value="100" optional="true" label="n_permutations" help="Number of times to permute y"/>
                    <expand macro="random_state"/>
                </section>
                <param name="return_type" type="select" label="Select a return type">
                    <option value="score" selected="true">score</option>
                    <option value="permutation_scores">permutation_scores</option>
                    <option value="pvalue">pvalue</option>
                </param>
            </when>
            <when value="validation_curve">
                <expand macro="estimator_input_no_fit" />
                <section name="options" title="Other Options" expanded="false">
                    <param name="param_name" type="text" value="gamma" label="param_name" help="Name of the parameter that will be varied"/>
                    <param name="param_range" type="text" value="np.logspace(-6, -1, 5)" label="param_range" help="The values of the parameter that will be evaluated."/>
                    <!--groups-->                    
                    <expand macro="model_validation_common_options"/>
                    <expand macro="scoring"/>
                    <expand macro="pre_dispatch"/>
                </section>
                <param name="return_type" type="select" label="Select a return type">
                    <option value="train_scores" selected="true">train_scores</option>
                    <option value="test_scores">test_scores</option>
                </param>
            </when>
        </conditional>
        <expand macro="sl_mixed_input"/>
    </inputs>
    <outputs>
        <data format="tabular" name="outfile"/>
    </outputs>
    <tests>
        <test>
            <param name="selected_function" value="cross_validate"/>
            <param name="estimator" value="linear_model.LassoCV()"/>
            <param name="has_estimator" value="yes"/>
            <param name="infile1" value="regression_train.tabular" ftype="tabular"/>
            <param name="col1" value="1,2,3,4,5"/>
            <param name="infile2" value="regression_train.tabular" ftype="tabular"/>
            <param name="col2" value="6"/>
            <output name="outfile" file="mv_result01.tabular"/>
        </test>
        <test>
            <param name="selected_function" value="cross_val_predict"/>
            <param name="estimator" value="linear_model.LassoCV()"/>
            <param name="has_estimator" value="yes"/>
            <param name="infile1" value="regression_train.tabular" ftype="tabular"/>
            <param name="col1" value="1,2,3,4,5"/>
            <param name="infile2" value="regression_train.tabular" ftype="tabular"/>
            <param name="col2" value="6"/>
            <output name="outfile" file="mv_result02.tabular"/>
        </test>
        <test>
            <param name="selected_function" value="cross_val_score"/>
            <param name="estimator" value="linear_model.LassoCV()"/>
            <param name="has_estimator" value="yes"/>
            <param name="infile1" value="regression_train.tabular" ftype="tabular"/>
            <param name="col1" value="1,2,3,4,5"/>
            <param name="infile2" value="regression_train.tabular" ftype="tabular"/>
            <param name="col2" value="6"/>
            <output name="outfile" file="mv_result03.tabular"/>
        </test>
        <test>
            <param name="selected_function" value="learning_curve"/>
            <param name="estimator" value="linear_model.LassoCV()"/>
            <param name="has_estimator" value="yes"/>
            <param name="infile1" value="regression_X.tabular" ftype="tabular"/>
            <param name="header1" value="true" />
            <param name="col1" value="1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17"/>
            <param name="infile2" value="regression_y.tabular" ftype="tabular"/>
            <param name="header2" value="true" />
            <param name="col2" value="1"/>
            <output name="outfile" file="mv_result04.tabular"/>
        </test>
        <test>
            <param name="selected_function" value="permutation_test_score"/>
            <param name="estimator" value="linear_model.LassoCV()"/>
            <param name="has_estimator" value="yes"/>
            <param name="infile1" value="regression_train.tabular" ftype="tabular"/>
            <param name="col1" value="1,2,3,4,5"/>
            <param name="infile2" value="regression_train.tabular" ftype="tabular"/>
            <param name="col2" value="6"/>
            <output name="outfile" file="mv_result05.tabular"/>
        </test>
        <test>
            <param name="selected_function" value="validation_curve"/>
            <param name="estimator" value="svm.SVC(kernel=&quot;linear&quot;)"/>
            <param name="has_estimator" value="yes"/>
            <param name="infile1" value="regression_X.tabular" ftype="tabular"/>
            <param name="header1" value="true" />
            <param name="selected_column_selector_option" value="all_columns"/>
            <param name="infile2" value="regression_y.tabular" ftype="tabular"/>
            <param name="header2" value="true" />
            <param name="col2" value="1"/>
            <param name="return_type" value="test_scores"/>
            <output name="outfile" file="mv_result06.tabular"/>
        </test>
        <test>
            <param name="do_feature_selection" value="Yes"/>
            <param name="selected_algorithm" value="SelectKBest"/>
            <param name="score_func" value="chi2"/>
            <param name="selected_function" value="GridSearchCV"/>
            <param name="estimator" value="svm.SVR(kernel=&quot;linear&quot;)"/>
            <param name="has_estimator" value="yes"/>
            <param name="param_grid" value="[{'feature_selector__k': [3, 7], 'estimator__C': [1, 100]}]"/>
            <param name="return_type" value="best_score_"/>
            <param name="infile1" value="regression_X.tabular" ftype="tabular"/>
            <param name="header1" value="true" />
            <param name="selected_column_selector_option" value="all_columns"/>
            <param name="infile2" value="regression_y.tabular" ftype="tabular"/>
            <param name="header2" value="true" />
            <param name="selected_column_selector_option2" value="all_columns"/>
            <output name="outfile" file="mv_result07.tabular"/>
        </test>
    </tests>
    <help>
        <![CDATA[
**What it does**

This tool provides model validation functions from the sklearn.model_selection package to evaluate
estimator performance by cross-validation. Depending on the selected function, it reports cross-validated
scores or predictions, learning or validation curves, permutation test scores, or grid search results.

For information about classification metric functions and their parameter settings, please refer to
`Scikit-learn classification metrics`_.

.. _`Scikit-learn classification metrics`: http://scikit-learn.org/stable/modules/model_evaluation.html#classification-metrics
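
**Example**

Internally, the tool assembles a scikit-learn Pipeline (optionally with a feature selection step) and passes
it to the selected function from sklearn.model_selection. The following is a minimal sketch, with an
illustrative input file and column layout, of what a cross_validate run with a LassoCV estimator does::

    import pandas
    from sklearn import linear_model
    from sklearn.model_selection import cross_validate
    from sklearn.pipeline import Pipeline

    # read features and target from a tab-separated file (illustrative columns)
    data = pandas.read_csv("regression_train.tabular", sep="\t", header=None)
    X = data.iloc[:, 0:5].values
    y = data.iloc[:, 5].values.ravel()

    # single-step pipeline wrapping the chosen estimator
    pipeline = Pipeline([("estimator", linear_model.LassoCV())])

    # cross_validate returns a dict with test_score, fit_time and score_time arrays
    res = cross_validate(pipeline, X, y)
    print(res["test_score"])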
        ]]>
    </help>
    <expand macro="sklearn_citation"/>
</tool>